Example usage for com.mongodb.client MongoCollection insertOne

List of usage examples for com.mongodb.client MongoCollection insertOne

Introduction

On this page you can find usage examples for com.mongodb.client MongoCollection insertOne.

Prototype

void insertOne(TDocument document);

Document

Inserts the provided document.
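
A minimal, self-contained sketch of the call. The host, database, and collection names here are placeholders, not taken from the examples below:

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import org.bson.Document;

public class InsertOneExample {
    public static void main(String[] args) {
        // assumes a MongoDB instance reachable on localhost:27017
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        MongoCollection<Document> collection = mongoClient.getDatabase("testdb").getCollection("example");

        // insertOne writes a single document; the driver adds an ObjectId _id if none is set
        collection.insertOne(new Document("name", "MongoDB").append("count", 1));

        mongoClient.close();
    }
}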

Usage

From source file:module.script.emtab365.ImportSamplesEMTAB365.java

License:Open Source License

public ImportSamplesEMTAB365() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Collections ======
    MongoCollection<Document> collectionPlatforms = db.getCollection("platforms");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "E-MTAB-365.sdrf.xlsx";
    System.out.println("LOADING \t " + inputfile);
    excelService.load(inputfile);

    // ===== Init values ======

    String idSeries = "E-MTAB-365";
    List<String> listSeries = new ArrayList<String>();
    listSeries.add(idSeries);
    Document docSeries = collectionSeries.find(Filters.eq("_id", idSeries)).first();
    String organism = "Homo sapiens";

    // ==== Header processing ====

    Map<Integer, String> mapHeader = new HashMap<Integer, String>();
    for (int i = 0; i < excelService.getHeader().size(); i++) {
        String headerItem = (String) excelService.getHeader().get(i);
        if (headerItem != null && headerItem.contains("[")) {
            String[] parts = headerItem.split("[\\[\\]]");
            headerItem = parts[1];
            headerItem = headerItem.replaceAll("[:_\\.]", " ");
        }
        mapHeader.put(i, headerItem == null ? null : headerItem.trim()); // guard against null header cells
    }
    System.out.println(mapHeader);

    for (int i = 0; i < excelService.getData().size(); i++) {
        // for (int i=0; i<1; i++) {

        List<Object> dataline = excelService.getData().get(i);
        String idSample = (String) dataline.get(0);

        if (!idSample.equals("pool XX")) {

            String idPlatform = ((String) dataline.get(54)).trim();
            if (idPlatform.contains("A-AFFY-44")) {
                idPlatform = "GPL570";
            } else {
                Document docPlatform = mongoService.createPlatform(idPlatform, null, "9606", "Homo sapiens",
                        null, null, null, null);
                UpdateResult res = collectionPlatforms.updateOne(
                        Filters.eq("_id", docPlatform.getString("_id")), new Document("$set", docPlatform));
                if (res.getMatchedCount() == 0) {
                    collectionPlatforms.insertOne(docPlatform);
                }
            }

            Document docSample = mongoService.createSample(idSample, idSeries, listSeries, organism,
                    (Date) docSeries.get("submission_date"), (Date) docSeries.get("last_update"), false);

            // === exp_group ===

            Document expgroup = mongoService.createExpGroup(docSample, idPlatform, null, null, organism);
            docSample.append("exp_group", expgroup);

            // === parameters ===

            Map<String, Object> mapParameters = new HashMap<String, Object>();

            for (int j = 0; j < dataline.size(); j++) {

                String key = mapHeader.get(j);
                Object value = dataline.get(j);

                if (value instanceof String) {
                    String valueString = ((String) value).trim();
                    // keep only non-empty values that are not NA/ND placeholders
                    if (!valueString.isEmpty() && !valueString.equals("NA")
                            && !valueString.equals("ND")) {
                        value = valueString;
                    } else {
                        value = null;
                    }
                }

                if (key != null && value != null) {
                    mapParameters.put(key, value);
                    // System.out.println(key + "='" + value+"'");
                }
            }

            Document parameters = mongoService.createParameters(docSample, mapParameters);
            docSample.append("parameters", parameters);

            // === Delete if already exist ===
            collectionSamples.deleteOne(Filters.eq("_id", docSample.getString("_id")));

            // ===== Insert data =====
            collectionSamples.insertOne(docSample);

            System.out.println(docSample);

        }
    }

    mongoClient.close();
}
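
The example above writes in two non-atomic steps: updateOne followed by insertOne when getMatchedCount() returns zero for platforms, and deleteOne followed by insertOne for samples. Assuming a driver version that provides ReplaceOptions (3.7 or later; earlier 3.x drivers expose the same flag on UpdateOptions), a single replaceOne call with upsert enabled achieves the same effect atomically. A minimal sketch reusing the names from the listing:

import com.mongodb.client.model.Filters;
import com.mongodb.client.model.ReplaceOptions;

// replace the sample document in place, or insert it if no document matches the _id
collectionSamples.replaceOne(Filters.eq("_id", docSample.getString("_id")),
        docSample, new ReplaceOptions().upsert(true));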

From source file:module.script.ImportArrayExpress1733.java

License:Open Source License

public ImportArrayExpress1733() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Pattern =====
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====

    for (String accession : listAccessions) {

        List<String> accessionAsList = new ArrayList<String>();
        accessionAsList.add(accession);

        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);

        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));

        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE ===== 
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession)).first();
                isGseFound = gse != null;

            }
        }

        int nbImportedSamples = 0;

        if (!isGseFound) {

            // ===== Create Mongo series =====

            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());

            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                docSeries.put("secondary_accessions", series.getListAccessions());
            }

            if (false) { // series write disabled in the original source; change to true to persist
                UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                        new Document("$set", docSeries));
                if (updateResult.getMatchedCount() == 0) {
                    collectionSeries.insertOne(docSeries);
                }
            }

            System.out.println(docSeries);

            // ===== Import clinical data =====

            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);

            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Recognize samples =====

            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);

            for (int i = 1; i < data.size(); i++) {

                Map<String, Object> mapParameters = this.createMapParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);

                if (idSample == null) {
                    System.err.println("ERROR: idSample is not recongnized for " + accession);
                    System.out.println("Line " + i);
                    System.out.println(mapParameters);
                    mongoClient.close();
                    System.exit(1); // exit with a non-zero status: the sample id could not be determined
                } else {
                    if (formatIdSample) {
                        idSample = "E-MTAB-2836" + "-" + idSample;
                        idSample = idSample.trim().replaceAll(" ", "-");
                    }
                }
                idSample = idSample.split(" ")[0].trim();

                // === Organism ===
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = defaultOrganism;
                }

                // === Platform ===
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = defaultPlatform;
                }

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;

                System.out.println("docAlreadyExist " + docAlreadyExist);

                // === Delete old if already exist ===
                if (docAlreadyExist) {
                    List<String> listSeries = (List<String>) docSampleExist.get("series");
                    Set<String> setSeries = new HashSet<String>();
                    listSeries.add(accession);
                    setSeries.addAll(listSeries);
                    listSeries.clear();
                    listSeries.addAll(setSeries);
                    docSampleExist.append("series", listSeries);

                    System.out.println(docSampleExist);

                    if (commit) {
                        collectionSamples.deleteOne(eq("_id", docSampleExist.get("_id")));
                        collectionSamples.insertOne(docSampleExist);
                        nbImportedSamples++;
                    }

                }

            }

        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to  " + accession
                    + " exists already. Skip import.");
        }

        System.out.println("Number of imported samples: " + nbImportedSamples);

    }

    mongoClient.close();

}

From source file:module.script.pro12.CreatePro12.java

License:Open Source License

@SuppressWarnings({ "unchecked" })
public CreatePro12() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    // MongoDatabase db = mongoClient.getDatabase("geo");

    MongoCollection<Document> collectionSeries = db.getCollection("series");

    Date submissionDate = null;
    try {
        submissionDate = dateFormat.parse("2010-01-01");
    } catch (ParseException e) {
        e.printStackTrace();
    }

    List<String> platforms = new ArrayList<String>();
    platforms.add("GPL570");

    Document docSeries = new Document();
    docSeries.append("_id", "PRO12").append("title", "Project 12: Reference tissues and cell lines")
            .append("platforms", platforms).append("submission_date", submissionDate)
            .append("last_update", submissionDate).append("import_date", new Date());

    System.out.println(docSeries);
    collectionSeries.insertOne(docSeries);

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.proallchen.CreateProallChen.java

License:Open Source License

public CreateProallChen() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    // MongoDatabase db = mongoClient.getDatabase("geo");

    MongoCollection<Document> collectionSeries = db.getCollection("series");

    Date submissionDate = null;
    try {
        submissionDate = dateFormat.parse("2016-05-13");
    } catch (ParseException e) {
        e.printStackTrace();
    }

    // List<String> platforms = new ArrayList<String>();
    // platforms.add("GPL570");

    Document docSeries = new Document();
    docSeries.append("_id", "PROALL_CHEN")
            .append("title", "Genomic Profiling of Adult and Pediatric B-cell Acute Lymphoblastic Leukemia")
            .append("platforms", null).append("submission_date", submissionDate)
            .append("last_update", submissionDate).append("import_date", new Date());

    System.out.println(docSeries);
    collectionSeries.insertOne(docSeries);

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.proallchen.ImportProallChenSupplementary.java

License:Open Source License

public ImportProallChenSupplementary() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Samples ======

    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator()
            + "DB_ALL_JIN_RNASEC_clinical_data_supplementary.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    System.out.println(excelService.getHeader());

    String idSeries = "PROALL_CHEN";
    List<String> listSeries = new ArrayList<String>();
    listSeries.add(idSeries);

    for (int i = 0; i < excelService.getData().size(); i++) {

        List<Object> line = excelService.getData().get(i);

        String idSample = "ESM" + line.get(0);

        System.out.println(idSample + " " + line);

        // ===== Sample Document =====

        Document docSample = new Document();

        docSample.append("_id", idSample).append("main_gse_number", idSeries).append("series", listSeries)
                .append("organism", "Homo sapiens").append("submission_date", today)
                .append("last_update", today).append("import_date", today).append("analyzed", false);

        // ===== Mandatory parameters =====
        Document expGroup = generateExpGroup(idSeries, idSample);
        docSample.append("exp_group", expGroup);

        // ===== Supplementary parameters =====

        Document parameters = new Document();
        parameters.append("id_sample", idSample);

        // === Attributes ===

        for (int j = 0; j < excelService.getHeader().size(); j++) {

            String header = (String) excelService.getHeader().get(j);
            Object value = line.get(j);
            // System.out.println(header + " = " + value);

            parameters.append(header, value);

        }
        docSample.append("parameters", parameters);

        System.out.println(docSample);

        // === Delete if already exist ===
        collectionSamples.deleteOne(eq("_id", idSample));

        // ===== Insert data =====
        collectionSamples.insertOne(docSample);

    }

    mongoClient.close();
}

From source file:module.script.probcp.CreateProbcp.java

License:Open Source License

@SuppressWarnings({ "unchecked" })
public CreateProbcp() {

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    // MongoDatabase db = mongoClient.getDatabase("geo");

    MongoCollection<Document> collectionSeries = db.getCollection("series");

    Date submissionDate = null;
    try {
        submissionDate = dateFormat.parse("2017-03-20");
    } catch (ParseException e) {
        e.printStackTrace();
    }

    List<String> platforms = new ArrayList<String>();
    platforms.add("proteomics");

    Document docSeries = new Document();
    docSeries.append("_id", "PROBCP").append("title", "Breast cancer proteomics").append("platforms", platforms)
            .append("submission_date", submissionDate).append("last_update", submissionDate)
            .append("import_date", new Date());

    System.out.println(docSeries);
    collectionSeries.insertOne(docSeries);

    mongoClient.close();
}

From source file:module.script.probcp.ImportSamplesProbcp.java

License:Open Source License

public ImportSamplesProbcp() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    String[] studies = { "tya16", "law15" };
    List<String> series = new ArrayList<String>();

    for (int l = 0; l < studies.length; l++) {

        String idStudy = studies[l];
        String studyName = idStudy.toUpperCase();

        series.clear();
        series.add(studyName);
        series.add("PROBCP");

        String sql = "select * from st_bcp." + idStudy + "_sample order by id_sample";

        List<Object> listSamples = session.createSQLQuery(sql).list();

        for (int i = 0; i < listSamples.size(); i++) {

            Object[] lineSample = (Object[]) listSamples.get(i);

            String idSample = (String) lineSample[0];
            String clinicalClassification = (String) lineSample[1];
            String tnmStage = (String) lineSample[2];
            Integer grade = (Integer) lineSample[3];
            String type = (String) lineSample[4];

            System.out.println(Arrays.toString(lineSample));

            // ===== Collection method ====
            String collectionMethod = "biopsy";
            if (idStudy.equals("law15") && !idSample.startsWith("Tumor")) {
                collectionMethod = "cell line";
            }

            // ==== Topology ====
            ClTopology topology = session.get(ClTopology.class, "C50.9");

            // === Morphology ===
            ClMorphology morphology = session.get(ClMorphology.class, "8010/3"); // carcinoma
            ClMorphology idc = session.get(ClMorphology.class, "8500/3"); // inf. duct. carcinoma
            ClMorphology lo = session.get(ClMorphology.class, "8520/3"); // lobular carcinoma
            ClMorphology ac = session.get(ClMorphology.class, "8140/3"); // adenocarcinoma

            if (type != null && (type.contains("IDC") || type.contains("DC") || type.contains("ductal"))) {
                morphology = idc;
            }
            if (type != null && type.contains("Lo")) {
                morphology = lo;
            }

            if (type != null && (type.contains("AC") || type.contains("adeno"))) {
                morphology = ac;
            }

            // ===== Sample Document =====

            Document docSample = new Document();

            docSample.append("_id", studyName + "_" + idSample).append("main_gse_number", studyName)
                    .append("series", series).append("organism", "Homo sapiens")
                    .append("submission_date", today).append("last_update", today).append("import_date", today)
                    .append("analyzed", true);

            // ===== Mandatory parameters =====

            Document expGroup = this.generateExpGroup(idSample, studyName, tnmStage, grade, type,
                    collectionMethod, topology, morphology);
            docSample.append("exp_group", expGroup);

            // ===== Supplementary parameters =====

            Document parameters = this.generateParameters(idSample);
            parameters.append("clinical_classification", clinicalClassification);
            parameters.append("tnm_stage", tnmStage);
            parameters.append("grade", grade);
            parameters.append("type", type);

            // === Append parameters to document ===

            docSample.append("parameters", parameters);

            // === Save ===
            collectionSamples.insertOne(docSample);

            System.out.println(docSample);

        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();

}

From source file:module.test.CreateProbesets.java

License:Open Source License

public CreateProbesets() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionProbesets = db.getCollection("probesets");

    Document docProbeset = mongoService.createProbeset("GPL570", "1007_s_at");
    collectionProbesets.insertOne(docProbeset);
    mongoClient.close();

}

From source file:module.test.ImportProbesets.java

License:Open Source License

public ImportProbesets() {

    // === Display ===
    System.out.println("\n================ BEGIN Module " + this.getClass().getName() + "================");

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionProbesets = db.getCollection("probesets");

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    String idPlatform = "GPL570";
    String gpl = idPlatform.toLowerCase();

    // ===== Load file =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "HG-U133_Plus_2.na36.annot.csv";

    System.out.println("ID Platform " + gpl);
    System.out.println("LOADING \t " + inputfile);
    System.out.println("Please wait... ");
    List<String> listRows = fileService.loadTextFile(inputfile);
    System.out.println("File sucessfully LOADED");

    // ===== Recognize header =====

    List<String> header = fileService.readHeader(listRows, "\",\"");
    System.out.println("Header " + header);
    List<List<String>> data = fileService.readData(listRows, "\",\"");
    System.out.println(
            "The data are successfully loaded: rows " + data.size() + ", columns " + data.get(0).size());

    Integer indProbeset = fileService.findIndex(header, "Probe Set ID");
    Integer indGenes = fileService.findIndex(header, "Entrez Gene");
    Integer indUnigenes = fileService.findIndex(header, "UniGene ID");
    Integer indTranscripts = fileService.findIndex(header, "RefSeq Transcript ID");
    Integer indGb = fileService.findIndex(header, "Representative Public ID");

    for (int i = 0; i < 5; i++) {
        List<String> dataline = data.get(i);

        String probeset = dataline.get(indProbeset);
        String genes = dataline.get(indGenes);
        String unigenes = dataline.get(indUnigenes);
        String transcripts = dataline.get(indTranscripts);
        String gb = dataline.get(indGb);

        System.out.println(probeset + "\t" + genes + "\t" + formatService.splitInArray(unigenes, "///") + "\t"
                + gb + "\t" + transcripts);

        Document docProbeset = mongoService.createProbeset(idPlatform, probeset);
        docProbeset.put("genes", formatService.splitInArray(genes, "///"));
        docProbeset.put("unigenes", formatService.splitInArray(unigenes, "///"));

        List<String> listTranscripts = formatService.splitInArray(transcripts, "///");
        listTranscripts.addAll(formatService.splitInArray(gb, "///"));

        docProbeset.put("transcripts", listTranscripts);

        collectionProbesets.insertOne(docProbeset);

        /*
        for (int j=0; j<dataline.size(); j++) {
           String key = header.get(j);
           String value = dataline.get(j);
           System.out.println(key + ": " + value);
        }
        */
    }

    /*
    String tableProbe = "hs.om_probe_" + gpl;
    String tableGP = "hs.om_gp_" + gpl;
    List<Object []> listProbesets = session.createNativeQuery("select * from " + tableProbe + " order by id_probe").getResultList();
    for (int i=0; i<10; i++) {
            
       Object[] line = listProbesets.get(i);
            
       System.out.println(Arrays.toString(line));
       // Document docProbeset = mongoService.createProbeset(idPlatform, "1007_s_at");
       // collectionProbesets.insertOne(docProbeset);
    }
     */

    // === Close connections ===

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();
    mongoClient.close();

    // === Display ===
    System.out.println("================ END Module " + this.getClass().getName() + "================");

}
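
The loop above issues one insertOne per probeset. When no per-document checks are needed, collecting the documents first and calling insertMany (as the QuickTour below also demonstrates) saves one round trip per document. A minimal sketch reusing names from the listing above:

List<Document> batch = new ArrayList<Document>();
for (List<String> dataline : data) {
    // build each probeset document exactly as in the loop above
    batch.add(mongoService.createProbeset(idPlatform, dataline.get(indProbeset)));
}
collectionProbesets.insertMany(batch); // one bulk write instead of one insert per row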

From source file:mongodb.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {

    // represents a pool of connections to the database;
    // honors the optional connection-string argument, falling back to a default host
    MongoClient mongoClient = args.length == 0 ? new MongoClient("10.9.17.105", 27017)
            : new MongoClient(new MongoClientURI(args[0]));

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (it's the only one in there, since we dropped the rest earlier)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    //        database.drop();

    // release resources
    mongoClient.close();
}