Example usage for com.mongodb.client MongoCollection updateOne

List of usage examples for com.mongodb.client MongoCollection updateOne

Introduction

On this page you can find example usage for com.mongodb.client MongoCollection updateOne.

Prototype

UpdateResult updateOne(Bson filter, Bson update);

Document

Update a single document in the collection according to the specified arguments.
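
The examples below call the two-argument form with a Bson filter and a Bson update document. As a minimal, self-contained sketch of the call shape (the server address, database, collection and field names are illustrative assumptions, not taken from the examples on this page):

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Updates;
import com.mongodb.client.result.UpdateResult;
import org.bson.Document;

public class UpdateOneSketch {
    public static void main(String[] args) {
        // Connection details are assumptions for this sketch
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        MongoDatabase db = mongoClient.getDatabase("test");
        MongoCollection<Document> users = db.getCollection("users");

        // Set a single field on the first document matching the filter
        UpdateResult result = users.updateOne(Filters.eq("name", "alice"),
                Updates.set("status", "active"));
        System.out.println("matched=" + result.getMatchedCount()
                + ", modified=" + result.getModifiedCount());

        mongoClient.close();
    }
}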

Usage

From source file:module.script.TransferBrbHistologyCodes.java

License:Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public TransferBrbHistologyCodes() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    String sql = "select id_sample, main_gse_number, index_histology_code, histology_code, ordered_histology_code from db_brb_lung.view_exp_group";

    List<Object> list = session.createSQLQuery(sql).setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP).list();

    for (Object item : list) {

        Map<String, Object> map = (HashMap<String, Object>) item;

        String gsmNumber = (String) map.get("id_sample");
        String gseNumber = (String) map.get("main_gse_number");

        System.out.println("-----------------------------");
        System.out.println(gseNumber + " " + gsmNumber);

        Document doc = collection.find(Filters.eq("_id", gsmNumber)).first();

        if (doc != null) {
            Document expGroup = (Document) doc.get("exp_group");
            expGroup.put("index_histology_code", map.get("index_histology_code"));
            expGroup.put("histology_code", map.get("histology_code"));
            expGroup.put("ordered_histology_code", map.get("ordered_histology_code"));
            System.out.println(expGroup);

            // Update Mongo document
            doc.put("exp_group", expGroup);
            doc.put("analyzed", true);
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", gsmNumber),
                    new Document("$set", doc));
        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}
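
The examples in this module assign the returned UpdateResult but never read it. To confirm that the $set actually matched a document, the result can be inspected; a small, optional addition (not part of the original source) would be:

    if (updateResult.getMatchedCount() == 0) {
        System.err.println("No sample found for " + gsmNumber);
    }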

From source file:module.script.TransferExposure.java

License:Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public TransferExposure() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    String sql = "select id_sample, main_gse_number, string_agg(id_substance, ', ') as list_substances from epimed_prod.om_sample "
            + "join epimed_prod.cl_biopatho using (id_biopatho) join epimed_prod.cl_patient using (id_patient) join epimed_prod.cl_exposure using (id_patient) "
            + "where exposed=true group by id_sample";

    List<Object> list = session.createSQLQuery(sql).setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP).list();

    for (Object item : list) {

        Map<String, Object> map = (HashMap<String, Object>) item;

        String gsmNumber = (String) map.get("id_sample");
        String gseNumber = (String) map.get("main_gse_number");

        System.out.println("-----------------------------");
        System.out.println(gseNumber + " " + gsmNumber);

        Document doc = collection.find(Filters.eq("_id", gsmNumber)).first();

        if (doc != null) {
            Document expGroup = (Document) doc.get("exp_group");
            expGroup.put("exposure", map.get("list_substances"));
            System.out.println(expGroup);

            // Update Mongo document
            doc.put("exp_group", expGroup);
            doc.put("analyzed", true);
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", gsmNumber),
                    new Document("$set", doc));
        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.TransferFromEpimedProd.java

License:Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public TransferFromEpimedProd() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    String sql = "select * from epimed_prod.view_exp_group order by main_gse_number, id_sample";
    List<Object> list = session.createSQLQuery(sql).setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP).list();

    String[] commonAttributes = { "pathology", "tnm_stage", "id_topology_group", "histology_subtype",
            "tissue_stage", "dead", "dfs_months", "age_max", "morphology", "id_tissue_stage", "topology", "sex",
            "age_min", "m", "topology_group", "n", "t", "collection_method", "id_morphology", "relapsed",
            "histology_type", "os_months", "id_topology" };

    List<ClPathology> listPathology = session.createCriteria(ClPathology.class).list();
    Map<String, ClPathology> mapPathology = new HashMap<String, ClPathology>();
    for (ClPathology p : listPathology) {
        mapPathology.put(p.getName(), p);
    }

    List<ClTissueStatus> listTissueStatus = session.createCriteria(ClTissueStatus.class).list();
    Map<Integer, ClTissueStatus> mapTissueStatus = new HashMap<Integer, ClTissueStatus>();
    for (ClTissueStatus t : listTissueStatus) {
        mapTissueStatus.put(t.getIdTissueStatus(), t);
    }

    System.out.println(listTissueStatus);

    for (Object item : list) {

        Map<String, Object> map = (HashMap<String, Object>) item;

        String gsmNumber = (String) map.get("id_sample");
        String gseNumber = (String) map.get("main_gse_number");

        System.out.println("-----------------------------");
        System.out.println(gseNumber + " " + gsmNumber);

        Document doc = collection.find(Filters.eq("_id", gsmNumber)).first();

        if (doc != null) {
            Document expGroup = (Document) doc.get("exp_group");
            for (int j = 0; j < commonAttributes.length; j++) {
                String attr = commonAttributes[j];
                expGroup.put(attr, map.get(attr));
            }

            // Treatment
            expGroup.put("treatment", map.get("treatment_type"));

            // Pathology
            String pathoString = (String) map.get("pathology");
            ClPathology pathology = mapPathology.get(pathoString);
            if (pathology != null) {
                expGroup.put("id_pathology", pathology.getIdPathology());
                expGroup.put("pathology", pathology.getName());
            }

            // Tissue status
            Integer idTissueStatus = (Integer) map.get("id_tissue_status");
            if (idTissueStatus != null && idTissueStatus > 3) {
                idTissueStatus = 3;
            }
            ClTissueStatus tissueStatus = mapTissueStatus.get(idTissueStatus);
            if (tissueStatus != null) {
                expGroup.put("id_tissue_status", tissueStatus.getIdTissueStatus());
                expGroup.put("tissue_status", tissueStatus.getName());
            }

            System.out.println("idTissueStatus=" + tissueStatus + ", pathology=" + pathology);
            System.out.println(expGroup);

            // Update Mongo document
            doc.put("exp_group", expGroup);
            doc.put("analyzed", true);
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", gsmNumber),
                    new Document("$set", doc));
        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.UpdateNumberSamples.java

License:Open Source License

public UpdateNumberSamples() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("sample");

    // String [] listIdSeries = {"TISSUE_SPECIFIC_GENES_MM"};
    // List<Document> listSeries = collectionSeries.find(Filters.in("_id", listIdSeries)).into(new ArrayList<Document>());

    List<Document> listSeries = collectionSeries.find().into(new ArrayList<Document>());

    for (Document ser : listSeries) {
        System.out.println(ser);

        String idSeries = ser.getString("_id");
        Bson filter = Filters.in("series", idSeries);

        Long nbSamples = collectionSamples.count(filter);

        System.out.println(idSeries + " " + nbSamples);

        ser.append("nb_samples", nbSamples);
        collectionSeries.updateOne(Filters.eq("_id", idSeries), new Document("$set", ser));

    }

    mongoClient.close();

}

From source file:module.UpdateTissueStatus.java

License:Open Source License

public UpdateTissueStatus() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSample = db.getCollection("sample");

    List<Document> samples = collectionSample.find().into(new ArrayList<Document>());

    for (Document sample : samples) {

        Document expgroup = sample.get("exp_group", Document.class);
        Integer idTissueStatus = expgroup.getInteger("id_tissue_status");
        if (idTissueStatus != null) {
            if (idTissueStatus.equals(1)) {
                expgroup.put("tissue_status", "normal");
            }
            if (idTissueStatus.equals(2)) {
                expgroup.put("tissue_status", "pathological_non_tumoral");
            }
            if (idTissueStatus.equals(3)) {
                expgroup.put("tissue_status", "tumoral");
            }

            sample.put("exp_group", expgroup);

            System.out.println(sample.get("exp_group", Document.class));

            collectionSample.updateOne(Filters.eq("_id", sample.getString("_id")),
                    new Document("$set", sample));

        }

    }

    mongoClient.close();

}

From source file:mongodb.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {

    //represents a pool of connections to the database
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    //        database.drop();

    // release resources
    mongoClient.close();
}

From source file:mongoSample.MongoSample.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args
 *            takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    String mongoServer = args[0];

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest
    // earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can
    // explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    //database.drop();

    // release resources
    mongoClient.close();
}

From source file:net.acesinc.nifi.processors.mongodb.PartialUpdateMongo.java

protected UpdateResult performSingleUpdate(Document query, Document updateDocument, ProcessContext context,
        ProcessSession session) {
    final ProcessorLog logger = getLogger();
    StopWatch watch = new StopWatch(true);

    final String mode = context.getProperty(MODE).getValue();

    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);

    UpdateResult result = null;
    if (!updateDocument.isEmpty()) {
        watch.start();
        //            logger.info("Running Mongo Update with query: " + query + " and document: " + updateDocument);
        switch (mode) {
        case MODE_SINGLE:
            result = collection.updateOne(query, updateDocument);
            break;
        case MODE_MANY:
            result = collection.updateMany(query, updateDocument);
            break;
        }
        watch.stop();

        logger.info("Running Mongo Update with query: " + query + " and document: " + updateDocument + " took "
                + watch.getDuration(TimeUnit.MILLISECONDS) + "ms");
        return result;

    } else {
        //nothing to do
        return null;

    }
}

From source file:net.netzgut.integral.mongo.internal.services.MongoODMImplementation.java

License:Apache License

@Override
public <T extends Serializable> UpdateResult update(Bson filter, T entity) {
    Class<? extends Serializable> entityClass = entity.getClass();
    MongoCollection<Document> collection = this.mongo.getCollection(entityClass);
    Document document = this.converter.documentFrom(entity);
    Bson update = Filters.eq("$set", document);
    return collection.updateOne(filter, update);
}
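
Note that Filters.eq("$set", document) is not used as a query filter here: Filters.eq(name, value) simply encodes to the BSON document { name: value }, so the expression builds { "$set": document }. An equivalent and arguably more conventional way to build the same update (a sketch, not from the original source):

    Bson update = new Document("$set", document);
    return collection.updateOne(filter, update);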

From source file:net.netzgut.integral.mongo.internal.services.MongoODMImplementation.java

License:Apache License

@Override
public <T extends Serializable> UpdateResult update(Bson filter, Class<T> entityClass,
        Map<String, Object> updateMap) {
    MongoCollection<Document> collection = this.mongo.getCollection(entityClass);
    Bson update = Filters.eq("$set", new Document(updateMap));
    return collection.updateOne(filter, update);
}