List of usage examples for com.mongodb.client MongoCollection updateOne
UpdateResult updateOne(Bson filter, List<? extends Bson> update);
From source file:module.script.emtab365.ImportSamplesEMTAB365.java
License:Open Source License
public ImportSamplesEMTAB365() { // ===== Connection ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); // ===== Collections ====== MongoCollection<Document> collectionPlatforms = db.getCollection("platforms"); MongoCollection<Document> collectionSeries = db.getCollection("series"); MongoCollection<Document> collectionSamples = db.getCollection("samples"); // ===== Excel data loader ===== String inputfile = this.getInputDirectory() + this.getDirSeparator() + "E-MTAB-365.sdrf.xlsx"; System.out.println("LOADING \t " + inputfile); excelService.load(inputfile);/*from w ww. j a v a 2 s. c o m*/ // ===== Init values ====== String idSeries = "E-MTAB-365"; List<String> listSeries = new ArrayList<String>(); listSeries.add(idSeries); Document docSeries = collectionSeries.find(Filters.eq("_id", idSeries)).first(); String organism = "Homo sapiens"; // ==== Header processing ==== Map<Integer, String> mapHeader = new HashMap<Integer, String>(); for (int i = 0; i < excelService.getHeader().size(); i++) { String headerItem = (String) excelService.getHeader().get(i); if (headerItem != null && headerItem.contains("[")) { String[] parts = headerItem.split("[\\[\\]]"); headerItem = parts[1]; headerItem = headerItem.replaceAll("[:_\\.]", " "); } mapHeader.put(i, headerItem.trim()); } System.out.println(mapHeader); for (int i = 0; i < excelService.getData().size(); i++) { // for (int i=0; i<1; i++) { List<Object> dataline = excelService.getData().get(i); String idSample = (String) dataline.get(0); if (!idSample.equals("pool XX")) { String idPlatform = ((String) dataline.get(54)).trim(); if (idPlatform.contains("A-AFFY-44")) { idPlatform = "GPL570"; } else { Document docPlatform = mongoService.createPlatform(idPlatform, null, "9606", "Homo sapiens", null, null, null, null); UpdateResult res = collectionPlatforms.updateOne( Filters.eq("_id", docPlatform.getString("_id")), new Document("$set", docPlatform)); if 
(res.getMatchedCount() == 0) { collectionPlatforms.insertOne(docPlatform); } } Document docSample = mongoService.createSample(idSample, idSeries, listSeries, organism, (Date) docSeries.get("submission_date"), (Date) docSeries.get("last_update"), false); // === exp_group === Document expgroup = mongoService.createExpGroup(docSample, idPlatform, null, null, organism); docSample.append("exp_group", expgroup); // === parameters === Map<String, Object> mapParameters = new HashMap<String, Object>(); for (int j = 0; j < dataline.size(); j++) { String key = mapHeader.get(j); Object value = dataline.get(j); if (value instanceof String) { String valueString = ((String) value).trim(); if (valueString != null && !valueString.isEmpty() && !valueString.equals("NA") && !valueString.equals("ND")) { value = valueString; } else { value = null; } } if (key != null && value != null) { mapParameters.put(key, value); // System.out.println(key + "='" + value+"'"); } } Document parameters = mongoService.createParameters(docSample, mapParameters); docSample.append("parameters", parameters); // === Delete if already exist === collectionSamples.deleteOne(Filters.eq("_id", docSample.getString("_id"))); // ===== Insert data ===== collectionSamples.insertOne(docSample); System.out.println(docSample); } } mongoClient.close(); }
From source file:module.script.emtab365.UpdateSamplesEMTAB365.java
License:Open Source License
public UpdateSamplesEMTAB365() { // ===== Session PostgreSQL ===== SessionFactory sessionFactory = HibernateUtil .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml"); Session session = sessionFactory.openSession(); // ===== INIT ===== ClMorphology ductal = session.get(ClMorphology.class, "8500/3"); // 8500/3 Infiltrating duct carcinoma, NOS (C50._) ClMorphology lobular = session.get(ClMorphology.class, "8520/3"); // 8520/3 Lobular carcinoma, NOS (C50._) ClMorphology morphology = session.get(ClMorphology.class, "8010/3"); // Carcinoma ClTopology breast = session.get(ClTopology.class, "C50.9"); // Breast ClTopology blood = session.get(ClTopology.class, "C42.0"); // Blood ClTopology lymphNode = session.get(ClTopology.class, "C77.9"); // Lymph node // ===== Session Mongo ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); MongoCollection<Document> collection = db.getCollection("samples"); List<Document> listDocuments = collection.find(Filters.in("series", gseNumber)) .into(new ArrayList<Document>()); for (int i = 0; i < listDocuments.size(); i++) { Document doc = listDocuments.get(i); Document expgroup = (Document) doc.get("exp_group"); Document parameters = (Document) doc.get("parameters"); String histoType = parameters.getString("Histology"); String histoSubtype = parameters.getString("CIT classification"); expgroup.put("histology_subtype", histoSubtype); if (histoType != null && histoType.toLowerCase().equals("lobular")) { morphology = lobular;//from w w w. j a va2 s . 
co m } if (histoType != null && histoType.toLowerCase().equals("ductal")) { morphology = ductal; } expgroup.put("id_morphology", morphology.getIdMorphology()); expgroup.put("morphology", morphology.getName()); expgroup.put("sample_source", parameters.getString("Source Name")); String organismPart = parameters.getString("OrgansimPart"); ClTopology topology = null; if (organismPart != null) { if (organismPart.toLowerCase().contains("breast")) { topology = breast; } if (organismPart.toLowerCase().contains("blood")) { topology = blood; } if (organismPart.toLowerCase().contains("lymph")) { topology = lymphNode; } } else { topology = breast; } expgroup.put("id_topology", topology.getIdTopology()); expgroup.put("topology", topology.getName()); expgroup.put("id_topology_group", topology.getClTopologyGroup().getIdGroup()); expgroup.put("topology_group", topology.getClTopologyGroup().getName()); // ==== Survival ===== Object dfs_months = parameters.get("Delay Metastasis Free Survival months"); if (dfs_months != null) { expgroup.put("dfs_months", dfs_months); } Object os_months = parameters.get("Delay Overall Survival months"); if (os_months != null) { expgroup.put("os_months", os_months); } Double os = (Double) expgroup.get("os_months"); Double dfs = (Double) expgroup.get("dfs_months"); if (os != null && dfs != null && dfs.equals(os)) { expgroup.put("relapsed", false); } if (os != null && dfs != null && dfs < os) { expgroup.put("relapsed", true); } if (os != null && dfs != null && dfs > os) { expgroup.put("relapsed", null); } Object relapseDate = parameters.get("Relapse Metastasis Date"); if (relapseDate != null) { expgroup.put("relapsed", true); } // ==== Grade ==== expgroup.put("tnm_grade", parameters.get("Grade Scarff Bloom Richardson")); // ==== Files ===== expgroup.put("ftp", parameters.getString("ArrayExpress FTP file")); expgroup.put("file_name", parameters.getString("Array Data File")); expgroup.remove("individual"); if (parameters.getString("Individual") != null) { 
expgroup.put("individual", parameters.getString("Individual")); } // ==== Biomarkers ==== /* String p53 = parameters.getString("Phenotype - TP53 Gene mutation Status"); expgroup.put("p53", value) String pr = parameters.getString("PGR Protein expression"); String er = parameters.getString("ESR1 Protein expression"); String her2 = parameters.getString("ERBB2 Protein expression"); */ doc.put("exp_group", expgroup); System.out.println(i + " " + doc.get("_id") + " " + doc.get("analyzed") + " " + expgroup); if (commit) { UpdateResult updateResult = collection.updateOne(Filters.eq("_id", doc.get("_id")), new Document("$set", doc)); } } if (session.isOpen()) { session.close(); } sessionFactory.close(); mongoClient.close(); }
From source file:module.script.epimed_ontology.AddEpimedGroupToSamples.java
License:Open Source License
public AddEpimedGroupToSamples() { // ===== Session PostgreSQL ===== SessionFactory sessionFactory = HibernateUtil .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml"); Session session = sessionFactory.openSession(); ClTopologyDao topologyDao = new ClTopologyDao(session); // ===== Session Mongo ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); MongoCollection<Document> collectionSample = db.getCollection("sample"); List<Document> samples = collectionSample.find().into(new ArrayList<Document>()); for (int i = 0; i < samples.size(); i++) { Document sample = samples.get(i); Document expgroup = sample.get("exp_group", Document.class); String idTopology = expgroup.getString("id_topology"); if (idTopology != null && !idTopology.isEmpty()) { ClTopology topology = topologyDao.find(idTopology); ClEpimedGroup grp1 = topology.getClEpimedGroup(); ClEpimedGroup grp2 = grp1.getParent(); ClEpimedGroup grp3 = grp2.getParent(); expgroup.append("tissue_group_level1", grp1.getName()); expgroup.append("tissue_group_level2", grp2.getName()); expgroup.append("tissue_group_level3", grp3.getName()); System.out.println((i + 1) + "/" + samples.size() + " " + expgroup); sample.append("exp_group", expgroup); collectionSample.updateOne(Filters.eq("_id", sample.getString("_id")), new Document("$set", sample)); }/*from ww w . ja va 2 s. c o m*/ } // === Commit transaction === // session.getTransaction().commit(); session.getTransaction().rollback(); if (session.isOpen()) { session.close(); } sessionFactory.close(); mongoClient.close(); }
From source file:module.script.epimed_ontology.UpdateFetalAdultOvary.java
License:Open Source License
public UpdateFetalAdultOvary() { // ===== Session PostgreSQL ===== SessionFactory sessionFactory = HibernateUtil .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml"); Session session = sessionFactory.openSession(); ClTopologyDao topologyDao = new ClTopologyDao(session); ClTopology adultOvary = topologyDao.find("C56.9"); ClTopology fetalOvary = topologyDao.find("E56.9"); // ===== Session Mongo ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); MongoCollection<Document> collectionSample = db.getCollection("sample"); Bson filters = Filters.and(Filters.eq("exp_group.id_topology", "C56.9"), // ovary Filters.eq("exp_group.id_tissue_stage", 1) // adult );/* w ww . jav a2 s . co m*/ List<Document> samples = collectionSample.find(filters).into(new ArrayList<Document>()); for (Document sample : samples) { Document expgroup = sample.get("exp_group", Document.class); expgroup.append("id_topology", adultOvary.getIdTopology()); expgroup.append("topology", adultOvary.getName()); sample.append("exp_group", expgroup); collectionSample.updateOne(Filters.eq("_id", sample.getString("_id")), new Document("$set", sample)); } System.out.println(samples.size()); // === Commit transaction === // session.getTransaction().commit(); session.getTransaction().rollback(); if (session.isOpen()) { session.close(); } sessionFactory.close(); mongoClient.close(); }
From source file:module.script.ImportArrayExpress1733.java
License:Open Source License
/**
 * Imports ArrayExpress series from {@code listAccessions}: reads the IDF to
 * build the series document, skips series already imported under a GEO GSE
 * accession, then parses the SDRF to merge clinical data into existing
 * sample documents.
 */
public ImportArrayExpress1733() {

    // ===== Connection =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Pattern matching "[...]" column qualifiers in SDRF headers =====
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====
    for (String accession : listAccessions) {
        List<String> accessionAsList = new ArrayList<String>();
        accessionAsList.add(accession);

        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);
        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));
        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE =====
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession))
                        .first();
                isGseFound = gse != null;
            }
        }

        int nbImportedSamples = 0;

        if (!isGseFound) {

            // ===== Create Mongo series =====
            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());
            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                docSeries.put("secondary_accessions", series.getListAccessions());
            }
            // NOTE(review): series upsert deliberately disabled (dead code kept
            // as documentation of the intended write path) — confirm.
            if (false) {
                UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                        new Document("$set", docSeries));
                if (updateResult.getMatchedCount() == 0) {
                    collectionSeries.insertOne(docSeries);
                }
            }
            System.out.println(docSeries);

            // ===== Import clinical data =====
            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);
            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Recognize samples =====
            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);

            for (int i = 1; i < data.size(); i++) {
                Map<String, Object> mapParameters = this.createMapParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);
                if (idSample == null) {
                    System.err.println("ERROR: idSample is not recognized for " + accession);
                    System.out.println("Line " + i);
                    System.out.println(mapParameters);
                    mongoClient.close();
                    // BUGFIX: exit with a failure status on the error path
                    // (previously exited 0 after printing ERROR)
                    System.exit(1);
                } else {
                    if (formatIdSample) {
                        // NOTE(review): hard-coded "E-MTAB-2836" prefix looks like a
                        // copy-paste from another importer — should this be `accession`?
                        idSample = "E-MTAB-2836" + "-" + idSample;
                        idSample = idSample.trim().replaceAll(" ", "-");
                    }
                }
                idSample = idSample.split(" ")[0].trim();

                // === Organism ===
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = defaultOrganism;
                }

                // === Platform ===
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = defaultPlatform;
                }

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;
                System.out.println("docAlreadyExist " + docAlreadyExist);

                // === Merge series list and rewrite if the sample already exists ===
                // NOTE(review): samples that do not already exist are silently
                // skipped — confirm that creating new samples is out of scope here.
                if (docAlreadyExist) {
                    List<String> listSeries = (List<String>) docSampleExist.get("series");
                    Set<String> setSeries = new HashSet<String>();
                    listSeries.add(accession);
                    setSeries.addAll(listSeries); // deduplicate
                    listSeries.clear();
                    listSeries.addAll(setSeries);
                    docSampleExist.append("series", listSeries);
                    System.out.println(docSampleExist);
                    if (commit) {
                        collectionSamples.deleteOne(Filters.eq("_id", docSampleExist.get("_id")));
                        collectionSamples.insertOne(docSampleExist);
                        // BUGFIX: the counter was never incremented before
                        nbImportedSamples++;
                    }
                }
            }
        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to " + accession
                    + " exists already. Skip import.");
        }
        System.out.println("Number of imported samples: " + nbImportedSamples);
    }
    mongoClient.close();
}
From source file:module.script.ImportSupplementaryGSE25219.java
License:Open Source License
/**
 * Imports the supplementary clinical table of GSE25219 (one attribute map
 * per brain code) from Excel and merges each map into the "parameters" of
 * the corresponding Mongo sample documents.
 */
@SuppressWarnings({ "unused", "unchecked" })
public ImportSupplementaryGSE25219() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();

    try {
        MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
        MongoCollection<Document> collection = db.getCollection("samples");

        // ===== Excel data loader =====
        String inputfile = this.getInputDirectory() + this.getDirSeparator()
                + "NIHMS321722-supplement-7.xlsx";
        System.out.println("LOADING \t " + inputfile);
        ExcelService excelService = new ExcelService();
        excelService.load(inputfile);

        // ===== Format raw data into data structures ======
        // Each brain code owns one attribute map. The map object is shared by
        // reference: rows after a brain-code row keep mutating the same map
        // until the next brain code starts a fresh one.
        List<Map<String, String>> listMap = new ArrayList<Map<String, String>>();
        List<String> headerMap = new ArrayList<String>();
        Map<String, String> mapBrain = new HashMap<String, String>();

        for (int i = 0; i < excelService.getData().size(); i++) {
            List<Object> dataLine = excelService.getData().get(i);
            String brainCode = (String) dataLine.get(0);

            if (brainCode != null) {
                // New brain code: start a fresh attribute map
                mapBrain = new HashMap<String, String>();
            }

            // Attribute row: column 1 = key, column 2 = value
            if (dataLine != null && dataLine.size() > 2 && dataLine.get(1) != null
                    && dataLine.get(2) != null) {
                mapBrain.put(dataLine.get(1).toString().trim(), dataLine.get(2).toString().trim());
            }

            if (brainCode != null) {
                // Register the (still mutable) map under its brain code
                headerMap.add(brainCode);
                listMap.add(mapBrain);
            }
        }

        // ===== Recognize data =====
        for (int i = 0; i < headerMap.size(); i++) {
            System.out.println("----------------------------");
            String code = headerMap.get(i);
            System.out.println(i + " " + code);
            Map<String, String> map = listMap.get(i);

            // Filter out "age" keys and "no data" placeholders
            Map<String, String> updatedMap = new HashMap<String, String>();
            for (Map.Entry<String, String> entry : map.entrySet()) {
                String key = entry.getKey();
                String value = entry.getValue();
                if (!key.toLowerCase().equals("age") && !value.toLowerCase().equals("no data")) {
                    updatedMap.put(key, value);
                }
            }

            List<Document> listDocuments = collection
                    .find(Filters.and(Filters.eq("exp_group.main_gse_number", "GSE25219"),
                            Filters.eq("parameters.brain code", code)))
                    .into(new ArrayList<Document>());
            System.out.println("Number of corresponding Mongo documents " + listDocuments.size());
            System.out.println(updatedMap);

            for (int j = 0; j < listDocuments.size(); j++) {
                Document doc = listDocuments.get(j);
                Document parameters = (Document) doc.get("parameters");
                parameters.putAll(updatedMap);
                System.out.println("\t" + parameters);

                // Update Mongo document
                doc.put("parameters", parameters);
                doc.put("analyzed", true);
                collection.updateOne(Filters.eq("_id", doc.get("_id")), new Document("$set", doc));
            }
        }
    } finally {
        // Release all resources even when an exception interrupts the import
        if (session.isOpen()) {
            session.close();
        }
        sessionFactory.close();
        mongoClient.close();
    }
}
From source file:module.script.pro12.TransferPro12.java
License:Open Source License
@SuppressWarnings({ "unchecked" }) public TransferPro12() { // ===== Session PostgreSQL ===== SessionFactory sessionFactory = HibernateUtil .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml"); Session session = sessionFactory.openSession(); // ===== Session Mongo ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); MongoCollection<Document> collection = db.getCollection("samples"); String sql = "select id_sample from epimed_prod.om_sample join epimed_prod.om_sample_series using (id_sample) " + "join epimed_prod.om_series using (id_series) where id_series='PRO12'"; List<String> list = session.createSQLQuery(sql).list(); Document pro12 = new Document(); pro12.append("series", "PRO12"); for (String gsmNumber : list) { Document doc = collection.find(Filters.eq("_id", gsmNumber)).first(); System.out.println("-----------------------------"); System.out.println(gsmNumber + " " + doc); if (doc != null) { // Update Mongo document collection.updateOne(Filters.eq("_id", gsmNumber), new Document("$push", pro12)); }/*from w w w . jav a2 s. c om*/ } if (session.isOpen()) { session.close(); } sessionFactory.close(); mongoClient.close(); }
From source file:module.script.proallchen.ImportProallChenOriginal.java
License:Open Source License
public ImportProallChenOriginal() { // ===== Connection ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); // ===== Samples ====== MongoCollection<Document> collectionSamples = db.getCollection("samples"); // ===== Excel data loader ===== String inputfile = this.getInputDirectory() + this.getDirSeparator() + "PROALL_CHEN_clinical.xlsx"; System.out.println("LOADING \t " + inputfile); ExcelService excelService = new ExcelService(); excelService.load(inputfile);//from ww w. j a va 2 s .c o m System.out.println(excelService.getHeader()); String idSeries = "PROALL_CHEN"; List<String> listSeries = new ArrayList<String>(); listSeries.add(idSeries); for (int i = 0; i < excelService.getData().size(); i++) { List<Object> line = excelService.getData().get(i); String idSample = "ESM" + line.get(0); System.out.println(idSample + " " + line); Document docSample = collectionSamples.find(Filters.eq("_id", idSample.trim())).first(); System.out.println(docSample); Document parameters = (Document) docSample.get("parameters"); for (int j = 0; j < excelService.getHeader().size(); j++) { String header = (String) excelService.getHeader().get(j); Object value = line.get(j); // System.out.println(header + " = " + value); parameters.append(header, value); } System.out.println(parameters); // Update Mongo document docSample.put("parameters", parameters); UpdateResult updateResult = collectionSamples.updateOne(Filters.eq("_id", docSample.get("_id")), new Document("$set", docSample)); } mongoClient.close(); }
From source file:module.script.probcp.UpdateSamplesProbcp.java
License:Open Source License
public UpdateSamplesProbcp() { // ===== Connection ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); MongoCollection<Document> collectionSamples = db.getCollection("samples"); // ===== Session PostgreSQL ===== SessionFactory sessionFactory = HibernateUtil .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml"); Session session = sessionFactory.openSession(); String[] studies = { "tya16" }; List<String> series = new ArrayList<String>(); for (int l = 0; l < studies.length; l++) { String idStudy = studies[l]; String studyName = idStudy.toUpperCase(); series.clear();/*from w w w.j ava 2 s . c o m*/ series.add(studyName); series.add("PROBCP"); String sql = "select * from st_bcp." + idStudy + "_sample order by id_sample"; List<Object> listSamples = session.createSQLQuery(sql).list(); for (int i = 0; i < listSamples.size(); i++) { Object[] lineSample = (Object[]) listSamples.get(i); String idSample = (String) lineSample[0]; String clinicalClassification = (String) lineSample[1]; String tnmStage = (String) lineSample[2]; Integer grade = (Integer) lineSample[3]; String type = (String) lineSample[4]; System.out.println(Arrays.toString(lineSample)); String id = studyName + "_" + idSample; Document docSample = collectionSamples.find(Filters.eq("_id", id)).first(); Document expgroup = (Document) docSample.get("exp_group"); expgroup.append("tnm_grade", grade); expgroup.append("tnm_stage", null); docSample.append("exp_group", expgroup); UpdateResult updateResult = collectionSamples.updateOne(Filters.eq("_id", id), new Document("$set", docSample)); System.out.println(docSample); } } if (session.isOpen()) { session.close(); } sessionFactory.close(); mongoClient.close(); }
From source file:module.script.TransferBrbAnnotations.java
License:Open Source License
@SuppressWarnings({ "unused", "unchecked" }) public TransferBrbAnnotations() { // ===== Session PostgreSQL ===== SessionFactory sessionFactory = HibernateUtil .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml"); Session session = sessionFactory.openSession(); // ===== Session Mongo ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); MongoCollection<Document> collection = db.getCollection("samples"); String sql = "select id_sample, main_gse_number, string_agg(id_substance, ', ') as list_substances from epimed_prod.om_sample " + "join epimed_prod.cl_biopatho using (id_biopatho) join epimed_prod.cl_patient using (id_patient) join epimed_prod.cl_exposure using (id_patient) " + "where exposed=true group by id_sample"; List<Object> list = session.createSQLQuery(sql).setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP).list(); for (Object item : list) { Map<String, Object> map = (HashMap<String, Object>) item; String gsmNumber = (String) map.get("id_sample"); String gseNumber = (String) map.get("main_gse_number"); System.out.println("-----------------------------"); System.out.println(gseNumber + " " + gsmNumber); Document doc = collection.find(Filters.eq("_id", gsmNumber)).first(); if (doc != null) { Document expGroup = (Document) doc.get("exp_group"); expGroup.put("exposure", map.get("list_substances")); System.out.println(expGroup); // Update Mongo document doc.put("exp_group", expGroup); doc.put("analyzed", true); UpdateResult updateResult = collection.updateOne(Filters.eq("_id", gsmNumber), new Document("$set", doc)); }// ww w .j ava 2 s . c om } if (session.isOpen()) { session.close(); } sessionFactory.close(); mongoClient.close(); }