Example usage for com.mongodb.client MongoDatabase getCollection

List of usage examples for com.mongodb.client MongoDatabase getCollection

Introduction

This page lists usage examples for com.mongodb.client MongoDatabase getCollection.

Prototype

MongoCollection<Document> getCollection(String collectionName);

Document

Gets a collection.
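
Before the full source-file examples below, here is a minimal sketch of the call in isolation. The host, database name, and collection name are placeholder assumptions for illustration, not values taken from the examples.

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;

public class GetCollectionExample {
    public static void main(String[] args) {
        // Placeholder connection and names; adjust to your deployment.
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        MongoDatabase db = mongoClient.getDatabase("test");

        // getCollection returns a MongoCollection<Document> view of the named
        // collection; MongoDB creates the collection lazily on first write.
        MongoCollection<Document> collection = db.getCollection("samples");

        collection.insertOne(new Document("_id", "example").append("imported", true));
        System.out.println("documents: " + collection.count());

        mongoClient.close();
    }
}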

Usage

From source file:module.script.ImportSupplementaryGSE25219.java

License:Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public ImportSupplementaryGSE25219() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "NIHMS321722-supplement-7.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    // ===== Format raw data into data structures ======

    List<Map<String, String>> listMap = new ArrayList<Map<String, String>>();
    List<String> headerMap = new ArrayList<String>();
    Map<String, String> mapBrain = new HashMap<String, String>();

    for (int i = 0; i < excelService.getData().size(); i++) {
        List<Object> dataLine = excelService.getData().get(i);

        String brainCode = (String) dataLine.get(0);
        if (brainCode != null) {
            mapBrain = new HashMap<String, String>();
        }

        // Existing brain code
        if (dataLine != null && dataLine.size() > 2 && dataLine.get(1) != null && dataLine.get(2) != null) {
            // System.out.println(dataLine.get(1) + " = " + dataLine.get(2));
            mapBrain.put(dataLine.get(1).toString().trim(), dataLine.get(2).toString().trim());
        }

        if (brainCode != null) {
            // New Brain code

            // System.out.println("brain code " + brainCode);
            headerMap.add(brainCode);
            listMap.add(mapBrain);
        }
    }

    // ===== Recognize data =====

    for (int i = 0; i < headerMap.size(); i++) {
        System.out.println("----------------------------");
        String code = headerMap.get(i);
        System.out.println(i + " " + code);
        Map<String, String> map = listMap.get(i);

        Map<String, String> updatedMap = new HashMap<String, String>();

        for (Map.Entry<String, String> entry : map.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();

            if (!key.toLowerCase().equals("age")
                    // && !key.toLowerCase().equals("ethnicity")
                    // && !key.toLowerCase().equals("sex")
                    && !value.toLowerCase().equals("no data")) {
                updatedMap.put(key, value);
            }

            // System.out.println(key + " -> " + value);
        }

        List<Document> listDocuments = collection
                .find(Filters.and(Filters.eq("exp_group.main_gse_number", "GSE25219"),
                        Filters.eq("parameters.brain code", code)))
                .into(new ArrayList<Document>());
        System.out.println("Number of corresponding Mongo documents " + listDocuments.size());
        System.out.println(updatedMap);

        for (int j = 0; j < listDocuments.size(); j++) {
            Document doc = listDocuments.get(j);

            Document parameters = (Document) doc.get("parameters");
            parameters.putAll(updatedMap);
            System.out.println("\t" + parameters);

            // Update Mongo document
            doc.put("parameters", parameters);
            doc.put("analyzed", true);
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", doc.get("_id")),
                    new Document("$set", doc));

        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.pro12.CreatePro12.java

License:Open Source License

@SuppressWarnings({ "unchecked" })
public CreatePro12() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    // MongoDatabase db = mongoClient.getDatabase("geo");

    MongoCollection<Document> collectionSeries = db.getCollection("series");

    Date submissionDate = null;
    try {
        submissionDate = dateFormat.parse("2010-01-01");
    } catch (ParseException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    List<String> platforms = new ArrayList<String>();
    platforms.add("GPL570");

    Document docSeries = new Document();
    docSeries.append("_id", "PRO12").append("title", "Project 12: Reference tissues and cell lines")
            .append("platforms", platforms).append("submission_date", submissionDate)
            .append("last_update", submissionDate).append("import_date", new Date());

    System.out.println(docSeries);
    collectionSeries.insertOne(docSeries);

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.pro12.TransferPro12.java

License:Open Source License

@SuppressWarnings({ "unchecked" })
public TransferPro12() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    String sql = "select id_sample from epimed_prod.om_sample join epimed_prod.om_sample_series using (id_sample) "
            + "join epimed_prod.om_series using (id_series) where id_series='PRO12'";

    List<String> list = session.createSQLQuery(sql).list();

    Document pro12 = new Document();
    pro12.append("series", "PRO12");

    for (String gsmNumber : list) {

        Document doc = collection.find(Filters.eq("_id", gsmNumber)).first();

        System.out.println("-----------------------------");
        System.out.println(gsmNumber + " " + doc);

        if (doc != null) {
            // Update Mongo document
            collection.updateOne(Filters.eq("_id", gsmNumber), new Document("$push", pro12));
        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.proallchen.CreateProallChen.java

License:Open Source License

public CreateProallChen() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    // MongoDatabase db = mongoClient.getDatabase("geo");

    MongoCollection<Document> collectionSeries = db.getCollection("series");

    Date submissionDate = null;
    try {
        submissionDate = dateFormat.parse("2016-05-13");
    } catch (ParseException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    // List<String> platforms = new ArrayList<String>();
    // platforms.add("GPL570");

    Document docSeries = new Document();
    docSeries.append("_id", "PROALL_CHEN")
            .append("title", "Genomic Profiling of Adult and Pediatric B-cell Acute Lymphoblastic Leukemia")
            .append("platforms", null).append("submission_date", submissionDate)
            .append("last_update", submissionDate).append("import_date", new Date());

    System.out.println(docSeries);
    collectionSeries.insertOne(docSeries);

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.proallchen.ImportProallChenOriginal.java

License:Open Source License

public ImportProallChenOriginal() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Samples ======

    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "PROALL_CHEN_clinical.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    System.out.println(excelService.getHeader());

    String idSeries = "PROALL_CHEN";
    List<String> listSeries = new ArrayList<String>();
    listSeries.add(idSeries);

    for (int i = 0; i < excelService.getData().size(); i++) {

        List<Object> line = excelService.getData().get(i);

        String idSample = "ESM" + line.get(0);

        System.out.println(idSample + " " + line);

        Document docSample = collectionSamples.find(Filters.eq("_id", idSample.trim())).first();

        System.out.println(docSample);

        Document parameters = (Document) docSample.get("parameters");

        for (int j = 0; j < excelService.getHeader().size(); j++) {

            String header = (String) excelService.getHeader().get(j);
            Object value = line.get(j);
            // System.out.println(header + " = " + value);

            parameters.append(header, value);

        }

        System.out.println(parameters);

        // Update Mongo document
        docSample.put("parameters", parameters);
        UpdateResult updateResult = collectionSamples.updateOne(Filters.eq("_id", docSample.get("_id")),
                new Document("$set", docSample));
    }

    mongoClient.close();
}

From source file:module.script.proallchen.ImportProallChenSupplementary.java

License:Open Source License

public ImportProallChenSupplementary() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Samples ======

    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator()
            + "DB_ALL_JIN_RNASEC_clinical_data_supplementary.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    System.out.println(excelService.getHeader());

    String idSeries = "PROALL_CHEN";
    List<String> listSeries = new ArrayList<String>();
    listSeries.add(idSeries);

    for (int i = 0; i < excelService.getData().size(); i++) {

        List<Object> line = excelService.getData().get(i);

        String idSample = "ESM" + line.get(0);

        System.out.println(idSample + " " + line);

        // ===== Sample Document =====

        Document docSample = new Document();

        docSample.append("_id", idSample).append("main_gse_number", idSeries).append("series", listSeries)
                .append("organism", "Homo sapiens").append("submission_date", today)
                .append("last_update", today).append("import_date", today).append("analyzed", false);

        // ===== Mandatory parameters =====
        Document expGroup = generateExpGroup(idSeries, idSample);
        docSample.append("exp_group", expGroup);

        // ===== Supplementary parameters =====

        Document parameters = new Document();
        parameters.append("id_sample", idSample);

        // === Attributes ===

        for (int j = 0; j < excelService.getHeader().size(); j++) {

            String header = (String) excelService.getHeader().get(j);
            Object value = line.get(j);
            // System.out.println(header + " = " + value);

            parameters.append(header, value);

        }
        docSample.append("parameters", parameters);

        System.out.println(docSample);

        // === Delete if already exist ===
        collectionSamples.deleteOne(eq("_id", idSample));

        // ===== Insert data =====
        collectionSamples.insertOne(docSample);

    }

    mongoClient.close();
}

From source file:module.script.probcp.CreateProbcp.java

License:Open Source License

@SuppressWarnings({ "unchecked" })
public CreateProbcp() {

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    // MongoDatabase db = mongoClient.getDatabase("geo");

    MongoCollection<Document> collectionSeries = db.getCollection("series");

    Date submissionDate = null;
    try {
        submissionDate = dateFormat.parse("2017-03-20");
    } catch (ParseException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    List<String> platforms = new ArrayList<String>();
    platforms.add("proteomics");

    Document docSeries = new Document();
    docSeries.append("_id", "PROBCP").append("title", "Breast cancer proteomics").append("platforms", platforms)
            .append("submission_date", submissionDate).append("last_update", submissionDate)
            .append("import_date", new Date());

    System.out.println(docSeries);
    collectionSeries.insertOne(docSeries);

    mongoClient.close();
}

From source file:module.script.probcp.ImportSamplesProbcp.java

License:Open Source License

public ImportSamplesProbcp() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    String[] studies = { "tya16", "law15" };
    List<String> series = new ArrayList<String>();

    for (int l = 0; l < studies.length; l++) {

        String idStudy = studies[l];
        String studyName = idStudy.toUpperCase();

        series.clear();
        series.add(studyName);
        series.add("PROBCP");

        String sql = "select * from st_bcp." + idStudy + "_sample order by id_sample";

        List<Object> listSamples = session.createSQLQuery(sql).list();

        for (int i = 0; i < listSamples.size(); i++) {

            Object[] lineSample = (Object[]) listSamples.get(i);

            String idSample = (String) lineSample[0];
            String clinicalClassification = (String) lineSample[1];
            String tnmStage = (String) lineSample[2];
            Integer grade = (Integer) lineSample[3];
            String type = (String) lineSample[4];

            System.out.println(Arrays.toString(lineSample));

            // ===== Collection method ====
            String collectionMethod = "biopsy";
            if (idStudy.equals("law15") && !idSample.startsWith("Tumor")) {
                collectionMethod = "cell line";
            }

            // ==== Topology ====
            ClTopology topology = session.get(ClTopology.class, "C50.9");

            // === Morphology ===
            ClMorphology morphology = session.get(ClMorphology.class, "8010/3"); // carcinoma
            ClMorphology idc = session.get(ClMorphology.class, "8500/3"); // inf. duct. carcinoma
            ClMorphology lo = session.get(ClMorphology.class, "8520/3"); // lobular carcinoma
            ClMorphology ac = session.get(ClMorphology.class, "8140/3"); // adenocarcinoma

            if (type != null && (type.contains("IDC") || type.contains("DC") || type.contains("ductal"))) {
                morphology = idc;
            }
            if (type != null && type.contains("Lo")) {
                morphology = lo;
            }

            if (type != null && (type.contains("AC") || type.contains("adeno"))) {
                morphology = ac;
            }

            // ===== Sample Document =====

            Document docSample = new Document();

            docSample.append("_id", studyName + "_" + idSample).append("main_gse_number", studyName)
                    .append("series", series).append("organism", "Homo sapiens")
                    .append("submission_date", today).append("last_update", today).append("import_date", today)
                    .append("analyzed", true);

            // ===== Mandatory parameters =====

            Document expGroup = this.generateExpGroup(idSample, studyName, tnmStage, grade, type,
                    collectionMethod, topology, morphology);
            docSample.append("exp_group", expGroup);

            // ===== Supplementary parameters =====

            Document parameters = this.generateParameters(idSample);
            parameters.append("clinical_classification", clinicalClassification);
            parameters.append("tnm_stage", tnmStage);
            parameters.append("grade", grade);
            parameters.append("type", type);

            // === Append parameters to document ===

            docSample.append("parameters", parameters);

            // === Save ===
            collectionSamples.insertOne(docSample);

            System.out.println(docSample);

        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();

}

From source file:module.script.probcp.UpdateSamplesProbcp.java

License:Open Source License

public UpdateSamplesProbcp() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    String[] studies = { "tya16" };
    List<String> series = new ArrayList<String>();

    for (int l = 0; l < studies.length; l++) {

        String idStudy = studies[l];
        String studyName = idStudy.toUpperCase();

        series.clear();
        series.add(studyName);
        series.add("PROBCP");

        String sql = "select * from st_bcp." + idStudy + "_sample order by id_sample";

        List<Object> listSamples = session.createSQLQuery(sql).list();

        for (int i = 0; i < listSamples.size(); i++) {

            Object[] lineSample = (Object[]) listSamples.get(i);

            String idSample = (String) lineSample[0];
            String clinicalClassification = (String) lineSample[1];
            String tnmStage = (String) lineSample[2];
            Integer grade = (Integer) lineSample[3];
            String type = (String) lineSample[4];

            System.out.println(Arrays.toString(lineSample));

            String id = studyName + "_" + idSample;

            Document docSample = collectionSamples.find(Filters.eq("_id", id)).first();
            Document expgroup = (Document) docSample.get("exp_group");
            expgroup.append("tnm_grade", grade);
            expgroup.append("tnm_stage", null);
            docSample.append("exp_group", expgroup);

            UpdateResult updateResult = collectionSamples.updateOne(Filters.eq("_id", id),
                    new Document("$set", docSample));

            System.out.println(docSample);

        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();

}

From source file:module.script.QueryAvailableData.java

License:Open Source License

public QueryAvailableData() {

    // ===== Service =====
    FormatService formatService = new FormatService();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Print block =====
    Block<Document> printBlock = new Block<Document>() {
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };

    // ===== Group by topology =====
    // db.getCollection('samples').aggregate({ $group: { "_id" : "$exp_group.topology", "total" : {$sum : 1} }}, {$sort : {total : -1}} )
    /*
    List<Document> listDocuments = collectionSamples.aggregate(
    Arrays.asList(
          Aggregates.group("$exp_group.topology", Accumulators.sum("total", 1)),
          Aggregates.sort(Sorts.orderBy(Sorts.descending("total")))
          ))
    .into(new ArrayList<Document>());
     */

    // ===== Group by sample =====
    /*
    List<Document> listSeries = collectionSeries
    .find()
    .projection(Projections.fields(Projections.include("title")))
    .sort(Sorts.ascending("_id"))
    .into(new ArrayList<Document>());
            
    for (Document doc : listSeries) {
            
       String idSeries = doc.getString("_id");
       Long nbSamples = collectionSamples.count((Filters.eq("series", idSeries)));
       doc.append("nbSamples", nbSamples);
    } 
    display(listSeries);
    */

    // === Export Geo for a list of idSeries ===

    // String[] listIdSeries = {"GSE11092","GSE13309", "GSE13159"};

    /*
    List<Document> docExpGroup = collectionSamples
    .find(Filters.in("series", listIdSeries))
    .projection(Projections.fields(Projections.include("exp_group"), Projections.excludeId()))
    .into(new ArrayList<Document>());
    // display(docExpGroup);
            
    List<String> header = formatService.extractHeader(docExpGroup, "exp_group");
    List<Object> data = formatService.extractData(docExpGroup, header, "exp_group");
    System.out.println(header);
    displayMatrix(data);
            
    */
    // List<Object> listObjects = formatService.convertHeterogeneousMongoDocuments(docExpGroup, "exp_group");
    // displayMatrix(listObjects);

    // List<Object> listObjects = formatService.convertHomogeneousMongoDocuments(listDocuments);

    // === Find series ===

    String[] listIdSamples = { "GSM80908", "GSM274639", "GSM274638", "GSM280213" };
    List<Document> listDocuments = collectionSamples
            .aggregate(Arrays.asList(Aggregates.match(Filters.in("_id", listIdSamples)),
                    Aggregates.group("$main_gse_number"),
                    // after the $group stage, the series number is carried in _id
                    Aggregates.sort(Sorts.orderBy(Sorts.ascending("_id")))))
            .into(new ArrayList<Document>());
    List<Object> listObjects = formatService.convertHomogeneousMongoDocuments(listDocuments);
    displayMatrix(listObjects);

    mongoClient.close();
}