Example usage for com.mongodb.client MongoCollection updateOne

List of usage examples for com.mongodb.client MongoCollection updateOne

Introduction

On this page you can find example usage of com.mongodb.client MongoCollection updateOne.

Prototype

UpdateResult updateOne(Bson filter, Bson update);

Document

Update a single document in the collection according to the specified arguments.
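
Before the full examples below, here is a minimal, self-contained sketch of a single-field update. The host, database, collection, and field names ("test", "people", "name", "age") are illustrative assumptions, not taken from the examples that follow:

import static com.mongodb.client.model.Filters.eq;

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.result.UpdateResult;
import org.bson.Document;

public class UpdateOneExample {

    public static void main(String[] args) {
        // Connect to a local mongod (adjust host and port as needed)
        MongoClient mongoClient = new MongoClient("127.0.0.1", 27017);
        MongoDatabase db = mongoClient.getDatabase("test");
        MongoCollection<Document> people = db.getCollection("people");

        // Update the first document matching the filter: set "age" to 30
        UpdateResult result = people.updateOne(eq("name", "Alice"),
                new Document("$set", new Document("age", 30)));

        // matchedCount: documents matched by the filter; modifiedCount: documents actually changed
        System.out.println("matched=" + result.getMatchedCount()
                + ", modified=" + result.getModifiedCount());

        mongoClient.close();
    }
}

The UpdateResult returned by updateOne is used in several of the examples below to fall back to insertOne when no document matched.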

Usage

From source file:it.av.fac.dbi.util.FieldUpdater.java

public static void main(String[] args) {
    SimpleDateFormat parser = new SimpleDateFormat("EEE MMM d HH:mm:ss Z yyyy");

    MongoClient mongoClient = new MongoClient("127.0.0.1", 27017);
    MongoDatabase mongoDB = mongoClient.getDatabase("test");
    MongoCollection<Document> collection = mongoDB.getCollection("tweets");

    FindIterable<Document> documents = collection.find();
    documents.forEach(new Consumer<Document>() {
        @Override
        public void accept(Document doc) {
            try {
                //System.out.println(doc.toJson());
                Document user = (Document) doc.get("user");
                String dateStr = user.getString("created_at");
                //System.out.println(dateStr);
                Date date = parser.parse(dateStr);
                //System.out.println(date);
                System.out.println(collection.updateOne(eq("_id", doc.get("_id")),
                        new Document("$set", new Document("user.created_at", date))));
            } catch (ParseException ex) {
                Logger.getLogger(FieldUpdater.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    });
}

From source file:it.terrinoni.hw3.PruneHomeworks.java

public static void main(String[] args) {
    // MongoDB connection
    MongoClient client = new MongoClient();
    MongoDatabase database = client.getDatabase("school");
    MongoCollection<Document> collection = database.getCollection("students");

    // Get the cursor to the collection
    MongoCursor<Document> cursor = collection.find().iterator();

    try {
        while (cursor.hasNext()) { // iterate over all the students
            double minScore = Double.MAX_VALUE; // set the maximum value
            Document minDoc = null; // temporary minimum
            Document student = cursor.next(); // current student

            // Retrieve the scores array
            List<Document> scores = student.get("scores", ArrayList.class);
            for (Document score : scores) { // iterate over the scores
                if (score.get("type", String.class).equals("homework")) { // get only the homeworks
                    System.out.println("Student " + student.getDouble("_id") + " has homework score equals to "
                            + score.getDouble("score"));
                    // Update the minimum score 
                    if (score.getDouble("score") < minScore) {
                        minScore = score.getDouble("score");
                        minDoc = score;
                    }
                }
            }
            // Remove the minimum score
            scores.remove(minDoc);

            // Update the student document
            Bson filter = eq("_id", student.getDouble("_id"));
            Document update = new Document("$set", new Document("scores", scores));
            collection.updateOne(filter, update);
        }
    } finally {
        cursor.close(); // close the cursor
    }
}

From source file:module.AnalyseGeo.java

License:Open Source License

public AnalyseGeo() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("sample");
    List<Document> listDocuments = collection.find(Filters.in("series", gseNumber))
            // .find(Filters.and(Filters.in("series", gseNumber), Filters.eq("analyzed", false)))
            .into(new ArrayList<Document>());

    // ===== Service =====
    OntologyService ontologyService = new OntologyService(session);
    DispatcherFactory dispatcherFactory = new DispatcherFactory(session);

    // ===== Begin transaction =====
    session.beginTransaction();

    // ===== Analyse ======

    for (int i = 0; i < listDocuments.size(); i++) {
        // for (int i=0; i<1; i++) {
        Document doc = listDocuments.get(i);
        Document expGroup = (Document) doc.get("exp_group");

        String gsmNumber = doc.getString("_id");

        List<String> listEntries = new ArrayList<String>();
        List<String> parameters = new ArrayList<String>();

        String title = (String) expGroup.get("sample_title");
        String source = (String) expGroup.get("sample_source");
        listEntries.add(title);
        listEntries.add(source);

        Map<String, Object> mapParameters = (Map<String, Object>) doc.get("parameters");
        parameters.addAll(mapParameters.keySet());
        parameters.remove("id_sample");
        parameters.remove("extract_protocol");

        // To remove
        parameters.remove("lab description");

        for (int j = 0; j < parameters.size(); j++) {
            listEntries.add(parameters.get(j) + ": " + mapParameters.get(parameters.get(j)));
        }

        // === Clear already filled fields (only if necessary) ===
        // this.clear(expGroup);

        Map<String, List<Object>> mapOntologyObjects = ontologyService.recognizeOntologyObjects(listEntries);
        // Map <ClOntologyCategory, Set<String>> mapOntologyCategories = ontologyService.getMapOntologyCategories();
        // this.generateSummary(ontologyService, mapOntologyCategories, mapOntologyObjects);

        System.out.println("------------------------------------------------------------");
        System.out.println(i + " " + gsmNumber + " " + listEntries);
        System.out.println(ontologyService.toString());

        // ===== Create mapping objects and making links =====

        try {

            // === Dispatcher ===
            for (int j = 0; j < categories.length; j++) {

                dispatcherFactory.getObject(expGroup, mapOntologyObjects, categories[j]);

                System.out.print(categories[j]);
                if (expGroup.getString(categories[j]) != null) {
                    System.out.print(" " + expGroup.getString(categories[j]) + "\n");
                } else {
                    System.out.print("\n");
                }

            }

            System.out.println(expGroup);

            // Update Mongo document
            doc.put("exp_group", expGroup);
            doc.put("analyzed", true);
            if (commit) {
                UpdateResult updateResult = collection.updateOne(Filters.eq("_id", gsmNumber),
                        new Document("$set", doc));

            }

        } catch (DispatcherException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

    }

    if (commit) {
        MongoCollection<Document> collectionSeries = db.getCollection("series");
        Document series = collectionSeries.find(Filters.eq("_id", gseNumber)).first();
        series.put("status", "analyzed");
        collectionSeries.updateOne(Filters.eq("_id", gseNumber), new Document("$set", series));
    }

    // === Commit transaction ===
    session.getTransaction().commit();
    // session.getTransaction().rollback();

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.ClearGeoExpGroup.java

License:Open Source License

public ClearGeoExpGroup() {

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");
    List<Document> listDocuments = collection.find(Filters.in("series", gseNumber))
            .into(new ArrayList<Document>());

    // ===== Analyse ======

    for (int i = 0; i < listDocuments.size(); i++) {

        Document doc = listDocuments.get(i);
        String id = doc.getString("_id");
        Document expGroup = (Document) doc.get("exp_group");
        this.clear(expGroup);
        expGroup.remove("er");
        expGroup.remove("pr");
        expGroup.remove("her2");
        expGroup.remove("triple_negative");

        // Update Mongo document
        doc.put("exp_group", expGroup);
        doc.put("analyzed", false);
        if (commit) {
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", id), new Document("$set", doc));

        }

    }

    mongoClient.close();
}

From source file:module.CreateStudy.java

License:Open Source License

@SuppressWarnings("unchecked")
public CreateStudy() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // === Excel data loader ===

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "prolung2_expgrp4.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);
    List<Object> listCel = excelService.extractColumn(0);

    Integer indCel = excelService.getHeaderMap().get("gse8894_sample_cel");

    // ===  New Series === 
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    Document docSeries = new Document();
    docSeries.append("_id", "PROLUNG").append("title", "Lung cancerous and non-cancerous samples")
            .append("platforms", null).append("submission_date", today).append("last_update", today)
            .append("import_date", today);

    UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", docSeries.get("_id")),
            new Document("$set", docSeries));
    if (updateResult.getMatchedCount() == 0) {
        collectionSeries.insertOne(docSeries);
    }

    // === Add samples to new series ===
    MongoCollection<Document> collectionSamples = db.getCollection("samples");
    for (int i = 0; i < listCel.size(); i++) {

        String gsm = this.getGsm(listCel.get(i));

        Document docSample = collectionSamples.find(Filters.eq("_id", gsm)).first();

        if (docSample == null) {
            System.err.println("ERROR! Sample " + gsm + "doesn't exist. Try another column.");

            gsm = this.getGsm(excelService.getData().get(i).get(indCel));
            docSample = collectionSamples.find(Filters.eq("_id", gsm)).first();

            if (docSample == null) {
                System.err.println("ERROR! Sample " + gsm + " doesn't exist. Exit.");
                System.exit(0);
            } else {
                System.err.println("Found " + gsm);
            }
        }

        Document expGroup = (Document) docSample.get("exp_group");
        setGpl.add(expGroup.get("id_platform").toString());

        List<String> listSeries = (List<String>) docSample.get("series");
        listSeries.add(docSeries.getString("_id"));
        docSample.put("series", listSeries);

        System.out.println(docSample);
        // updateResult = collectionSamples.updateOne(Filters.eq("_id", docSample.get("_id")), new Document("$set", docSample));
    }

    // === Update platforms of the series ===

    System.out.println(setGpl);

    docSeries.put("platforms", setGpl);
    updateResult = collectionSeries.updateOne(Filters.eq("_id", docSeries.get("_id")),
            new Document("$set", docSeries));
    if (updateResult.getMatchedCount() == 0) {
        collectionSeries.insertOne(docSeries);
    }

}

From source file:module.ImportArrayExpress.java

License:Open Source License

public ImportArrayExpress() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("sample");

    // ===== Pattern =====
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====

    for (String accession : listAccessions) {

        List<String> accessionAsList = new ArrayList<String>();
        accessionAsList.add(accession);

        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);

        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));

        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE ===== 
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession)).first();
                isGseFound = gse != null;

            }
        }

        int nbImportedSamples = 0;

        if (!isGseFound) {

            // ===== Create Mongo series =====

            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());

            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                docSeries.put("secondary_accessions", series.getListAccessions());
            }

            if (commit) {
                UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                        new Document("$set", docSeries));
                if (updateResult.getMatchedCount() == 0) {
                    collectionSeries.insertOne(docSeries);
                }
            }

            System.out.println(docSeries);

            // ===== Import clinical data =====

            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);

            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Recognize samples =====

            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);

            for (int i = 1; i < data.size(); i++) {

                Integer nbSamples = data.size() - 1;

                Map<String, Object> mapParameters = this.createMapParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);

                if (idSample == null) {
                    System.err.println("ERROR: idSample is not recongnized for " + accession);
                    System.out.println("Line " + i);
                    System.out.println(mapParameters);
                    mongoClient.close();
                    System.exit(0);
                } else {
                    if (formatIdSample) {
                        idSample = accession + "-" + idSample;
                        idSample = idSample.trim().replaceAll(" ", "-");
                    }
                }
                idSample = idSample.split(" ")[0].trim();

                // === Organism ===
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = defaultOrganism;
                }

                // === Platform ===
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = defaultPlatform;
                }

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;

                boolean analysed = false;

                if (docAlreadyExist) {
                    analysed = (Boolean) docSampleExist.get("analyzed");
                }

                // ===== Sample Document =====

                Document docSample = mongoService.createSample(idSample, (String) docSeries.get("_id"),
                        accessionAsList, organism, (Date) docSeries.get("submission_date"),
                        (Date) docSeries.get("last_update"), analysed);

                Document expGroup = null;
                Document parameters = null;

                // System.out.println("------------------------------------------------------------------");

                if (docAlreadyExist) {
                    // === ID sample already exists ===
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample
                            + ":  already exists in the database, analyzed=" + analysed);
                    expGroup = docSampleExist.get("exp_group", Document.class);
                    parameters = mongoService.updateParameters(docSampleExist, mapParameters);
                } else {
                    // === New sample ===
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample);
                    expGroup = mongoService.createExpGroup(docSample, platform, null, null, organism);
                    parameters = mongoService.createParameters(docSample, mapParameters);
                    nbImportedSamples++;
                }

                // === Update sample_title, sample_source, layout ===
                expGroup.put("sample_title", parameters.getString("organism part"));
                expGroup.put("sample_source", parameters.getString("Source Name"));
                expGroup.put("layout", parameters.getString("LIBRARY_LAYOUT"));

                docSample.append("exp_group", expGroup);
                docSample.append("parameters", parameters);

                if (commit) {

                    // === Update old if already exist ===
                    if (docAlreadyExist) {
                        // collectionSamples.deleteOne(eq("_id", idSample));
                        collectionSamples.updateOne(Filters.eq("_id", idSample),
                                new Document("$set", docSample));
                    } else {
                        // ===== Insert data =====
                        collectionSamples.insertOne(docSample);
                    }

                    // ===== Update series for platforms =====
                    List<String> listPlatforms = collectionSamples
                            .distinct("exp_group.id_platform", Filters.in("series", accession), String.class)
                            .into(new ArrayList<String>());
                    docSeries.append("platforms", listPlatforms);
                    collectionSeries.updateOne(Filters.eq("_id", accession), new Document("$set", docSeries));
                }

            }

        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to  " + accession
                    + " exists already. Skip import.");
        }

        System.out.println("Number of imported samples: " + nbImportedSamples);

    }

    mongoClient.close();

}

From source file:module.ImportArrayExpressInit.java

License:Open Source License

public ImportArrayExpressInit() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("sample");

    // ===== Pattern =====
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====

    for (String accession : listAccessions) {

        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);

        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));

        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE ===== 
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession)).first();
                isGseFound = gse != null;
                // System.out.println("GEO accession " +  gseNumber + " found: " + isGseFound);
            }
        }

        if (!isGseFound) {

            // ===== Create Mongo series =====

            List<String> listSeriesAcc = new ArrayList<String>();
            listSeriesAcc.add(accession);

            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());

            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                listSeriesAcc.addAll(series.getListAccessions());
            }

            docSeries.put("accessions", listSeriesAcc);

            UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                    new Document("$set", docSeries));
            if (updateResult.getMatchedCount() == 0) {
                collectionSeries.insertOne(docSeries);
            }

            System.out.println(docSeries);

            // ===== Import clinical data =====

            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);

            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Samples =====

            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);

            for (int i = 1; i < data.size(); i++) {

                Integer nbSamples = data.size() - 1;

                Map<String, Object> mapParameters = this.createParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);

                if (idSample == null) {
                    System.err.println("idSample is not recongnized for " + mapParameters);
                    mongoClient.close();
                    System.exit(0);
                }

                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = "Homo sapiens";
                }
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = "rna-seq";
                }
                String layout = (String) mapParameters.get("LIBRARY_LAYOUT");
                if (layout != null && !layout.isEmpty()) {
                    layout = layout.toLowerCase().trim();
                }

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;

                boolean analysed = false;

                if (docAlreadyExist) {
                    analysed = (Boolean) docSampleExist.get("analyzed");
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample
                            + ":  already exists in the database, analyzed=" + analysed);
                } else {
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample);
                }

                // ===== Sample Document =====

                Document docSample = mongoService.createSample(idSample, (String) docSeries.get("_id"),
                        listSeriesAcc, organism, (Date) docSeries.get("submission_date"),
                        (Date) docSeries.get("last_update"), analysed);

                // ===== Mandatory parameters =====

                // Preserve "exp_group" if the document exists already

                Document expGroup = null;
                if (docAlreadyExist) {
                    expGroup = (Document) docSampleExist.get("exp_group");
                } else {
                    expGroup = mongoService.createExpGroup(docSample, platform,
                            (String) mapParameters.get("organism part"),
                            (String) mapParameters.get("Source Name"), organism);
                    if (layout != null) {
                        expGroup.append("layout", layout);

                        // run_name
                        int j = 0;
                        boolean isFound = false;
                        String runName = null;
                        while (!isFound && j < listRunNameParameters.length) {
                            runName = (String) mapParameters.get(listRunNameParameters[j]);
                            isFound = runName != null;
                            j++;
                        }
                        if (runName != null) {
                            expGroup.append("run_name", runName);
                        }

                    }
                }

                docSample.append("exp_group", expGroup);

                // ===== Supplementary parameters =====

                Document parameters = mongoService.createParameters(docSample, mapParameters);
                docSample.append("parameters", parameters);

                // === Delete if already exist ===
                collectionSamples.deleteOne(eq("_id", idSample));

                // ===== Insert data =====
                collectionSamples.insertOne(docSample);

                // ===== Update series for platforms =====
                List<String> listPlatforms = collectionSamples
                        .distinct("exp_group.id_platform", Filters.in("series", accession), String.class)
                        .into(new ArrayList<String>());
                docSeries.append("platforms", listPlatforms);
                collectionSeries.updateOne(Filters.eq("_id", accession), new Document("$set", docSeries));

            }

        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to  " + accession
                    + " exists already. Skip import.");
        }
    }

    mongoClient.close();

}

From source file:module.ImportGeo.java

License:Open Source License

public ImportGeo() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Insert data =====

    for (int k = 0; k < listGseNumber.length; k++) {

        String gseNumber = listGseNumber[k];

        System.out.println("------------------------------------------");
        System.out.println(k + " Import " + gseNumber);

        // ===== Load GSE =====

        NcbiGeoGse gse = new NcbiGeoGse(webService.loadGeo(gseNumber));
        System.out.println(gse);

        // ===== Series =====

        MongoCollection<Document> collectionSeries = db.getCollection("series");

        Document docSeries = mongoService.createSeries(gse.getGseNumber(), gse.getTitle(), gse.getListGpl(),
                gse.getSubmissionDate(), gse.getLastUpdate());

        UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", gse.getGseNumber()),
                new Document("$set", docSeries));
        if (updateResult.getMatchedCount() == 0) {
            collectionSeries.insertOne(docSeries);
        }

        // ===== Platforms =====

        MongoCollection<Document> collectionPlatforms = db.getCollection("platform");

        for (int i = 0; i < gse.getListGpl().size(); i++) {

            NcbiGeoGpl gpl = new NcbiGeoGpl(webService.loadGeo(gse.getListGpl().get(i)));

            System.out.println("\t Import platform " + gpl.getGplNumber());

            Document docPlatforms = mongoService.createPlatform(gpl.getGplNumber(), gpl.getTitle(),
                    gpl.getTaxid(), gpl.getOrganism(), gpl.getManufacturer(), gpl.getSubmissionDate(),
                    gpl.getLastUpdate(), gpl.getTechnology());

            UpdateResult res = collectionPlatforms.updateOne(Filters.eq("_id", gpl.getGplNumber()),
                    new Document("$set", docPlatforms));
            if (res.getMatchedCount() == 0) {
                collectionPlatforms.insertOne(docPlatforms);
            }
        }

        // ===== Samples ======

        MongoCollection<Document> collectionSamples = db.getCollection("sample");

        // for (int i=0; i<1; i++) {
        for (int i = 0; i < gse.getListGsm().size(); i++) {

            NcbiGeoGsm gsm = new NcbiGeoGsm(webService.loadGeo(gse.getListGsm().get(i)));

            Document docSampleExist = collectionSamples.find(Filters.eq("_id", gsm.getGsmNumber())).first();
            boolean docAlreadyExist = docSampleExist != null;

            boolean analysed = false;

            if (docAlreadyExist) {
                analysed = (Boolean) docSampleExist.get("analyzed");
                System.out.println(i + "/" + gse.getListGsm().size() + "\t " + gse.getGseNumber() + "\t "
                        + gsm.getGsmNumber() + ":  already exists in the database, analyzed=" + analysed);
            } else {
                System.out.println(i + "/" + gse.getListGsm().size() + "\t " + gse.getGseNumber() + "\t "
                        + gsm.getGsmNumber());
            }

            // ===== Sample Document =====

            Document docSample = mongoService.createSample(gsm.getGsmNumber(), gse.getGseNumber(),
                    gsm.getListGse(), gsm.getOrganism(), gsm.getSubmissionDate(), gsm.getLastUpdate(),
                    analysed);

            // ===== Mandatory parameters =====

            // Preserve "exp_group" if the document exists already

            Document expGroup = null;
            if (docAlreadyExist) {
                expGroup = (Document) docSampleExist.get("exp_group");
            } else {
                expGroup = mongoService.createExpGroup(docSample, gsm.getGplNumber(), gsm.getTitle(),
                        gsm.getSourceName(), gsm.getOrganism());

            }
            docSample.append("exp_group", expGroup);

            // ===== Supplementary parameters =====

            Document parameters = generateParameters(gsm);
            docSample.append("parameters", parameters);

            // === Delete if already exist ===
            collectionSamples.deleteOne(eq("_id", gsm.getGsmNumber()));

            // ===== Insert data =====
            collectionSamples.insertOne(docSample);

        }

    }

    mongoClient.close();
}

From source file:module.script.AddSeriesToSamples.java

License:Open Source License

public AddSeriesToSamples() {

    // ===== Service =====
    FormatService formatService = new FormatService();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    Set<String> setProjects = new HashSet<String>();

    MongoCollection<Document> collection = db.getCollection("sample");

    Bson filters = Filters.and(Filters.in("series", "PRJNA270632"));

    List<Document> listDocuments = collection.find(filters).into(new ArrayList<Document>());

    for (int i = 0; i < listDocuments.size(); i++) {

        Document doc = listDocuments.get(i);
        Document expgroup = doc.get("exp_group", Document.class);

        if (expgroup.get("exp_Mcount") != null) {

            List<String> projects = doc.get("series", ArrayList.class);
            setProjects.clear();
            setProjects.addAll(projects);
            setProjects.add("TISSUE_SPECIFIC_GENES_HS");
            doc.put("series", setProjects);
            System.out.println(doc.getString("_id") + " " + projects + " -> " + setProjects);

            collection.updateOne(Filters.eq("_id", doc.getString("_id")), new Document("$set", doc));
        }

    }

    mongoClient.close();
}

From source file:module.script.CorrectImportedData.java

License:Open Source License

public CorrectImportedData() {

    // ===== Service =====
    FormatService formatService = new FormatService();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("sample");

    Bson filters = Filters.and(Filters.eq("main_gse_number", gseNumber));

    List<Document> listDocuments = collection.find(filters).into(new ArrayList<Document>());

    for (int i = 0; i < listDocuments.size(); i++) {

        Document doc = listDocuments.get(i);
        Document expgroup = (Document) doc.get("exp_group");
        Document parameters = (Document) doc.get("parameters");

        expgroup.append("id_tissue_stage", 2);
        expgroup.append("tissue_stage", "fetal");

        // Update Mongo document
        doc.put("exp_group", expgroup);
        // doc.put("parameters", parameters);
        doc.put("analyzed", true);

        System.out.println(expgroup);

        collection.updateOne(Filters.eq("_id", doc.getString("_id")), new Document("$set", doc));
    }

    mongoClient.close();
}