Example usage for org.hibernate SessionFactory close

List of usage examples for org.hibernate SessionFactory close

Introduction

On this page you can find example usage for org.hibernate.SessionFactory.close().

Prototype

void close() throws HibernateException;

Document

Destroy this SessionFactory and release all resources (caches, connection pools, etc).
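
Because close() releases shared resources for the whole application, it should run exactly once, at shutdown, and only after every Session obtained from the factory has been closed. Below is a minimal sketch of that pattern; HibernateUtil is a hypothetical bootstrap helper like the ones in the examples that follow, and on recent Hibernate versions (5.2+), where Session and SessionFactory are AutoCloseable, try-with-resources achieves the same effect.

import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class CloseExample {
    public static void main(String[] args) {
        SessionFactory sessionFactory = HibernateUtil.getSessionFactory();
        try {
            Session session = sessionFactory.openSession();
            try {
                // ... work with the session ...
            } finally {
                session.close();
            }
        } finally {
            // Destroy the factory and release caches, connection pools, etc.
            sessionFactory.close();
        }
    }
}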

Usage

From source file:model.ConnectionBD.java

public static void main(String[] args) {
    SessionFactory sf = HibernateUtil.getSessionFactory();
    Session session = sf.openSession();

    // Close the session before destroying the factory.
    session.close();
    sf.close();
}

From source file:Modelos.Clases.Pruebas.java

public static void main(String[] args) {
    Configuration configuracion = new Configuration();
    configuracion.configure(); // reads the hibernate configuration file

    ServiceRegistry serviceRegistry = new ServiceRegistryBuilder().applySettings(configuracion.getProperties())
            .buildServiceRegistry();
    SessionFactory sessionFactory = configuracion.buildSessionFactory(serviceRegistry);
    Session session = sessionFactory.openSession();

    LoginDAO login = new LoginDAO("root", "root");
    ClienteDAO cliente = new ClienteDAO("nombre", "apellido", "telefono", "dni");
    VehiculoDAO vehiculo = new VehiculoDAO("marca", "modelo", "color", "matricula", "descripcion");
    MecanicoDAO mecanico = new MecanicoDAO("nombre", "apellido");
    cliente.setVehiculo(vehiculo);
    vehiculo.setCliente(cliente);
    vehiculo.setMecanico(mecanico);
    mecanico.setVehiculo(vehiculo);

    session.beginTransaction();
    session.saveOrUpdate(login);
    session.saveOrUpdate(cliente);
    session.saveOrUpdate(vehiculo);
    session.saveOrUpdate(mecanico);
    session.getTransaction().commit();
    session.close();
    sessionFactory.close();

}
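
ServiceRegistryBuilder, used above, is the Hibernate 4.0-4.2 bootstrap API; it was deprecated in 4.3 and removed in 5.0. A sketch of the equivalent bootstrap on Hibernate 4.3+/5.x (not part of the original example):

import org.hibernate.SessionFactory;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;

public class ModernBootstrap {
    public static void main(String[] args) {
        StandardServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder()
                .configure() // reads hibernate.cfg.xml from the classpath
                .build();
        SessionFactory sessionFactory = new MetadataSources(serviceRegistry)
                .buildMetadata()
                .buildSessionFactory();

        // ... openSession(), do work, close the session ...

        sessionFactory.close();
    }
}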

From source file:module.AnalyseGeo.java

License:Open Source License

public AnalyseGeo() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("sample");
    List<Document> listDocuments = collection.find(Filters.in("series", gseNumber))
            // .find(Filters.and(Filters.in("series", gseNumber), Filters.eq("analyzed", false)))
            .into(new ArrayList<Document>());

    // ===== Service =====
    OntologyService ontologyService = new OntologyService(session);
    DispatcherFactory dispatcherFactory = new DispatcherFactory(session);

    // ===== Begin transaction =====
    session.beginTransaction();

    // ===== Analyse ======

    for (int i = 0; i < listDocuments.size(); i++) {
        // for (int i=0; i<1; i++) {
        Document doc = listDocuments.get(i);
        Document expGroup = (Document) doc.get("exp_group");

        String gsmNumber = doc.getString("_id");

        List<String> listEntries = new ArrayList<String>();
        List<String> parameters = new ArrayList<String>();

        String title = (String) expGroup.get("sample_title");
        String source = (String) expGroup.get("sample_source");
        listEntries.add(title);
        listEntries.add(source);

        Map<String, Object> mapParameters = (Map<String, Object>) doc.get("parameters");
        parameters.addAll(mapParameters.keySet());
        parameters.remove("id_sample");
        parameters.remove("extract_protocol");

        // To remove
        parameters.remove("lab description");

        for (int j = 0; j < parameters.size(); j++) {
            listEntries.add(parameters.get(j) + ": " + mapParameters.get(parameters.get(j)));
        }

        // === Clear already filled fields (only if necessary) ===
        // this.clear(expGroup);

        Map<String, List<Object>> mapOntologyObjects = ontologyService.recognizeOntologyObjects(listEntries);
        // Map <ClOntologyCategory, Set<String>> mapOntologyCategories = ontologyService.getMapOntologyCategories();
        // this.generateSummary(ontologyService, mapOntologyCategories, mapOntologyObjects);

        System.out.println("------------------------------------------------------------");
        System.out.println(i + " " + gsmNumber + " " + listEntries);
        System.out.println(ontologyService.toString());

        // ===== Create mapping objects and making links =====

        try {

            // === Dispatcher ===
            for (int j = 0; j < categories.length; j++) {

                dispatcherFactory.getObject(expGroup, mapOntologyObjects, categories[j]);

                System.out.print(categories[j]);
                if (expGroup.getString(categories[j]) != null) {
                    System.out.print(" " + expGroup.getString(categories[j]) + "\n");
                } else {
                    System.out.print("\n");
                }

            }

            System.out.println(expGroup);

            // Update Mongo document
            doc.put("exp_group", expGroup);
            doc.put("analyzed", true);
            if (commit) {
                UpdateResult updateResult = collection.updateOne(Filters.eq("_id", gsmNumber),
                        new Document("$set", doc));

            }

        } catch (DispatcherException e) {
            // Report the dispatcher failure and continue with the next sample.
            e.printStackTrace();
        }

    }

    if (commit) {
        MongoCollection<Document> collectionSeries = db.getCollection("series");
        Document series = collectionSeries.find(Filters.eq("_id", gseNumber)).first();
        series.put("status", "analyzed");
        collectionSeries.updateOne(Filters.eq("_id", gseNumber), new Document("$set", series));
    }

    // === Commit transaction ===
    session.getTransaction().commit();
    // session.getTransaction().rollback();

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.ExportTissueOntology.java

License:Open Source License

public ExportTissueOntology() {

    // === Output matrix ===
    List<Object> data = new ArrayList<Object>();
    List<String> header = new ArrayList<String>();
    header.add("id_topology");
    header.add("topology");
    header.add("id_topology_group");
    header.add("topology_group");

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Begin transaction =====
    session.beginTransaction();

    ClTopologyDao topologyDao = new ClTopologyDao(session);
    List<ClTopology> listTopologies = topologyDao.findAll();

    for (ClTopology t : listTopologies) {
        System.out.println(t);

        Object[] dataline = new Object[header.size()];

        int j = 0;
        dataline[j] = t.getIdTopology();
        dataline[++j] = t.getName();
        dataline[++j] = t.getClTopologyGroup().getIdGroup();
        dataline[++j] = t.getClTopologyGroup().getName();

        data.add(dataline);

        System.out.println(Arrays.toString(dataline));

    }

    // === Output file ===

    String fileName = this.getOutputDirectory() + this.getDirSeparator() + "EpiMed_tissue_ontology" + "_"
            + dateFormat.format(new Date()) + ".xlsx";
    fileService.writeExcelFile(fileName, header, data);
    System.out.println(fileName);

    // === Commit transaction ===
    // session.getTransaction().commit();
    session.getTransaction().rollback();

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

}

From source file:module.ImportPlatformFromFile.java

License:Open Source License

public ImportPlatformFromFile() {

    // === Display ===
    System.out.println("\n================ BEGIN Module " + this.getClass().getName() + "================");

    // === INPUT ===
    String idPlatform = "GPL97";
    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "GPL97-17394.txt";
    String gpl = idPlatform.toLowerCase().trim();

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== DAO =====
    OmGeneDao geneDao = new OmGeneDao(session);

    // ===== Session Mongo =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    try {
        // === Begin transaction ===
        session.beginTransaction();

        // ===== Load file =====
        System.out.println("ID Platform " + gpl);
        System.out.println("LOADING \t " + inputfile);
        System.out.println("Please wait... ");
        List<String> listRows = fileService.loadTextFile(inputfile);
        // List<String> listRows = webService.loadGeoData(idPlatform);
        System.out.println("File sucessfully LOADED");

        // ===== Recognize header =====

        List<String> header = fileService.readHeader(listRows, "\t");

        if (header == null || header.isEmpty()) {
            throw new ImportDataException("The header is empty");
        } else {
            System.out.println("Header " + header);
        }

        Integer indId = fileService.findIndex(header, "ID");
        Integer indGbacc = fileService.findIndex(header, "GB_ACC");
        Integer indEntrez = fileService.findIndex(header, "ENTREZ");

        if (indId == null || indGbacc == null || indEntrez == null) {
            throw new ImportDataException("Header not recognized: " + "ID index=" + indId + ", GB_ACC index="
                    + indGbacc + ", ENTREZ index=" + indEntrez);
        } else {
            System.out.println("The following header items are recognized:");
            System.out.println("\t ID index=" + indId + ": " + header.get(indId));
            System.out.println("\t GB_ACC index=" + indGbacc + ": " + header.get(indGbacc));
            System.out.println("\t ENTREZ index=" + indEntrez + ": " + header.get(indEntrez));
        }

        // ===== Recognize data =====

        List<List<String>> data = fileService.readData(listRows, "\t");

        if (data == null || data.isEmpty()) {
            throw new ImportDataException("The data are empty");
        } else {
            System.out.println(
                    "The data are successfully loaded: rows " + data.size() + ", columns " + data.get(0).size());
        }

        // ===== Create specific tables =====

        String sqlCheckTableProbe = "select * from information_schema.tables WHERE table_schema = 'hs' and table_name='om_probe_"
                + gpl + "'";

        List<Object> result = session.createNativeQuery(sqlCheckTableProbe).getResultList();

        String tableProbe = "hs.om_probe_" + gpl;
        String tableGP = "hs.om_gp_" + gpl;

        if (result == null || result.isEmpty()) {
            // Table probe
            String sqlCreateTableProbe = "create table " + tableProbe
                    + "(id_probe             VARCHAR(50)          not null,"
                    + " genbank_acc          VARCHAR(50)          null," + " constraint pk_om_probe_" + gpl
                    + " primary key (id_probe))";
            session.createNativeQuery(sqlCreateTableProbe).executeUpdate();

            // Table gp
            String sqlCreateTableGP = "create table " + tableGP
                    + "(id_probe             VARCHAR(50)          not null,"
                    + " id_gene              INT4                 not null," + " constraint pk_om_gp_" + gpl
                    + " primary key (id_probe, id_gene))";
            session.createNativeQuery(sqlCreateTableGP).executeUpdate();

            // Foreign keys

            String sqlAlterTableProbe = "alter table " + tableGP + " add constraint fk_gp_probe_" + gpl
                    + " foreign key (id_probe)" + "  references " + tableProbe
                    + " (id_probe) on delete restrict on update restrict";
            session.createNativeQuery(sqlAlterTableProbe).executeUpdate();

            String sqlAlterTableGene = "alter table " + tableGP + " add constraint fk_gp_gene_" + gpl
                    + " foreign key (id_gene)"
                    + "  references hs.om_gene (id_gene) on delete restrict on update restrict";
            session.createNativeQuery(sqlAlterTableGene).executeUpdate();
        }

        // ===== Import data =====

        for (int i = 0; i < data.size(); i++) {
            // for (int i=0; i<10; i++) {

            List<String> dataline = data.get(i);

            String idProbe = dataline.get(indId).trim();
            String genbankAcc = dataline.get(indGbacc).trim();

            String sqlInsertProbe = "insert into " + tableProbe + " values('" + idProbe + "',  null)";
            if (genbankAcc != null && !genbankAcc.isEmpty()) {
                sqlInsertProbe = "insert into " + tableProbe + " values('" + idProbe + "', '" + genbankAcc
                        + "')";
            }
            session.createNativeQuery(sqlInsertProbe).executeUpdate();

            OmGenbankUnigene gu = session.get(OmGenbankUnigene.class, genbankAcc);
            if (gu == null && genbankAcc != null && !genbankAcc.isEmpty()) {
                gu = new OmGenbankUnigene();
                gu.setGenbankAcc(genbankAcc);
                session.save(gu);
            }

            String listEntrez = null;
            String[] parts = null;
            if (indEntrez < dataline.size()) {
                listEntrez = dataline.get(indEntrez).trim();
                parts = listEntrez.split("[///\\p{Space}]");

                for (String entrezString : parts) {

                    Integer entrez = null;

                    try {
                        entrez = Integer.parseInt(entrezString);
                    } catch (NumberFormatException e) {
                        // nothing to do
                    }

                    if (entrez != null) {

                        OmGene gene = geneDao.find(entrez);
                        if (gene == null) {
                            gene = geneDao.createGene(entrez, null);
                        }

                        String sqlInsertGP = "insert into " + tableGP + " values('" + idProbe + "', " + entrez
                                + ")";
                        session.createNativeQuery(sqlInsertGP).executeUpdate();

                    }
                }
            }

            if (i % 1000 == 0) {
                System.out.println(i + "\t" + idProbe + "\t" + genbankAcc + "\t" + listEntrez + "\t"
                        + Arrays.toString(parts));
            }

            if (i % 20 == 0) {
                session.flush();
            }
        }

        // ===== Subscribe platform =====

        OmPlatform platform = session.get(OmPlatform.class, idPlatform);
        if (platform != null) {
            platform.setEnabled(true);
            session.update(platform);
        } else {
            MongoCollection<Document> collection = db.getCollection("platforms");
            Document docPlatform = collection.find(Filters.eq("_id", idPlatform)).first();
            String title = docPlatform.getString("title");
            String manufacturer = docPlatform.getString("manufacturer");
            platform = new OmPlatform();
            platform.setIdPlatform(idPlatform);
            platform.setTitle(title);
            platform.setManufacturer(manufacturer);
            platform.setEnabled(true);
            session.save(platform);
        }

        // ===== Rights =====
        String sqlRights;
        String[] users = { "epimed_prod", "epimed_web", "epimed_script" };
        for (String user : users) {
            sqlRights = "GRANT SELECT ON ALL TABLES IN SCHEMA hs TO " + user;
            session.createNativeQuery(sqlRights).executeUpdate();
        }
        sqlRights = "GRANT ALL ON ALL TABLES IN SCHEMA hs TO epimed_admin";
        session.createNativeQuery(sqlRights).executeUpdate();

        // === Commit transaction ===
        session.getTransaction().commit();
        // session.getTransaction().rollback();

    } catch (Exception e) {
        session.getTransaction().rollback();
        System.out.println("ROLLBACK in module " + this.getClass().getName());
        e.printStackTrace();
    } finally {
        if (session.isOpen()) {
            session.close();
        }
        sessionFactory.close();
        mongoClient.close();
    }

    // === Display ===
    System.out.println("================ END Module " + this.getClass().getName() + "================");

}
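
A caution about the example above: the insert statements splice idProbe and genbankAcc directly into the SQL text, which breaks on values containing quotes and invites SQL injection. Below is a safer sketch of the probe insert using Hibernate's named-parameter binding; the table name is an identifier and still has to be concatenated, but the values are bound:

        // Values are bound instead of concatenated into the SQL string.
        if (genbankAcc != null && !genbankAcc.isEmpty()) {
            session.createNativeQuery("insert into " + tableProbe + " values (:id, :acc)")
                    .setParameter("id", idProbe)
                    .setParameter("acc", genbankAcc)
                    .executeUpdate();
        } else {
            session.createNativeQuery("insert into " + tableProbe + " values (:id, null)")
                    .setParameter("id", idProbe)
                    .executeUpdate();
        }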

From source file:module.script.emtab365.UpdateSamplesEMTAB365.java

License:Open Source License

public UpdateSamplesEMTAB365() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== INIT =====

    ClMorphology ductal = session.get(ClMorphology.class, "8500/3"); // 8500/3   Infiltrating duct carcinoma, NOS (C50._)
    ClMorphology lobular = session.get(ClMorphology.class, "8520/3"); // 8520/3   Lobular carcinoma, NOS (C50._)
    ClMorphology morphology = session.get(ClMorphology.class, "8010/3"); // Carcinoma

    ClTopology breast = session.get(ClTopology.class, "C50.9"); // Breast
    ClTopology blood = session.get(ClTopology.class, "C42.0"); // Blood
    ClTopology lymphNode = session.get(ClTopology.class, "C77.9"); // Lymph node

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    List<Document> listDocuments = collection.find(Filters.in("series", gseNumber))
            .into(new ArrayList<Document>());

    for (int i = 0; i < listDocuments.size(); i++) {
        Document doc = listDocuments.get(i);
        Document expgroup = (Document) doc.get("exp_group");
        Document parameters = (Document) doc.get("parameters");

        String histoType = parameters.getString("Histology");
        String histoSubtype = parameters.getString("CIT classification");

        expgroup.put("histology_subtype", histoSubtype);

        // Start from the generic carcinoma default for each sample, so a
        // morphology assigned in a previous iteration does not carry over.
        ClMorphology sampleMorphology = morphology;
        if (histoType != null && histoType.equalsIgnoreCase("lobular")) {
            sampleMorphology = lobular;
        }
        if (histoType != null && histoType.equalsIgnoreCase("ductal")) {
            sampleMorphology = ductal;
        }

        expgroup.put("id_morphology", sampleMorphology.getIdMorphology());
        expgroup.put("morphology", sampleMorphology.getName());

        expgroup.put("sample_source", parameters.getString("Source Name"));

        String organismPart = parameters.getString("OrgansimPart");

        ClTopology topology = null;
        if (organismPart != null) {

            if (organismPart.toLowerCase().contains("breast")) {
                topology = breast;
            }

            if (organismPart.toLowerCase().contains("blood")) {
                topology = blood;
            }
            if (organismPart.toLowerCase().contains("lymph")) {
                topology = lymphNode;
            }

        } else {
            topology = breast;
        }

        // Fall back to breast when the organism part is unrecognized, so the
        // lookups below never dereference a null topology.
        if (topology == null) {
            topology = breast;
        }

        expgroup.put("id_topology", topology.getIdTopology());
        expgroup.put("topology", topology.getName());
        expgroup.put("id_topology_group", topology.getClTopologyGroup().getIdGroup());
        expgroup.put("topology_group", topology.getClTopologyGroup().getName());

        // ==== Survival =====

        Object dfs_months = parameters.get("Delay Metastasis Free Survival months");
        if (dfs_months != null) {
            expgroup.put("dfs_months", dfs_months);
        }

        Object os_months = parameters.get("Delay Overall Survival months");
        if (os_months != null) {
            expgroup.put("os_months", os_months);
        }

        Double os = (Double) expgroup.get("os_months");
        Double dfs = (Double) expgroup.get("dfs_months");
        if (os != null && dfs != null && dfs.equals(os)) {
            expgroup.put("relapsed", false);
        }

        if (os != null && dfs != null && dfs < os) {
            expgroup.put("relapsed", true);
        }

        if (os != null && dfs != null && dfs > os) {
            expgroup.put("relapsed", null);
        }

        Object relapseDate = parameters.get("Relapse  Metastasis Date");
        if (relapseDate != null) {
            expgroup.put("relapsed", true);
        }

        // ==== Grade ====
        expgroup.put("tnm_grade", parameters.get("Grade  Scarff Bloom Richardson"));

        // ==== Files =====

        expgroup.put("ftp", parameters.getString("ArrayExpress FTP file"));
        expgroup.put("file_name", parameters.getString("Array Data File"));

        expgroup.remove("individual");
        if (parameters.getString("Individual") != null) {
            expgroup.put("individual", parameters.getString("Individual"));
        }

        // ==== Biomarkers ====
        /*
        String p53 = parameters.getString("Phenotype - TP53  Gene mutation  Status");
        expgroup.put("p53", value)
                
        String pr = parameters.getString("PGR  Protein expression");
        String er = parameters.getString("ESR1  Protein expression");
        String her2 = parameters.getString("ERBB2  Protein expression");
         */

        doc.put("exp_group", expgroup);

        System.out.println(i + " " + doc.get("_id") + " " + doc.get("analyzed") + " " + expgroup);

        if (commit) {
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", doc.get("_id")),
                    new Document("$set", doc));
        }
    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.epimed_ontology.AddEpimedGroupToSamples.java

License:Open Source License

public AddEpimedGroupToSamples() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();
    ClTopologyDao topologyDao = new ClTopologyDao(session);

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSample = db.getCollection("sample");

    List<Document> samples = collectionSample.find().into(new ArrayList<Document>());

    for (int i = 0; i < samples.size(); i++) {
        Document sample = samples.get(i);
        Document expgroup = sample.get("exp_group", Document.class);

        String idTopology = expgroup.getString("id_topology");

        if (idTopology != null && !idTopology.isEmpty()) {

            ClTopology topology = topologyDao.find(idTopology);
            ClEpimedGroup grp1 = topology.getClEpimedGroup();
            ClEpimedGroup grp2 = grp1.getParent();
            ClEpimedGroup grp3 = grp2.getParent();
            expgroup.append("tissue_group_level1", grp1.getName());
            expgroup.append("tissue_group_level2", grp2.getName());
            expgroup.append("tissue_group_level3", grp3.getName());

            System.out.println((i + 1) + "/" + samples.size() + " " + expgroup);

            sample.append("exp_group", expgroup);
            collectionSample.updateOne(Filters.eq("_id", sample.getString("_id")),
                    new Document("$set", sample));
        }

    }

    // === Commit transaction ===
    // session.getTransaction().commit();
    session.getTransaction().rollback();

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.epimed_ontology.UpdateFetalAdultOvary.java

License:Open Source License

public UpdateFetalAdultOvary() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();
    ClTopologyDao topologyDao = new ClTopologyDao(session);
    ClTopology adultOvary = topologyDao.find("C56.9");
    ClTopology fetalOvary = topologyDao.find("E56.9");

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSample = db.getCollection("sample");

    Bson filters = Filters.and(Filters.eq("exp_group.id_topology", "C56.9"), // ovary
            Filters.eq("exp_group.id_tissue_stage", 1) // adult
    );

    List<Document> samples = collectionSample.find(filters).into(new ArrayList<Document>());

    for (Document sample : samples) {
        Document expgroup = sample.get("exp_group", Document.class);
        expgroup.append("id_topology", adultOvary.getIdTopology());
        expgroup.append("topology", adultOvary.getName());
        sample.append("exp_group", expgroup);
        collectionSample.updateOne(Filters.eq("_id", sample.getString("_id")), new Document("$set", sample));
    }
    System.out.println(samples.size());

    // === Commit transaction ===
    // session.getTransaction().commit();
    session.getTransaction().rollback();

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.ImportSupplementaryGSE20711.java

License:Open Source License

public ImportSupplementaryGSE20711() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "GSE20711_emmm0003-0726-SD2.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    String gseNumber = "GSE20711";

    for (int i = 0; i < excelService.getData().size(); i++) {
        List<Object> dataLine = excelService.getData().get(i);

        String bcString = (String) dataLine.get(0);
        bcString = bcString.replaceAll("BC", "");

        Integer bcNumber = Integer.parseInt(bcString);

        Document docSample = collection
                .find(Filters
                        .and(Filters.in("series", gseNumber),
                                Filters.eq("exp_group.sample_title",
                                        "Breast tumor from patient P_" + bcNumber + " (expression data)")))
                .first();

        System.out.println("-------------------------------------------");
        System.out.println(dataLine);
        System.out.println(docSample);

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.script.ImportSupplementaryGSE25219.java

License:Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public ImportSupplementaryGSE25219() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "NIHMS321722-supplement-7.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    // ===== Format raw data into data structures ======

    List<Map<String, String>> listMap = new ArrayList<Map<String, String>>();
    List<String> headerMap = new ArrayList<String>();
    Map<String, String> mapBrain = new HashMap<String, String>();

    for (int i = 0; i < excelService.getData().size(); i++) {
        List<Object> dataLine = excelService.getData().get(i);

        String brainCode = (String) dataLine.get(0);
        if (brainCode != null) {
            mapBrain = new HashMap<String, String>();
        }

        // Existing brain code
        if (dataLine != null && dataLine.size() > 2 && dataLine.get(1) != null && dataLine.get(2) != null) {
            // System.out.println(dataLine.get(1) + " = " + dataLine.get(2));
            mapBrain.put(dataLine.get(1).toString().trim(), dataLine.get(2).toString().trim());
        }

        if (brainCode != null) {
            // New Brain code

            // System.out.println("brain code " + brainCode);
            headerMap.add(brainCode);
            listMap.add(mapBrain);
        }
    }

    // ===== Recognize data =====

    for (int i = 0; i < headerMap.size(); i++) {
        System.out.println("----------------------------");
        String code = headerMap.get(i);
        System.out.println(i + " " + code);
        Map<String, String> map = listMap.get(i);

        Map<String, String> updatedMap = new HashMap<String, String>();

        for (Map.Entry<String, String> entry : map.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();

            if (!key.toLowerCase().equals("age")
                    // && !key.toLowerCase().equals("ethnicity")
                    // && !key.toLowerCase().equals("sex")
                    && !value.toLowerCase().equals("no data")) {
                updatedMap.put(key, value);
            }

            // System.out.println(key + " -> " + value);
        }

        List<Document> listDocuments = collection
                .find(Filters.and(Filters.eq("exp_group.main_gse_number", "GSE25219"),
                        Filters.eq("parameters.brain code", code)))
                .into(new ArrayList<Document>());
        System.out.println("Number of corresponding Mongo documents " + listDocuments.size());
        System.out.println(updatedMap);

        for (int j = 0; j < listDocuments.size(); j++) {
            Document doc = listDocuments.get(j);

            Document parameters = (Document) doc.get("parameters");
            parameters.putAll(updatedMap);
            System.out.println("\t" + parameters);

            // Update Mongo document
            doc.put("parameters", parameters);
            doc.put("analyzed", true);
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", doc.get("_id")),
                    new Document("$set", doc));

        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}