Example usage for com.mongodb.client MongoDatabase getCollection

List of usage examples for com.mongodb.client MongoDatabase getCollection

Introduction

On this page you can find example usage of com.mongodb.client MongoDatabase getCollection.

Prototype

MongoCollection<Document> getCollection(String collectionName);

Source Link

Document

Gets a collection.

Usage

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 *
 * @param queryResources query to load resources to describe.
 * @param queryDescribe query to describe each candidate; it has to be a
 * describe/construct./*from  w w w.j  av a2 s .co m*/
 * @param collection collection name in Mongo db.
 */
private void loadResources(String queryResources, String queryDescribe, String c) {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port")); StringWriter writter = new StringWriter();) {
        RepositoryConnection conn = sesameService.getConnection();

        int num_candidates = 0;
        try {
            MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
            // Delete and create collection
            MongoCollection<Document> collection = db.getCollection(c);
            collection.drop();

            RDFWriter jsonldWritter = Rio.createWriter(RDFFormat.JSONLD, writter);
            TupleQueryResult resources = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryResources)
                    .evaluate();
            while (resources.hasNext()) {
                String resource = resources.next().getValue("subject").stringValue();
                conn.prepareGraphQuery(QueryLanguage.SPARQL, queryDescribe.replace("{}", resource))
                        .evaluate(jsonldWritter);
                Object compact = JsonLdProcessor.compact(JsonUtils.fromString(writter.toString()), context,
                        new JsonLdOptions());
                Map<String, Object> json = (Map<String, Object>) compact;
                json.put("_id", resource);
                collection.insertOne(new Document(json));
                writter.getBuffer().setLength(0);
                log.info("{} inserting describe for resource {}", ++num_candidates, resource);
            }
            log.info("Load {} resources into {} collection", num_candidates, collection);
        } finally {
            conn.close();
        }
    } catch (RepositoryException ex) {
        log.error("Cannot retrieve Sesame connection", ex);
    } catch (MalformedQueryException ex) {
        log.error("Query cannot be processed", ex);
    } catch (QueryEvaluationException ex) {
        log.error("Execution of query failed", ex);
    } catch (RDFHandlerException ex) {
        log.error("Cannot handle RDFWriter", ex);
    } catch (JsonLdError ex) {
        log.error("Cannot convert string to JSON-LD", ex);
    } catch (IOException ex) {
        log.error("IO error", ex);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Runs a set of named SPARQL graph queries and stores each compacted JSON-LD
 * result as one document in a Mongo collection, keyed by the query name.
 *
 * @param c       collection name in Mongo db; it is dropped and rebuilt.
 * @param queries map from document id ({@code _id}) to the SPARQL
 *                construct/describe query whose result is stored under it.
 */
private void loadStadistics(String c, HashMap<String, String> queries) {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port")); StringWriter writter = new StringWriter();) {
        RepositoryConnection conn = sesameService.getConnection();

        try {
            MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
            // Drop any previous content so the collection is rebuilt from scratch.
            MongoCollection<Document> collection = db.getCollection(c);
            collection.drop();

            RDFWriter jsonldWritter = Rio.createWriter(RDFFormat.JSONLD, writter);
            // Iterate entries directly instead of keySet() + per-key get() lookups.
            for (Map.Entry<String, String> query : queries.entrySet()) {
                String key = query.getKey();
                log.info("Getting {} query", key);

                conn.prepareGraphQuery(QueryLanguage.SPARQL, query.getValue()).evaluate(jsonldWritter);
                Object compact = JsonLdProcessor.compact(JsonUtils.fromString(writter.toString()), context,
                        new JsonLdOptions());
                Map<String, Object> json = (Map<String, Object>) compact;
                json.put("_id", key);
                collection.insertOne(new Document(json));
                // Reset the writer buffer so the next query starts empty.
                writter.getBuffer().setLength(0);
                log.info("Load aggregation into {} collection for id '{}'", c, key);
            }
        } finally {
            conn.close();
        }
    } catch (RepositoryException ex) {
        log.error("Cannot retrieve Sesame connection", ex);
    } catch (MalformedQueryException ex) {
        log.error("Query cannot be processed", ex);
    } catch (QueryEvaluationException ex) {
        log.error("Execution of query failed", ex);
    } catch (RDFHandlerException ex) {
        log.error("Cannot handle RDFWriter", ex);
    } catch (JsonLdError ex) {
        log.error("Cannot convert string to JSON-LD", ex);
    } catch (IOException ex) {
        log.error("IO error", ex);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches author profiles: for every author bound to {@code a} in the central
 * graph, fetches the profile JSON from the SPARQL repository and inserts it
 * into the AUTHORS collection keyed by the author URI. Work is spread over a
 * bounded pool of five worker threads.
 */
@Override
public void authors() {
    final Task task = taskManagerService.createSubTask("Caching authors profiles", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Drop any previous content so the collection is rebuilt from scratch.
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.AUTHORS.getValue());
        collection.drop();
        final List<Map<String, Value>> authorsRedi = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getAuthorsCentralGraph());
        task.updateTotalSteps(authorsRedi.size());
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        for (int i = 0; i < authorsRedi.size(); i++) {
            final String author = authorsRedi.get(i).get("a").stringValue();
            final int j = i;
            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    // Single consolidated progress message (the previous extra
                    // "Relating {}" line duplicated this information).
                    log.info("Relating {}/{}. Author: '{}' ", j + 1, authorsRedi.size(), author);
                    task.updateDetailMessage("URI", author);
                    task.updateProgress(j + 1);
                    // Get and store author data (json) from SPARQL repository.
                    String profiledata = commonService.getAuthorDataProfile(author);
                    Document parse = Document.parse(profiledata);
                    parse.append("_id", author);
                    collection.insertOne(parse);
                }
            });
        }
        threadPool.end();
    } catch (MarmottaException | InterruptedException ex) {
        log.error(ex.getMessage(), ex);
    }
    taskManagerService.endTask(task);
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches statistics per institution: for every organization, runs each of the
 * named statistics queries and stores the combined results as one document in
 * the STATISTICS_INST collection, keyed by the institution URI.
 */
@Override
public void LoadStatisticsbyInst() {
    Task task = taskManagerService.createSubTask("Caching statistics by Institution", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.STATISTICS_INST.getValue());
        collection.drop();

        // Names of the per-institution statistics queries to run.
        List<String> queries = new ArrayList<>();
        queries.add("inst_by_area");
        queries.add("pub_by_date");
        queries.add("author_by_inst");
        queries.add("inst_by_inst");
        queries.add("prov_by_inst");

        List<Map<String, Value>> org = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getListOrganizationQuery());
        task.updateTotalSteps((org.size() + 1) * (queries.size() + 1));
        int ints = 0;
        for (Map<String, Value> o : org) {

            String uri = o.get("URI").stringValue();
            String name = o.get("name").stringValue();
            String fullname = o.get("fullNameEs").stringValue();
            task.updateDetailMessage("Institution ", uri);
            // Fresh document per institution: reusing a single instance would
            // carry fields (and the assigned _id) from earlier iterations into
            // later inserts.
            Document parse = new Document();
            for (String q : queries) {
                ints++;
                String response = statisticsbyInstQuery(uri, q);

                parse.append(q, Document.parse(response));

                log.info("Stats Inst {} ", uri);
                log.info("Query {}", q);

                task.updateProgress(ints);

            }

            parse.append("_id", uri);
            parse.append("name", name);
            parse.append("fullname", fullname);
            collection.insertOne(parse);
        }
    } catch (MarmottaException ex) {
        // Attach the throwable instead of concatenating it into the message.
        log.error("Error caching statistics by institution", ex);
        java.util.logging.Logger.getLogger(PopulateMongoImpl.class.getName()).log(Level.INFO, null, ex);
    } finally {
        // Always end the task, even when the query fails; previously the task
        // was left open on any MarmottaException.
        taskManagerService.endTask(task);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches related-author networks: for every author in the central graph,
 * fetches the collaborators JSON and inserts it into the RELATEDAUTHORS
 * collection keyed by the author URI, using a bounded pool of five workers.
 */
@Override
public void networks() {
    final Task task = taskManagerService.createSubTask("Caching related authors", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Drop any previous content so the collection is rebuilt from scratch.
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.RELATEDAUTHORS.getValue());
        collection.drop();
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        task.updateMessage("Calculating related authors");
        final List<Map<String, Value>> query = fastSparqlService.getSparqlService().query(QueryLanguage.SPARQL,
                queriesService.getAuthorsCentralGraph());
        int i = 0;
        for (final Map<String, Value> mp : query) {
            final int j = i++;
            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    String stringValue = mp.get("a").stringValue();
                    log.info("Relating {} ", stringValue);
                    log.info("Relating {}/{} ", j, query.size());
                    task.updateDetailMessage("URI", stringValue);
                    task.updateDetailMessage("Status", j + "/" + query.size());
                    String collaboratorsData = commonService.getCollaboratorsData(stringValue);
                    Document parse = Document.parse(collaboratorsData);
                    parse.append("_id", stringValue);
                    collection.insertOne(parse);
                }
            });

        }
        threadPool.end();
    } catch (Exception w) {
        // Previously logged at DEBUG, which silently hid cache-rebuild
        // failures in production; surface them at ERROR instead.
        log.error(w.getMessage(), w);
    }
    taskManagerService.endTask(task);
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches cluster descriptions: refreshes the cluster totals first, then
 * fetches each cluster's JSON description from the SPARQL repository and
 * stores it in the CLUSTERS collection keyed by the cluster URI.
 */
@Override
public void clusters() {
    Task task = taskManagerService.createSubTask("Caching clusters", "Mongo Service");
    // Refresh the totals collection before rebuilding the cluster cache.
    clustersTotals();
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());

        // Rebuild the collection from scratch.
        MongoCollection<Document> storage = db.getCollection(MongoService.Collection.CLUSTERS.getValue());
        storage.drop();

        List<Map<String, Value>> rows = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterURIs());

        task.updateTotalSteps(rows.size());

        int step = 0;
        for (Map<String, Value> row : rows) {
            step++;
            String clusterUri = row.get("c").stringValue();
            // Report progress before the (potentially slow) fetch.
            log.info("Relating {}/{}. Cluster: '{}' ", step, rows.size(), clusterUri);
            task.updateDetailMessage("URI", clusterUri);
            task.updateProgress(step);
            // Get and store the cluster data (json) from the SPARQL repository.
            String payload = commonService.getCluster(clusterUri);
            Document entry = Document.parse(payload);
            entry.append("_id", clusterUri);
            storage.insertOne(entry);
        }
    } catch (MarmottaException ex) {
        log.error(ex.getMessage(), ex);
    } finally {
        taskManagerService.endTask(task);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches cluster author totals: one document per cluster area (keyed by the
 * area URI) carrying its label, total author count, and the list of
 * subcluster totals embedded under "subclusters".
 */
public void clustersTotals() {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());

        // Rebuild the totals collection from scratch.
        MongoCollection<Document> totals = db
                .getCollection(MongoService.Collection.CLUSTERSTOTALS.getValue());
        totals.drop();
        log.info("Counting clusters");
        List<Map<String, Value>> clusterRows = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterTotals());
        log.info("Writing totals");
        for (Map<String, Value> row : clusterRows) {
            String clusterLabel = row.get("k").stringValue();
            log.info("Cluster {}", clusterLabel);
            String areaUri = row.get("area").stringValue();
            String authorCount = row.get("totalAuthors").stringValue();
            Document entry = new Document()
                    .append("_id", areaUri)
                    .append("area", areaUri)
                    .append("k", clusterLabel)
                    .append("totalAuthors", authorCount);
            List<BasicDBObject> subclusters = new ArrayList<>();
            List<Map<String, Value>> subRows = sparqlService.query(QueryLanguage.SPARQL,
                    queriesService.getSubClusterTotals(areaUri));
            for (Map<String, Value> sub : subRows) {
                // Rows without a subcluster binding carry no data; skip them.
                if (sub.get("sc") == null) {
                    continue;
                }
                BasicDBObject subEntry = new BasicDBObject();
                subEntry.put("sc", sub.get("sc").stringValue());
                subEntry.put("k", sub.get("k").stringValue());
                subEntry.put("totalAuthors", sub.get("totalAuthors").stringValue());
                subclusters.add(subEntry);
            }
            entry.append("subclusters", subclusters);
            totals.insertOne(entry);
        }

    } catch (MarmottaException ex) {
        log.error(ex.getMessage(), ex);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches author listings per area: for each (cluster, subcluster) pair the
 * author graph is fetched as JSON and stored in the AUTHORS_AREA collection,
 * keyed by a compound {cluster, subcluster} id. Fetches run on a bounded
 * pool of five worker threads.
 */
@Override
public void authorsByArea() {
    final Task task = taskManagerService.createSubTask("Caching Authors by Area", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());

        // Rebuild the collection from scratch.
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.AUTHORS_AREA.getValue());
        collection.drop();

        final List<Map<String, Value>> areas = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterAndSubclusterURIs());

        task.updateTotalSteps(areas.size());
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        int position = 0;
        for (Map<String, Value> area : areas) {
            // Effectively-final copies for capture by the worker below.
            final int step = position++;
            final String cluster = area.get("cluster").stringValue();
            final String subcluster = area.get("subcluster").stringValue();

            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    // Progress reporting.
                    log.info("Relating {}/{}. Cluster: '{}' - Subcluster: '{}'", step + 1, areas.size(), cluster,
                            subcluster);
                    task.updateDetailMessage("Cluster", cluster);
                    task.updateDetailMessage("Subluster", subcluster);
                    task.updateProgress(step + 1);
                    // Get authors of an area from the SPARQL endpoint and transform them to JSON .
                    String authorsByArea = commonService.getsubClusterGraph(cluster, subcluster);
                    Document parse = Document.parse(authorsByArea);
                    BasicDBObject key = new BasicDBObject();
                    key.put("cluster", cluster);
                    key.put("subcluster", subcluster);
                    parse.append("_id", key);
                    collection.insertOne(parse);
                }
            });
        }
        threadPool.end();
    } catch (MarmottaException | InterruptedException ex) {
        log.error(ex.getMessage(), ex);
    } finally {
        taskManagerService.endTask(task);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches author listings per discipline: for each cluster URI the author
 * graph is fetched as JSON and stored in the AUTHORS_DISCPLINE collection,
 * keyed by a compound {cluster} id. Fetches run on a bounded pool of five
 * worker threads.
 */
@Override
public void authorsByDiscipline() {
    final Task task = taskManagerService.createSubTask("Caching Authors by Discipline", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());

        // Rebuild the collection from scratch.
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.AUTHORS_DISCPLINE.getValue());
        collection.drop();

        final List<Map<String, Value>> clusters = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterURIs());

        task.updateTotalSteps(clusters.size());
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        int position = 0;
        for (Map<String, Value> row : clusters) {
            // Effectively-final copies for capture by the worker below.
            final int step = position++;
            final String cluster = row.get("c").stringValue();
            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    // Progress reporting.
                    log.info("Relating {}/{}. Cluster: '{}'", step + 1, clusters.size(), cluster);
                    task.updateDetailMessage("Cluster", cluster);
                    task.updateProgress(step + 1);
                    // Get authors of an area from the SPARQL endpoint and transform them to JSON .
                    String authorsByDisc = commonService.getClusterGraph(cluster);
                    Document parse = Document.parse(authorsByDisc);
                    BasicDBObject key = new BasicDBObject();
                    key.put("cluster", cluster);
                    parse.append("_id", key);
                    collection.insertOne(parse);
                }
            });
        }
        threadPool.end();
    } catch (MarmottaException | InterruptedException ex) {
        log.error(ex.getMessage(), ex);
    } finally {
        taskManagerService.endTask(task);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches country nodes: for every country in the repository, resolves its
 * country code, builds the country-nodes document and stores it in the
 * COUNTRIES collection keyed by the country value.
 */
@Override
public void Countries() {
    Task task = taskManagerService.createSubTask("Caching countries", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());

        // Rebuild the collection from scratch.
        MongoCollection<Document> storage = db.getCollection(MongoService.Collection.COUNTRIES.getValue());
        storage.drop();
        try {
            List<Map<String, Value>> rows = sparqlService.query(QueryLanguage.SPARQL,
                    queriesService.getCountries());
            task.updateTotalSteps(rows.size());
            int step = 0;
            for (Map<String, Value> row : rows) {
                step++;
                String country = row.get("co").stringValue();
                String isoCode = getCountryCode(country);
                // Build the node document for this country and store it.
                Document entry = Document.parse(countrynodes(country, isoCode).toString());
                entry.append("_id", country);
                storage.insertOne(entry);

                task.updateDetailMessage("Country", country);
                task.updateProgress(step);
            }
        } catch (MarmottaException ex) {
            java.util.logging.Logger.getLogger(PopulateMongoImpl.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            taskManagerService.endTask(task);
        }
    }
}