Example usage for com.mongodb.client MongoCollection insertOne

List of usage examples for com.mongodb.client MongoCollection insertOne

Introduction

On this page you can find example usage of com.mongodb.client MongoCollection insertOne.

Prototype

void insertOne(TDocument document);

Source Link

Document

Inserts the provided document.

Usage

From source file:dto.Dto.java

/**
 * Registers a new advisory request ("asesoria") with a sequential string id
 * and an initial status of "pendiente".
 *
 * @param profe  advisor id stored under "idAsesor".
 * @param tema   topic of the advisory session.
 * @param asunto subject/description of the request.
 * @param alumno student id stored under "idAlumno".
 */
public void registrarAsesoria(String profe, String tema, String asunto, String alumno) {
    c = new Conexion();
    MongoCollection<Document> col = c.getConnection("asesorias");
    // Derive the next id from the last stored one (string-encoded integer).
    int nextId = Integer.parseInt(getLastAsesoriaId()) + 1;
    Document asesoria = new Document()
            .append("_id", String.valueOf(nextId))
            .append("tema", tema)
            .append("idAsesor", profe)
            .append("idAlumno", alumno)
            .append("asunto", asunto)
            .append("estado", "pendiente");
    col.insertOne(asesoria);
}

From source file:dto.Dto.java

/**
 * Registers a thesis topic ("tema") proposed by a student. The advisor is
 * initially unassigned ("0") and the proposal status is "pendiente".
 *
 * @param tema    thesis title stored under "titulo".
 * @param usuario student id stored under "idAlumno"; also used to look up the section.
 */
public void registrarTema(String tema, String usuario) {
    c = new Conexion();
    MongoCollection<Document> col = c.getConnection("tesis_alumno_asesor");
    // Derive the next id from the last stored one (string-encoded integer).
    int nextId = Integer.parseInt(getLastTesisId()) + 1;
    Document tesis = new Document()
            .append("_id", String.valueOf(nextId))
            .append("titulo", tema)
            .append("idAsesor", "0")
            .append("estadoP", "pendiente")
            .append("estadoA", "")
            .append("idAlumno", usuario)
            .append("seccion", getSeccion(usuario));
    col.insertOne(tesis);
}

From source file:DutyDatabase.DutyScheduleDB.java

License:Open Source License

/**
 * Stores a scheduled calendar in the database.
 *
 * @param id           ID of the user (unique to the user).
 * @param calendarName name of the calendar to be stored (unique to calendars owned by the user).
 * @param cal          list of DutyBlocks comprising the calendar to be stored.
 */
public void storeScheduledCalendar(String id, String calendarName, ArrayList<DutyBlock> cal) {
    MongoCollection<ScheduledDuty> scheduledCalendars = db.getCollection("ScheduledCalendars", ScheduledDuty.class);
    // The composite key (user id + calendar name) uniquely identifies the calendar.
    ScheduledDuty entry = new ScheduledDuty(id + calendarName, cal);
    scheduledCalendars.insertOne(entry);
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 *
 * @param queryResources query to load resources to describe.
 * @param queryDescribe query to describe each candidate; it has to be a
 * describe/construct.// w ww  .j av  a 2 s.c o  m
 * @param collection collection name in Mongo db.
 */
/**
 * Rebuilds a Mongo collection with the JSON-LD description of every resource
 * returned by {@code queryResources}.
 *
 * @param queryResources SPARQL query listing the resources to describe; must bind {@code ?subject}.
 * @param queryDescribe  SPARQL describe/construct query; the placeholder "{}" is replaced
 *                       with each resource URI before evaluation.
 * @param c              target collection name in the Mongo database.
 */
private void loadResources(String queryResources, String queryDescribe, String c) {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port")); StringWriter writer = new StringWriter();) {
        RepositoryConnection conn = sesameService.getConnection();

        int numCandidates = 0;
        try {
            MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
            // Drop any previous contents so the collection is rebuilt from scratch.
            MongoCollection<Document> collection = db.getCollection(c);
            collection.drop();

            RDFWriter jsonldWriter = Rio.createWriter(RDFFormat.JSONLD, writer);
            TupleQueryResult resources = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryResources)
                    .evaluate();
            while (resources.hasNext()) {
                String resource = resources.next().getValue("subject").stringValue();
                conn.prepareGraphQuery(QueryLanguage.SPARQL, queryDescribe.replace("{}", resource))
                        .evaluate(jsonldWriter);
                // Compact the raw JSON-LD against the shared context before storing.
                Object compact = JsonLdProcessor.compact(JsonUtils.fromString(writer.toString()), context,
                        new JsonLdOptions());
                Map<String, Object> json = (Map<String, Object>) compact;
                json.put("_id", resource);
                collection.insertOne(new Document(json));
                // Reset the shared writer buffer before the next describe.
                writer.getBuffer().setLength(0);
                log.info("{} inserting describe for resource {}", ++numCandidates, resource);
            }
            // Fix: previously this logged the MongoCollection object (its toString),
            // not the collection name.
            log.info("Load {} resources into {} collection", numCandidates, c);
        } finally {
            conn.close();
        }
    } catch (RepositoryException ex) {
        log.error("Cannot retrieve Sesame connection", ex);
    } catch (MalformedQueryException ex) {
        log.error("Query cannot be processed", ex);
    } catch (QueryEvaluationException ex) {
        log.error("Execution of query failed", ex);
    } catch (RDFHandlerException ex) {
        log.error("Cannot handle RDFWriter", ex);
    } catch (JsonLdError ex) {
        log.error("Cannot convert string to JSON-LD", ex);
    } catch (IOException ex) {
        log.error("IO error", ex);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Rebuilds a statistics collection: runs each named SPARQL graph query,
 * compacts its JSON-LD output, and stores it as one Mongo document whose
 * {@code _id} is the query name.
 *
 * @param c       target collection name in the Mongo database.
 * @param queries map from document id to the SPARQL graph query that produces it.
 */
private void loadStadistics(String c, HashMap<String, String> queries) {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port")); StringWriter writer = new StringWriter();) {
        RepositoryConnection conn = sesameService.getConnection();

        try {
            MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
            // Drop any previous contents so the collection is rebuilt from scratch.
            MongoCollection<Document> collection = db.getCollection(c);
            collection.drop();

            RDFWriter jsonldWriter = Rio.createWriter(RDFFormat.JSONLD, writer);
            // Iterate entries directly instead of keySet() + get() lookups.
            for (Map.Entry<String, String> entry : queries.entrySet()) {
                String key = entry.getKey();
                log.info("Getting {} query", key);

                conn.prepareGraphQuery(QueryLanguage.SPARQL, entry.getValue()).evaluate(jsonldWriter);
                Object compact = JsonLdProcessor.compact(JsonUtils.fromString(writer.toString()), context,
                        new JsonLdOptions());
                Map<String, Object> json = (Map<String, Object>) compact;
                json.put("_id", key);
                collection.insertOne(new Document(json));
                // Reset the shared writer buffer before the next query.
                writer.getBuffer().setLength(0);
                log.info("Load aggregation into {} collection for id '{}'", c, key);
            }
        } finally {
            conn.close();
        }
    } catch (RepositoryException ex) {
        log.error("Cannot retrieve Sesame connection", ex);
    } catch (MalformedQueryException ex) {
        log.error("Query cannot be processed", ex);
    } catch (QueryEvaluationException ex) {
        log.error("Execution of query failed", ex);
    } catch (RDFHandlerException ex) {
        log.error("Cannot handle RDFWriter", ex);
    } catch (JsonLdError ex) {
        log.error("Cannot convert string to JSON-LD", ex);
    } catch (IOException ex) {
        log.error("IO error", ex);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches one profile document per author in the authors Mongo collection.
 * Authors are fetched from the central graph via SPARQL; each profile is
 * retrieved as JSON, parsed into a Document, keyed by the author URI, and
 * inserted. Work is fanned out over a bounded pool of 5 worker threads;
 * threadPool.end() blocks until all submitted tasks finish.
 */
@Override
public void authors() {
    final Task task = taskManagerService.createSubTask("Caching authors profiles", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Drop any previous contents so the collection is rebuilt from scratch.
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.AUTHORS.getValue());
        collection.drop();
        final List<Map<String, Value>> authorsRedi = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getAuthorsCentralGraph());
        task.updateTotalSteps(authorsRedi.size());
        // Bounded pool: at most 5 author profiles are built concurrently.
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        for (int i = 0; i < authorsRedi.size(); i++) {
            final String author = authorsRedi.get(i).get("a").stringValue();
            // Captured copy of the loop index for use inside the task.
            final int j = i;
            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    // Print progress
                    log.info("Relating {} ", author);
                    log.info("Relating {}/{}. Author: '{}' ", j + 1, authorsRedi.size(), author);
                    task.updateDetailMessage("URI", author);
                    task.updateProgress(j + 1);
                    // Get and store author data (json) from SPARQL repository.
                    String profiledata = commonService.getAuthorDataProfile(author);
                    Document parse = Document.parse(profiledata);
                    parse.append("_id", author);
                    collection.insertOne(parse);
                }
            });
        }
        // Waits for all submitted tasks to complete.
        threadPool.end();
    } catch (MarmottaException | InterruptedException ex) {
        log.error(ex.getMessage(), ex);
    }
    taskManagerService.endTask(task);
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches per-institution statistics in Mongo: for every organization in the
 * central graph, runs a fixed set of statistics queries and stores the merged
 * results as one document keyed by the institution URI.
 */
@Override
public void LoadStatisticsbyInst() {
    Task task = taskManagerService.createSubTask("Caching statistics by Institution", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.STATISTICS_INST.getValue());
        // Drop any previous contents so the collection is rebuilt from scratch.
        collection.drop();

        // Names of the statistics queries run for each institution.
        List<String> queries = new ArrayList<>();
        queries.add("inst_by_area");
        queries.add("pub_by_date");
        queries.add("author_by_inst");
        queries.add("inst_by_inst");
        queries.add("prov_by_inst");

        List<Map<String, Value>> org = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getListOrganizationQuery());
        task.updateTotalSteps((org.size() + 1) * (queries.size() + 1));
        int ints = 0;
        for (Map<String, Value> o : org) {
            String uri = o.get("URI").stringValue();
            String name = o.get("name").stringValue();
            String fullname = o.get("fullNameEs").stringValue();
            task.updateDetailMessage("Institution ", uri);
            // Fix: build a fresh Document per institution. Previously a single
            // Document was created once outside the loop and re-used/re-inserted,
            // carrying state over between iterations.
            Document parse = new Document();
            for (String q : queries) {
                ints++;
                String response = statisticsbyInstQuery(uri, q);

                parse.append(q, Document.parse(response));

                log.info("Stats Inst {} ", uri);
                log.info("Query {}", q);

                task.updateProgress(ints);

            }

            parse.append("_id", uri);
            parse.append("name", name);
            parse.append("fullname", fullname);
            collection.insertOne(parse);
        }
        taskManagerService.endTask(task);
        // loadStadistics(MongoService.Collection.STATISTICS.getValue(), queries);
    } catch (MarmottaException ex) {
        // Fix: log with the throwable so the stack trace is preserved,
        // instead of concatenating the exception into the message.
        log.error("Error caching statistics by institution", ex);
        java.util.logging.Logger.getLogger(PopulateMongoImpl.class.getName()).log(Level.INFO, null, ex);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches related-author ("collaborator") documents in Mongo. Authors are read
 * from the central graph; each author's collaborator data is fetched as JSON,
 * keyed by the author URI, and inserted. Work runs on a bounded pool of 5
 * worker threads; threadPool.end() blocks until all tasks finish.
 */
@Override
public void networks() {
    final Task task = taskManagerService.createSubTask("Caching related authors", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Delete and create collection
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.RELATEDAUTHORS.getValue());
        collection.drop();
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        task.updateMessage("Calculating related authors");
        final List<Map<String, Value>> query = fastSparqlService.getSparqlService().query(QueryLanguage.SPARQL,
                queriesService.getAuthorsCentralGraph());
        int i = 0;
        for (final Map<String, Value> mp : query) {
            final int j = i++;
            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    String stringValue = mp.get("a").stringValue();
                    log.info("Relating {} ", stringValue);
                    log.info("Relating {}/{} ", j, query.size());
                    task.updateDetailMessage("URI", stringValue);
                    task.updateDetailMessage("Status", j + "/" + query.size());
                    String collaboratorsData = commonService.getCollaboratorsData(stringValue);
                    Document parse = Document.parse(collaboratorsData);
                    parse.append("_id", stringValue);
                    collection.insertOne(parse);
                }
            });

        }
        // Waits for all submitted tasks to complete.
        threadPool.end();
    } catch (Exception w) {
        // Fix: failures were previously hidden at debug level; report them as
        // errors, consistent with the other caching methods in this class.
        log.error(w.getMessage(), w);
    }
    taskManagerService.endTask(task);
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches one document per knowledge-area cluster in the clusters Mongo
 * collection. Also refreshes the cluster totals via {@link #clustersTotals()}.
 */
@Override
public void clusters() {
    Task task = taskManagerService.createSubTask("Caching clusters", "Mongo Service");
    clustersTotals();
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());

        // Rebuild the clusters collection from scratch.
        MongoCollection<Document> clustersCollection = db.getCollection(MongoService.Collection.CLUSTERS.getValue());
        clustersCollection.drop();

        List<Map<String, Value>> rows = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterURIs());

        task.updateTotalSteps(rows.size());

        int step = 0;
        for (Map<String, Value> row : rows) {
            String cluster = row.get("c").stringValue();
            step++;
            // Print progress
            log.info("Relating {}/{}. Cluster: '{}' ", step, rows.size(), cluster);
            task.updateDetailMessage("URI", cluster);
            task.updateProgress(step);
            // Get and store cluster data (json) from SPARQL repository.
            String clusterData = commonService.getCluster(cluster);
            Document doc = Document.parse(clusterData);
            doc.append("_id", cluster);
            clustersCollection.insertOne(doc);
        }
    } catch (MarmottaException ex) {
        log.error(ex.getMessage(), ex);
    } finally {
        taskManagerService.endTask(task);
    }
}

From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java

License:Apache License

/**
 * Caches author totals per cluster (and per sub-cluster) in the cluster-totals
 * Mongo collection. Each document is keyed by the cluster URI and embeds a
 * "subclusters" array with the per-sub-cluster totals.
 */
public void clustersTotals() {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());

        // Rebuild the totals collection from scratch.
        MongoCollection<Document> totalsCollection = db
                .getCollection(MongoService.Collection.CLUSTERSTOTALS.getValue());
        totalsCollection.drop();
        log.info("Counting clusters");
        List<Map<String, Value>> clusterRows = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterTotals());
        log.info("Writing totals");
        for (Map<String, Value> clusterRow : clusterRows) {
            String label = clusterRow.get("k").stringValue();
            log.info("Cluster {}", label);
            String areaUri = clusterRow.get("area").stringValue();
            String totalAuthors = clusterRow.get("totalAuthors").stringValue();

            Document totalsDoc = new Document();
            totalsDoc.append("_id", areaUri);
            totalsDoc.append("area", areaUri);
            totalsDoc.append("k", label);
            totalsDoc.append("totalAuthors", totalAuthors);

            // Collect totals for each sub-cluster of this area.
            List<BasicDBObject> subclusters = new ArrayList<>();
            List<Map<String, Value>> subRows = sparqlService.query(QueryLanguage.SPARQL,
                    queriesService.getSubClusterTotals(areaUri));
            for (Map<String, Value> subRow : subRows) {
                // Rows without a sub-cluster binding are skipped.
                if (subRow.get("sc") == null) {
                    continue;
                }
                BasicDBObject subDoc = new BasicDBObject();
                subDoc.put("sc", subRow.get("sc").stringValue());
                subDoc.put("k", subRow.get("k").stringValue());
                subDoc.put("totalAuthors", subRow.get("totalAuthors").stringValue());
                subclusters.add(subDoc);
            }
            totalsDoc.append("subclusters", subclusters);
            totalsCollection.insertOne(totalsDoc);
        }

    } catch (MarmottaException ex) {
        log.error(ex.getMessage(), ex);
    }
}