List of usage examples for com.mongodb.client MongoCollection drop
void drop();
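drop() removes the whole collection from the database, including all of its documents and any indexes defined on it. A minimal sketch of the call, assuming a MongoDB instance on localhost:27017; the database and collection names here are placeholders for illustration:

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;

public class DropExample {
    public static void main(String[] args) {
        // Connection string, database, and collection names are placeholders.
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoDatabase db = client.getDatabase("exampleDb");
            MongoCollection<Document> collection = db.getCollection("exampleCollection");
            // Removes the collection, its documents, and its indexes.
            // Dropping a collection that does not exist is a no-op.
            collection.drop();
        }
    }
}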
From source file:com.redhat.thermostat.gateway.common.mongodb.executor.MongoExecutor.java
License:Open Source License
public MongoDataResultContainer execDeleteRequest(MongoCollection<Document> collection, List<String> queries,
        Set<String> realms) {
    MongoDataResultContainer metaDataContainer = new MongoDataResultContainer();
    if ((queries != null && !queries.isEmpty()) || (realms != null && !realms.isEmpty())) {
        Bson bsonQueries = MongoRequestFilters.buildQuery(queries, realms);
        metaDataContainer.setDeleteReqMatches(collection.count(bsonQueries));
        collection.deleteMany(bsonQueries);
    } else {
        metaDataContainer.setDeleteReqMatches(collection.count());
        collection.drop();
    }
    return metaDataContainer;
}
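Note the branching above: when no filters are supplied, the code calls drop() instead of deleteMany() with an empty filter. Dropping is the cheaper way to clear out an entire collection, but unlike deleteMany() it also discards the collection's indexes; MongoDB recreates the collection implicitly on the next insert.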
From source file:consultasEntradaSaidaArquivo.LeituraXLS.java
public static void leituraDeArquivos(MongoDatabase db) throws IOException, BiffException {
    File folder = new File("src/arquivosPlataformaP56");
    File[] listOfFiles = folder.listFiles();
    MongoCollection<Document> myCollection = db.getCollection("pt");
    myCollection.drop();
    for (File file : listOfFiles) {
        if (file.isFile()) {
            lerArquivo(db, file.getName());
        }
    }
}
From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java
License:Apache License
/**
 * @param queryResources query to load the resources to describe.
 * @param queryDescribe  query to describe each candidate; it has to be a
 *                       describe/construct query.
 * @param c              name of the target collection in the Mongo db.
 */
private void loadResources(String queryResources, String queryDescribe, String c) {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));
            StringWriter writter = new StringWriter()) {
        RepositoryConnection conn = sesameService.getConnection();
        int num_candidates = 0;
        try {
            MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
            // Delete and recreate the collection.
            MongoCollection<Document> collection = db.getCollection(c);
            collection.drop();
            RDFWriter jsonldWritter = Rio.createWriter(RDFFormat.JSONLD, writter);
            TupleQueryResult resources = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryResources)
                    .evaluate();
            while (resources.hasNext()) {
                String resource = resources.next().getValue("subject").stringValue();
                conn.prepareGraphQuery(QueryLanguage.SPARQL, queryDescribe.replace("{}", resource))
                        .evaluate(jsonldWritter);
                Object compact = JsonLdProcessor.compact(JsonUtils.fromString(writter.toString()), context,
                        new JsonLdOptions());
                Map<String, Object> json = (Map<String, Object>) compact;
                json.put("_id", resource);
                collection.insertOne(new Document(json));
                writter.getBuffer().setLength(0);
                log.info("{} inserting describe for resource {}", ++num_candidates, resource);
            }
            log.info("Loaded {} resources into the {} collection", num_candidates, c);
        } finally {
            conn.close();
        }
    } catch (RepositoryException ex) {
        log.error("Cannot retrieve Sesame connection", ex);
    } catch (MalformedQueryException ex) {
        log.error("Query cannot be processed", ex);
    } catch (QueryEvaluationException ex) {
        log.error("Execution of query failed", ex);
    } catch (RDFHandlerException ex) {
        log.error("Cannot handle RDFWriter", ex);
    } catch (JsonLdError ex) {
        log.error("Cannot convert string to JSON-LD", ex);
    } catch (IOException ex) {
        log.error("IO error", ex);
    }
}
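The PopulateMongoImpl examples here and below all follow the same drop-and-rebuild caching pattern: drop the target collection, re-query a SPARQL endpoint, and insert one document per entity with the resource URI (or query key) as the _id, so each entity maps to exactly one cached document.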
From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java
License:Apache License
private void loadStadistics(String c, HashMap<String, String> queries) {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"));
            StringWriter writter = new StringWriter()) {
        RepositoryConnection conn = sesameService.getConnection();
        try {
            MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
            // Delete and recreate the collection.
            MongoCollection<Document> collection = db.getCollection(c);
            collection.drop();
            RDFWriter jsonldWritter = Rio.createWriter(RDFFormat.JSONLD, writter);
            for (String key : queries.keySet()) {
                log.info("Getting {} query", key);
                conn.prepareGraphQuery(QueryLanguage.SPARQL, queries.get(key)).evaluate(jsonldWritter);
                Object compact = JsonLdProcessor.compact(JsonUtils.fromString(writter.toString()), context,
                        new JsonLdOptions());
                Map<String, Object> json = (Map<String, Object>) compact;
                json.put("_id", key);
                collection.insertOne(new Document(json));
                writter.getBuffer().setLength(0);
                log.info("Load aggregation into {} collection for id '{}'", c, key);
            }
        } finally {
            conn.close();
        }
    } catch (RepositoryException ex) {
        log.error("Cannot retrieve Sesame connection", ex);
    } catch (MalformedQueryException ex) {
        log.error("Query cannot be processed", ex);
    } catch (QueryEvaluationException ex) {
        log.error("Execution of query failed", ex);
    } catch (RDFHandlerException ex) {
        log.error("Cannot handle RDFWriter", ex);
    } catch (JsonLdError ex) {
        log.error("Cannot convert string to JSON-LD", ex);
    } catch (IOException ex) {
        log.error("IO error", ex);
    }
}
From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java
License:Apache License
@Override
public void authors() {
    final Task task = taskManagerService.createSubTask("Caching authors profiles", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"))) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Delete and recreate the collection.
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.AUTHORS.getValue());
        collection.drop();
        final List<Map<String, Value>> authorsRedi = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getAuthorsCentralGraph());
        task.updateTotalSteps(authorsRedi.size());
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        for (int i = 0; i < authorsRedi.size(); i++) {
            final String author = authorsRedi.get(i).get("a").stringValue();
            final int j = i;
            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    // Print progress.
                    log.info("Relating {}/{}. Author: '{}'", j + 1, authorsRedi.size(), author);
                    task.updateDetailMessage("URI", author);
                    task.updateProgress(j + 1);
                    // Get and store author data (JSON) from the SPARQL repository.
                    String profiledata = commonService.getAuthorDataProfile(author);
                    Document parse = Document.parse(profiledata);
                    parse.append("_id", author);
                    collection.insertOne(parse);
                }
            });
        }
        threadPool.end();
    } catch (MarmottaException | InterruptedException ex) {
        log.error(ex.getMessage(), ex);
    }
    taskManagerService.endTask(task);
}
From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java
License:Apache License
@Override
public void LoadStatisticsbyInst() {
    Task task = taskManagerService.createSubTask("Caching statistics by Institution", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"))) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.STATISTICS_INST.getValue());
        collection.drop();
        List<String> queries = new ArrayList<>();
        queries.add("inst_by_area");
        queries.add("pub_by_date");
        queries.add("author_by_inst");
        queries.add("inst_by_inst");
        queries.add("prov_by_inst");
        List<Map<String, Value>> org = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getListOrganizationQuery());
        task.updateTotalSteps((org.size() + 1) * (queries.size() + 1));
        int ints = 0;
        for (Map<String, Value> o : org) {
            String uri = o.get("URI").stringValue();
            String name = o.get("name").stringValue();
            String fullname = o.get("fullNameEs").stringValue();
            task.updateDetailMessage("Institution", uri);
            // Build a fresh document per institution so fields do not
            // accumulate across iterations.
            Document parse = new Document();
            for (String q : queries) {
                ints++;
                String response = statisticsbyInstQuery(uri, q);
                parse.append(q, Document.parse(response));
                log.info("Stats Inst {}, query {}", uri, q);
                task.updateProgress(ints);
            }
            parse.append("_id", uri);
            parse.append("name", name);
            parse.append("fullname", fullname);
            collection.insertOne(parse);
        }
    } catch (MarmottaException ex) {
        log.error(ex.getMessage(), ex);
    } finally {
        taskManagerService.endTask(task);
    }
}
From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java
License:Apache License
@Override
public void networks() {
    final Task task = taskManagerService.createSubTask("Caching related authors", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"))) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Delete and recreate the collection.
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.RELATEDAUTHORS.getValue());
        collection.drop();
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        task.updateMessage("Calculating related authors");
        final List<Map<String, Value>> query = fastSparqlService.getSparqlService().query(QueryLanguage.SPARQL,
                queriesService.getAuthorsCentralGraph());
        int i = 0;
        for (final Map<String, Value> mp : query) {
            final int j = i++;
            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    String stringValue = mp.get("a").stringValue();
                    log.info("Relating {}/{}: {}", j + 1, query.size(), stringValue);
                    task.updateDetailMessage("URI", stringValue);
                    task.updateDetailMessage("Status", (j + 1) + "/" + query.size());
                    String collaboratorsData = commonService.getCollaboratorsData(stringValue);
                    Document parse = Document.parse(collaboratorsData);
                    parse.append("_id", stringValue);
                    collection.insertOne(parse);
                }
            });
        }
        threadPool.end();
    } catch (Exception ex) {
        log.error(ex.getMessage(), ex);
    }
    taskManagerService.endTask(task);
}
From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java
License:Apache License
@Override
public void clusters() {
    Task task = taskManagerService.createSubTask("Caching clusters", "Mongo Service");
    clustersTotals();
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"))) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Delete and recreate the collection.
        MongoCollection<Document> collection = db.getCollection(MongoService.Collection.CLUSTERS.getValue());
        collection.drop();
        List<Map<String, Value>> clusters = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterURIs());
        task.updateTotalSteps(clusters.size());
        for (int i = 0; i < clusters.size(); i++) {
            String cluster = clusters.get(i).get("c").stringValue();
            // Print progress.
            log.info("Relating {}/{}. Cluster: '{}'", i + 1, clusters.size(), cluster);
            task.updateDetailMessage("URI", cluster);
            task.updateProgress(i + 1);
            // Get and store cluster data (JSON) from the SPARQL repository.
            String clusterData = commonService.getCluster(cluster);
            Document parse = Document.parse(clusterData);
            parse.append("_id", cluster);
            collection.insertOne(parse);
        }
    } catch (MarmottaException ex) {
        log.error(ex.getMessage(), ex);
    } finally {
        taskManagerService.endTask(task);
    }
}
From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java
License:Apache License
public void clustersTotals() {
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"))) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Delete and recreate the collection.
        MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.CLUSTERSTOTALS.getValue());
        collection.drop();
        log.info("Counting clusters");
        List<Map<String, Value>> query = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterTotals());
        log.info("Writing totals");
        for (Map<String, Value> a : query) {
            String label = a.get("k").stringValue();
            log.info("Cluster {}", label);
            String uri = a.get("area").stringValue();
            String tot = a.get("totalAuthors").stringValue();
            Document parse = new Document();
            parse.append("_id", uri);
            parse.append("area", uri);
            parse.append("k", label);
            parse.append("totalAuthors", tot);
            List<BasicDBObject> lsdoc = new ArrayList<>();
            List<Map<String, Value>> query1 = sparqlService.query(QueryLanguage.SPARQL,
                    queriesService.getSubClusterTotals(uri));
            for (Map<String, Value> b : query1) {
                if (b.get("sc") == null) {
                    continue;
                }
                String sc = b.get("sc").stringValue();
                String k = b.get("k").stringValue();
                String totalAuthors = b.get("totalAuthors").stringValue();
                BasicDBObject parseSub = new BasicDBObject();
                parseSub.put("sc", sc);
                parseSub.put("k", k);
                parseSub.put("totalAuthors", totalAuthors);
                lsdoc.add(parseSub);
            }
            parse.append("subclusters", lsdoc);
            collection.insertOne(parse);
        }
    } catch (MarmottaException ex) {
        log.error(ex.getMessage(), ex);
    }
}
From source file:edu.ucuenca.storage.services.PopulateMongoImpl.java
License:Apache License
@Override
public void authorsByArea() {
    final Task task = taskManagerService.createSubTask("Caching Authors by Area", "Mongo Service");
    try (MongoClient client = new MongoClient(conf.getStringConfiguration("mongo.host"),
            conf.getIntConfiguration("mongo.port"))) {
        MongoDatabase db = client.getDatabase(MongoService.Database.NAME.getDBName());
        // Delete and recreate the collection.
        final MongoCollection<Document> collection = db
                .getCollection(MongoService.Collection.AUTHORS_AREA.getValue());
        collection.drop();
        final List<Map<String, Value>> areas = sparqlService.query(QueryLanguage.SPARQL,
                queriesService.getClusterAndSubclusterURIs());
        task.updateTotalSteps(areas.size());
        BoundedExecutor threadPool = BoundedExecutor.getThreadPool(5);
        for (int i = 0; i < areas.size(); i++) {
            final int j = i;
            final String cluster = areas.get(i).get("cluster").stringValue();
            final String subcluster = areas.get(i).get("subcluster").stringValue();
            threadPool.submitTask(new Runnable() {
                @Override
                public void run() {
                    // Print progress.
                    log.info("Relating {}/{}. Cluster: '{}' - Subcluster: '{}'", j + 1, areas.size(),
                            cluster, subcluster);
                    task.updateDetailMessage("Cluster", cluster);
                    task.updateDetailMessage("Subcluster", subcluster);
                    task.updateProgress(j + 1);
                    // Get authors of an area from the SPARQL endpoint and transform them to JSON.
                    String authorsByArea = commonService.getsubClusterGraph(cluster, subcluster);
                    Document parse = Document.parse(authorsByArea);
                    BasicDBObject key = new BasicDBObject();
                    key.put("cluster", cluster);
                    key.put("subcluster", subcluster);
                    parse.append("_id", key);
                    collection.insertOne(parse);
                }
            });
        }
        threadPool.end();
    } catch (MarmottaException | InterruptedException ex) {
        log.error(ex.getMessage(), ex);
    } finally {
        taskManagerService.endTask(task);
    }
}