List of usage examples for com.mongodb.client MongoCollection deleteOne
DeleteResult deleteOne(Bson filter);
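A minimal, self-contained sketch of the call before the full examples below. The host, database, and collection names ("localhost", "mydb", "books") and the filter value are placeholders, not taken from any of the source files listed here; the sketch assumes a 3.x sync driver, matching the MongoClient style used in the examples.

import static com.mongodb.client.model.Filters.eq;

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.result.DeleteResult;
import org.bson.Document;

public class DeleteOneSketch {
    public static void main(String[] args) {
        // Placeholder host, database and collection names.
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        try {
            MongoCollection<Document> books = mongoClient.getDatabase("mydb").getCollection("books");

            // deleteOne removes at most one document matching the filter,
            // even if several documents match.
            DeleteResult result = books.deleteOne(eq("title", "Moby-Dick"));

            // getDeletedCount() is 0 when nothing matched the filter.
            System.out.println("Deleted " + result.getDeletedCount() + " document(s)");
        } finally {
            mongoClient.close();
        }
    }
}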
From source file:module.script.ImportArrayExpress1733.java
License:Open Source License
public ImportArrayExpress1733() {

    // ===== Connection =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Pattern =====
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====
    for (String accession : listAccessions) {

        List<String> accessionAsList = new ArrayList<String>();
        accessionAsList.add(accession);

        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);

        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));

        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE =====
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession)).first();
                isGseFound = gse != null;
            }
        }

        int nbImportedSamples = 0;

        if (!isGseFound) {

            // ===== Create Mongo series =====
            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());
            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                docSeries.put("secondary_accessions", series.getListAccessions());
            }

            if (false) {
                UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                        new Document("$set", docSeries));
                if (updateResult.getMatchedCount() == 0) {
                    collectionSeries.insertOne(docSeries);
                }
            }
            System.out.println(docSeries);

            // ===== Import clinical data =====
            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);

            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Recognize samples =====
            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);

            for (int i = 1; i < data.size(); i++) {
                Integer nbSamples = data.size() - 1;
                Map<String, Object> mapParameters = this.createMapParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);

                if (idSample == null) {
                    System.err.println("ERROR: idSample is not recognized for " + accession);
                    System.out.println("Line " + i);
                    System.out.println(mapParameters);
                    mongoClient.close();
                    System.exit(0);
                } else {
                    if (formatIdSample) {
                        idSample = "E-MTAB-2836" + "-" + idSample;
                        idSample = idSample.trim().replaceAll(" ", "-");
                    }
                }
                idSample = idSample.split(" ")[0].trim();

                // === Organism ===
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = defaultOrganism;
                }

                // === Platform ===
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = defaultPlatform;
                }

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;
                System.out.println("docAlreadyExist " + docAlreadyExist);

                // === Delete old if already exist ===
                if (docAlreadyExist) {
                    List<String> listSeries = (List<String>) docSampleExist.get("series");
                    Set<String> setSeries = new HashSet<String>();
                    listSeries.add(accession);
                    setSeries.addAll(listSeries);
                    listSeries.clear();
                    listSeries.addAll(setSeries);
                    docSampleExist.append("series", listSeries);
                    System.out.println(docSampleExist);
                    if (commit) {
                        collectionSamples.deleteOne(eq("_id", docSampleExist.get("_id")));
                        collectionSamples.insertOne(docSampleExist);
                    }
                }
            }

        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to " + accession
                    + " exists already. Skip import.");
        }

        System.out.println("Number of imported samples: " + nbImportedSamples);
    }

    mongoClient.close();
}
From source file:module.script.proallchen.ImportProallChenSupplementary.java
License:Open Source License
public ImportProallChenSupplementary() {

    // ===== Connection =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Samples =====
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Excel data loader =====
    String inputfile = this.getInputDirectory() + this.getDirSeparator()
            + "DB_ALL_JIN_RNASEC_clinical_data_supplementary.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);
    System.out.println(excelService.getHeader());

    String idSeries = "PROALL_CHEN";
    List<String> listSeries = new ArrayList<String>();
    listSeries.add(idSeries);

    for (int i = 0; i < excelService.getData().size(); i++) {
        List<Object> line = excelService.getData().get(i);
        String idSample = "ESM" + line.get(0);
        System.out.println(idSample + " " + line);

        // ===== Sample Document =====
        Document docSample = new Document();
        docSample.append("_id", idSample).append("main_gse_number", idSeries).append("series", listSeries)
                .append("organism", "Homo sapiens").append("submission_date", today)
                .append("last_update", today).append("import_date", today).append("analyzed", false);

        // ===== Mandatory parameters =====
        Document expGroup = generateExpGroup(idSeries, idSample);
        docSample.append("exp_group", expGroup);

        // ===== Supplementary parameters =====
        Document parameters = new Document();
        parameters.append("id_sample", idSample);

        // === Attributes ===
        for (int j = 0; j < excelService.getHeader().size(); j++) {
            String header = (String) excelService.getHeader().get(j);
            Object value = line.get(j);
            // System.out.println(header + " = " + value);
            parameters.append(header, value);
        }
        docSample.append("parameters", parameters);
        System.out.println(docSample);

        // === Delete if already exist ===
        collectionSamples.deleteOne(eq("_id", idSample));

        // ===== Insert data =====
        collectionSamples.insertOne(docSample);
    }

    mongoClient.close();
}
From source file:mongodb.QuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    // represents a pool of connections to the database
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));
    collection.bulkWrite(writes);

    collection.drop();
    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    // database.drop();

    // release resources
    mongoClient.close();
}
From source file:mongoSample.MongoSample.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    String mongoServer = args[0];

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));
    collection.bulkWrite(writes);

    collection.drop();
    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    //database.drop();

    // release resources
    mongoClient.close();
}
From source file:nl.mvdb.mongodb.RemoveStudentsLowestScore.java
License:Apache License
public static void main(String[] args) {
    MongoClient client = new MongoClient();
    try {
        MongoDatabase database = client.getDatabase("students");
        MongoCollection<Document> collection = database.getCollection("grades");

        MongoCursor<Document> iterator = collection.find().sort(ascending("student_id", "score")).iterator();

        Integer lastStudentId = null;
        while (iterator.hasNext()) {
            Document gradingDoc = iterator.next();
            Integer studentId = gradingDoc.getInteger("student_id");
            if (!studentId.equals(lastStudentId)) {
                lastStudentId = studentId;
                System.out.println(lastStudentId);
                collection.deleteOne(gradingDoc);
            }
        }

        System.out.println(collection.count());
    } finally {
        client.close();
    }
}
From source file:org.apache.eagle.alert.metadata.impl.MongoMetadataDaoImpl.java
License:Apache License
private <T> OpResult removeObject(MongoCollection<Document> collection, String nameField, String name) {
    BsonDocument filter = new BsonDocument();
    filter.append(nameField, new BsonString(name));
    DeleteResult dr = collection.deleteOne(filter);

    OpResult result = new OpResult();
    result.code = 200;
    result.message = String.format(" %d config item removed!", dr.getDeletedCount());
    return result;
}
From source file:org.apache.nifi.processors.mongodb.DeleteMongo.java
License:Apache License
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);
    final String deleteMode = context.getProperty(DELETE_MODE).getValue();
    final String deleteAttr = flowFile.getAttribute("mongodb.delete.mode");
    final Boolean failMode = context.getProperty(FAIL_ON_NO_DELETE).asBoolean();

    if (deleteMode.equals(DELETE_ATTR.getValue())
            && (StringUtils.isEmpty(deleteAttr) || !ALLOWED_DELETE_VALUES.contains(deleteAttr.toLowerCase()))) {
        getLogger().error(String.format("%s is not an allowed value for mongodb.delete.mode", deleteAttr));
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        session.exportTo(flowFile, bos);
        bos.close();

        String json = new String(bos.toByteArray());
        Document query = Document.parse(json);
        DeleteResult result;

        if (deleteMode.equals(DELETE_ONE.getValue())
                || (deleteMode.equals(DELETE_ATTR.getValue()) && deleteAttr.toLowerCase().equals("one"))) {
            result = collection.deleteOne(query);
        } else {
            result = collection.deleteMany(query);
        }

        if (failMode && result.getDeletedCount() == 0) {
            session.transfer(flowFile, REL_FAILURE);
        } else {
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (Exception ex) {
        getLogger().error("Could not send a delete to MongoDB, failing...", ex);
        session.transfer(flowFile, REL_FAILURE);
    }
}
From source file:org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java
License:Apache License
/**
 * Removes a document in the collection.
 *
 * @param databaseName the database
 * @param collectionName the collection
 * @param documentId the document identifier to delete
 * @return the result of the operation
 * @throws DatasourceException
 * @throws NotFoundException
 */
@Override
public boolean deleteById(String databaseName, String collectionName, String documentId)
        throws DatasourceException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        if (!collectionExists(databaseName, collectionName)) {
            throw new NotFoundException("The collection doesn't exist in the datasource");
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mongoDatabase.getCollection(collectionName);
        Document query = new Document("_id", new ObjectId(documentId));
        if (collection.count(query) == 0) {
            throw new NotFoundException("The document doesn't exist in the datasource");
        }
        collection.deleteOne(query);
        return true;
    } catch (MongoException ex) {
        logger.error("An error occurred while deleting the document", ex);
        throw new DatasourceException("An error occurred while deleting the document");
    }
}
From source file:org.codinjutsu.tools.nosql.mongo.logic.SingleMongoClient.java
License:Apache License
public void delete(ServerConfiguration configuration, SingleMongoCollection singleMongoCollection, Object _id) {
    MongoClient mongo = null;
    try {
        String databaseName = singleMongoCollection.getDatabaseName();
        mongo = createMongoClient(configuration);
        MongoDatabase database = mongo.getDatabase(databaseName);
        MongoCollection<Document> collection = database.getCollection(singleMongoCollection.getName());
        collection.deleteOne(Filters.eq("_id", _id));
    } catch (UnknownHostException ex) {
        throw new ConfigurationException(ex);
    } finally {
        if (mongo != null) {
            mongo.close();
        }
    }
}
From source file:org.jaqpot.core.db.entitymanager.MongoDBEntityManager.java
License:Open Source License
@Override
public void remove(JaqpotEntity entity) {
    MongoDatabase db = mongoClient.getDatabase(database);
    MongoCollection<Document> collection = db.getCollection(collectionNames.get(entity.getClass()));
    collection.deleteOne(new Document("_id", entity.getId()));
}
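The signature at the top returns a DeleteResult, which several of the examples above discard. As a hypothetical follow-up, a helper like the sketch below confirms that the delete matched exactly one document; the class and method names are illustrative and do not come from any source file listed here.

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import com.mongodb.client.result.DeleteResult;
import org.bson.Document;

public class DeleteOneResultCheck {

    // Returns true only if the delete was acknowledged and removed exactly one
    // document with the given _id; the caller supplies the collection and id.
    static boolean deleteById(MongoCollection<Document> collection, Object id) {
        DeleteResult result = collection.deleteOne(Filters.eq("_id", id));
        return result.wasAcknowledged() && result.getDeletedCount() == 1;
    }
}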