List of usage examples for com.mongodb.client.MongoCollection bulkWrite
BulkWriteResult bulkWrite(List<? extends WriteModel<? extends TDocument>> requests, BulkWriteOptions options);
BulkWriteResult bulkWrite(ClientSession clientSession, List<? extends WriteModel<? extends TDocument>> requests);
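Before the full examples, a minimal self-contained sketch of the first overload, mixing several WriteModel subtypes in one request. The connection string, database, collection name and filter values are illustrative assumptions, not taken from any of the sources below:

import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.*;
import org.bson.Document;

import java.util.Arrays;

public class BulkWriteSketch {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> items = client.getDatabase("test").getCollection("items");
            // Insert, update and delete models can be combined in a single bulk request.
            BulkWriteResult result = items.bulkWrite(Arrays.asList(
                    new InsertOneModel<>(new Document("_id", 1).append("qty", 5)),
                    new UpdateOneModel<>(Filters.eq("_id", 2),
                            Updates.inc("qty", 1),
                            new UpdateOptions().upsert(true)),
                    new DeleteOneModel<>(Filters.eq("_id", 3))),
                    new BulkWriteOptions().ordered(false)); // unordered: keep going past individual failures
            System.out.printf("inserted=%d modified=%d deleted=%d%n",
                    result.getInsertedCount(), result.getModifiedCount(), result.getDeletedCount());
        }
    }
}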
From source file:com.everydots.kafka.connect.mongodb.MongoDbSinkTask.java
License:Apache License
@Override
public void put(Collection<SinkRecord> records) {
    if (records.isEmpty()) {
        logger.debug("no records to write for current poll operation");
        return;
    }
    MongoCollection<BsonDocument> mongoCollection = database.getCollection(
            sinkConfig.getString(MongoDbSinkConnectorConfig.MONGODB_COLLECTION_CONF), BsonDocument.class);
    List<? extends WriteModel<BsonDocument>> docsToWrite = sinkConfig.isUsingCdcHandler()
            ? buildWriteModelCDC(records)
            : buildWriteModel(records);
    try {
        logger.debug("#records to write: {}", docsToWrite.size());
        if (!docsToWrite.isEmpty()) {
            BulkWriteResult result = mongoCollection.bulkWrite(docsToWrite, BULK_WRITE_OPTIONS);
            logger.debug("write result: {}", result);
        }
    } catch (MongoException mexc) {
        if (mexc instanceof BulkWriteException) {
            BulkWriteException bwe = (BulkWriteException) mexc;
            logger.error("mongodb bulk write (partially) failed", bwe);
            logger.error(bwe.getWriteResult().toString());
            logger.error(bwe.getWriteErrors().toString());
            if (bwe.getWriteConcernError() != null) { // may be null when no write concern error occurred
                logger.error(bwe.getWriteConcernError().toString());
            }
        } else {
            logger.error("error on mongodb operation", mexc);
            logger.error("writing {} record(s) failed -> remaining retries ({})", records.size(),
                    remainingRetries);
        }
        if (remainingRetries-- <= 0) {
            throw new ConnectException(
                    "couldn't successfully process records despite retrying -> giving up :(", mexc);
        }
        logger.debug("deferring retry operation for {}ms", deferRetryMs);
        context.timeout(deferRetryMs);
        throw new RetriableException(mexc.getMessage(), mexc);
    }
}
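The BULK_WRITE_OPTIONS constant is referenced but not shown in this excerpt. For a sink connector that must apply Kafka records in order, an ordered bulk would be the natural choice; the definition below is a hypothetical sketch, not the project's actual code:

// Hypothetical definition; the real constant lives elsewhere in MongoDbSinkTask.
private static final BulkWriteOptions BULK_WRITE_OPTIONS = new BulkWriteOptions().ordered(true);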
From source file:com.helion3.prism.storage.mongodb.MongoRecords.java
License:MIT License
@Override
public StorageWriteResult write(List<DataContainer> containers) throws Exception {
    MongoCollection<Document> collection = MongoStorageAdapter
            .getCollection(MongoStorageAdapter.collectionEventRecordsName);

    // Build an array of documents
    List<WriteModel<Document>> documents = new ArrayList<WriteModel<Document>>();
    for (DataContainer container : containers) {
        Document document = documentFromView(container);
        //Prism.getLogger().debug(DataUtil.jsonFromDataView(container).toString());

        // TTL
        document.append("Expires", DateUtil.parseTimeStringToDate(expiration, true));

        // Insert
        documents.add(new InsertOneModel<Document>(document));
    }

    // Write
    collection.bulkWrite(documents, bulkWriteOptions);

    // @todo implement real results, BulkWriteResult
    return new StorageWriteResult();
}
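The "Expires" field only causes automatic expiry if the collection carries a TTL index on it. A sketch of creating such an index on the collection from the example above; the field name comes from the example, the rest is an assumption about the deployment:

import com.mongodb.client.model.IndexOptions;
import java.util.concurrent.TimeUnit;

// Expire each record as soon as its "Expires" date passes.
collection.createIndex(new Document("Expires", 1),
        new IndexOptions().expireAfter(0L, TimeUnit.SECONDS));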
From source file:net.acesinc.nifi.processors.mongodb.PartialUpdateMongo.java
protected BulkWriteResult performBlukUpdate(List<Map<String, Document>> updateDocs, ProcessContext context,
        ProcessSession session) {
    final ProcessorLog logger = getLogger();
    StopWatch watch = new StopWatch(true);
    logger.debug("Performing Bulk Update of [ " + updateDocs.size() + " ] documents");

    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);

    List<WriteModel<Document>> updates = new ArrayList<>();
    for (Map<String, Document> update : updateDocs) {
        UpdateOneModel<Document> upOne = new UpdateOneModel<>(
                update.get("query"), // find part
                update.get("update"), // update part
                new UpdateOptions().upsert(true) // options like upsert
        );
        updates.add(upOne);
    }

    BulkWriteResult bulkWriteResult = collection.bulkWrite(updates, new BulkWriteOptions().ordered(false));
    return bulkWriteResult;
}
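With upsert(true), documents that matched nothing are created rather than modified, so they appear in getUpserts() instead of being counted by getModifiedCount(). A hedged sketch of inspecting the result returned above (illustrative, not part of the source):

// Matched and modified counts cover existing documents; upserts are reported separately.
logger.debug("matched=" + bulkWriteResult.getMatchedCount()
        + " modified=" + bulkWriteResult.getModifiedCount()
        + " upserted=" + bulkWriteResult.getUpserts().size());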
From source file:net.es.netshell.mongodb.MongoDBProvider.java
License:Open Source License
@Override
public final void store(List<ResourceAnchor> anchors) throws IOException {
    if (!KernelThread.currentKernelThread().isPrivileged()) {
        throw new SecurityException("store db list - not authorized");
    }
    HashMap<String, ArrayList<WriteModel>> collectionRequests = new HashMap<String, ArrayList<WriteModel>>();
    // Build the bulk requests per collection
    for (ResourceAnchor anchor : anchors) {
        String user = anchor.getContainerOwner();
        String collection = anchor.getContainerName();
        String collectionName = user + "_" + collection;
        ArrayList<WriteModel> requests;
        if (!collectionRequests.containsKey(collectionName)) {
            requests = new ArrayList<WriteModel>();
            collectionRequests.put(collectionName, requests);
        } else {
            requests = collectionRequests.get(collectionName);
        }
        try {
            // Likely to be in the Resource cache. Otherwise replace by itself.
            Resource resource = Resource.findByName(user, collection, anchor.getResourceName());
            Document doc = Document.parse(resource.saveToJSON());
            Document query = new Document("resourceName", resource.getResourceName());
            ReplaceOneModel<Document> request = new ReplaceOneModel<Document>(query, doc);
            request.getOptions().upsert(true);
            requests.add(request);
        } catch (InstantiationException e) {
            throw new IOException(e);
        }
    }
    // Bulk write the collection's requests
    for (Map.Entry<String, ArrayList<WriteModel>> entry : collectionRequests.entrySet()) {
        String[] name = entry.getKey().split("_");
        ArrayList<WriteModel> requests = entry.getValue();
        MongoCollection mongoCollection = this.getCollection(name[0], name[1]);
        if (mongoCollection == null) {
            throw new RuntimeErrorException(new Error("Could not store into collection " + entry.getKey()));
        }
        BulkWriteOptions options = new BulkWriteOptions();
        options.ordered(false);
        mongoCollection.bulkWrite(requests, options);
    }
}
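Mutating the model's options through getOptions() works with the older driver API this code targets. In driver 3.7+, ReplaceOptions supersedes UpdateOptions for replace models, and passing the options to the constructor is the idiomatic form; a sketch, reusing the query and doc variables from the example:

// Equivalent upsert-on-replace in one step (driver 3.7+):
ReplaceOneModel<Document> request =
        new ReplaceOneModel<>(query, doc, new ReplaceOptions().upsert(true));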
From source file:net.es.netshell.mongodb.MongoDBProvider.java
License:Open Source License
@Override
public final void delete(List<ResourceAnchor> anchors) throws IOException {
    if (!KernelThread.currentKernelThread().isPrivileged()) {
        throw new SecurityException("delete db list - not authorized");
    }
    HashMap<String, ArrayList<WriteModel>> collectionRequests = new HashMap<String, ArrayList<WriteModel>>();
    // Build the bulk requests per collection
    for (ResourceAnchor anchor : anchors) {
        String user = anchor.getContainerOwner();
        String collection = anchor.getContainerName();
        String collectionName = user + "_" + collection;
        ArrayList<WriteModel> requests;
        if (!collectionRequests.containsKey(collectionName)) {
            requests = new ArrayList<WriteModel>();
            collectionRequests.put(collectionName, requests);
        } else {
            requests = collectionRequests.get(collectionName);
        }
        Document query = new Document("resourceName", anchor.getResourceName());
        DeleteOneModel request = new DeleteOneModel(query);
        requests.add(request);
    }
    // Bulk delete the collection's requests
    for (Map.Entry<String, ArrayList<WriteModel>> entry : collectionRequests.entrySet()) {
        String[] name = entry.getKey().split("_");
        ArrayList<WriteModel> requests = entry.getValue();
        MongoCollection mongoCollection = this.getCollection(name[0], name[1]);
        if (mongoCollection == null) {
            throw new RuntimeErrorException(new Error("Could not delete from collection " + entry.getKey()));
        }
        BulkWriteOptions options = new BulkWriteOptions();
        options.ordered(false);
        mongoCollection.bulkWrite(requests, options);
    }
}
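DeleteOneModel removes at most one matching document per request. If several documents could share a resourceName, DeleteManyModel would be the alternative; a sketch, not what this source does:

// Removes every document matching the filter, not just the first.
DeleteManyModel<Document> request =
        new DeleteManyModel<>(new Document("resourceName", anchor.getResourceName()));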
From source file:uk.ac.ebi.eva.dbmigration.mongodb.ExtractAnnotationFromVariant.java
License:Apache License
@ChangeSet(order = "001", id = "migrateAnnotation", author = "EVA") public void migrateAnnotation(MongoDatabase mongoDatabase) { final MongoCollection<Document> variantsCollection = mongoDatabase .getCollection(databaseParameters.getDbCollectionsVariantsName()); final MongoCollection<Document> annotationCollection = mongoDatabase .getCollection(databaseParameters.getDbCollectionsAnnotationsName()); logger.info("1) migrate annotation from collection {}", variantsCollection.getNamespace()); long annotationsReadCount = 0; long annotationsWrittenCount = 0; BulkWriteOptions unorderedBulk = new BulkWriteOptions().ordered(false); Document onlyAnnotatedVariants = new Document(ANNOT_FIELD, EXISTS); try (MongoCursor<Document> cursor = variantsCollection.find(onlyAnnotatedVariants).iterator()) { while (true) { List<InsertOneModel<Document>> annotationsToInsert = getBatch(cursor, BULK_SIZE).stream() .map(this::buildInsertionDocument).collect(toList()); if (annotationsToInsert.isEmpty()) { break; }//from w w w .j a va 2 s . co m annotationsReadCount += annotationsToInsert.size(); BulkWriteResult bulkInsert = annotationCollection.bulkWrite(annotationsToInsert, unorderedBulk); annotationsWrittenCount += bulkInsert.getInsertedCount(); } } //before executing the next changeSet check that the count of read and written annotation documents match if (annotationsReadCount != annotationsWrittenCount) { throw new RuntimeException("The number of processed Variants (" + annotationsReadCount + ") is different from the number of new annotation inserted (" + annotationsWrittenCount + "). The '" + ANNOT_FIELD + "' field will not be removed from the " + variantsCollection.getNamespace() + " collection."); } }
From source file:uk.ac.ebi.eva.dbmigration.mongodb.ExtractAnnotationFromVariant.java
License:Apache License
@ChangeSet(order = "003", id = "reduceAnnotationFromVariants", author = "EVA") public void reduceAnnotationFromVariants(MongoDatabase mongoDatabase) { final MongoCollection<Document> variantsCollection = mongoDatabase .getCollection(databaseParameters.getDbCollectionsVariantsName()); logger.info("3) reduce annotation field from collection {}", variantsCollection.getNamespace()); long annotationsReadCount = 0; long annotationsUpdatedCount = 0; BulkWriteOptions unorderedBulk = new BulkWriteOptions().ordered(false); Document onlyAnnotatedVariants = new Document(ANNOT_FIELD, EXISTS); try (MongoCursor<Document> cursor = variantsCollection.find(onlyAnnotatedVariants).iterator()) { while (true) { List<UpdateOneModel<Document>> annotationsToUpdate = getBatch(cursor, BULK_SIZE).stream() .map(this::buildUpdateDocument).collect(toList()); if (annotationsToUpdate.isEmpty()) { break; }/* w w w . j a va 2 s .c o m*/ annotationsReadCount += annotationsToUpdate.size(); BulkWriteResult bulkInsert = variantsCollection.bulkWrite(annotationsToUpdate, unorderedBulk); annotationsUpdatedCount += bulkInsert.getModifiedCount(); } } if (annotationsReadCount != annotationsUpdatedCount) { throw new RuntimeException("The number of processed Variants (" + annotationsReadCount + ") is different from the number of annotation " + "updated (" + annotationsUpdatedCount + ")."); } }