List of usage examples for com.mongodb.BulkWriteOperation.execute()
public BulkWriteResult execute()
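All of the examples below follow the same pattern: obtain a BulkWriteOperation from a DBCollection, stage inserts, updates, upserts, or removes on it, and then send everything to the server in a single execute() call. The following minimal sketch illustrates that pattern with the legacy com.mongodb driver used throughout this page; the host, the "test" database, the "books" collection, and the document fields are hypothetical and chosen only for illustration.

import com.mongodb.BasicDBObject;
import com.mongodb.BulkWriteException;
import com.mongodb.BulkWriteOperation;
import com.mongodb.BulkWriteResult;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;

public class BulkWriteExecuteExample {
    public static void main(String[] args) {
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        try {
            DB db = mongoClient.getDB("test");                   // hypothetical database
            DBCollection collection = db.getCollection("books"); // hypothetical collection

            // Stage operations locally; nothing is sent to the server yet.
            BulkWriteOperation bulk = collection.initializeOrderedBulkOperation();
            bulk.insert(new BasicDBObject("title", "Dune").append("stock", 3));
            bulk.find(new BasicDBObject("title", "Dune"))
                    .upsert()
                    .updateOne(new BasicDBObject("$set", new BasicDBObject("stock", 5)));

            try {
                // execute() sends all staged operations in one round trip and returns per-type counts.
                BulkWriteResult result = bulk.execute();
                System.out.println("inserted=" + result.getInsertedCount()
                        + ", matched=" + result.getMatchedCount());
            } catch (BulkWriteException e) {
                // An ordered bulk stops at the first failing operation; the exception carries the write errors.
                System.err.println("Bulk write failed: " + e.getWriteErrors());
            }
        } finally {
            mongoClient.close();
        }
    }
}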
From source file:org.easybatch.extensions.mongodb.MongoDBBatchWriter.java
License:Open Source License
@Override
public Batch processRecord(final Batch batch) throws RecordWritingException {
    List<Record> records = batch.getPayload();
    Collection<DBObject> documents = asDocuments(records);
    // Queue every document as an insert on an ordered bulk operation, then send them in one round trip.
    BulkWriteOperation bulkWriteOperation = collection.initializeOrderedBulkOperation();
    for (DBObject document : documents) {
        bulkWriteOperation.insert(document);
    }
    try {
        bulkWriteOperation.execute();
        return batch;
    } catch (Exception e) {
        throw new RecordWritingException(format("Unable to write documents [%s] to Mongo DB server", documents), e);
    }
}
From source file:org.easybatch.integration.mongodb.MongoDBMultiRecordWriter.java
License:Open Source License
@Override
protected void writeRecord(List<DBObject> documents) throws RecordProcessingException {
    // Same ordered-bulk pattern: stage the inserts, execute once, and wrap any failure in a batch-level exception.
    BulkWriteOperation bulkWriteOperation = collection.initializeOrderedBulkOperation();
    for (DBObject document : documents) {
        bulkWriteOperation.insert(document);
    }
    try {
        bulkWriteOperation.execute();
    } catch (Exception e) {
        throw new RecordProcessingException(format("Unable to write documents [%s] to Mongo DB server", documents), e);
    }
}
From source file:org.fastmongo.odm.repository.MongoTemplate.java
License:Apache License
@Override
public BulkWriteResult save(Class<?> collectionClass, Collection<DBObject> dbObjects) {
    // Unordered bulk of upserts: each document replaces the existing one with the same id, or is inserted if absent.
    BulkWriteOperation bulk = getCollection(collectionClass).initializeUnorderedBulkOperation();
    for (DBObject dbObject : dbObjects) {
        DBObject idQuery = new BasicDBObject();
        idQuery.put(OBJECT_ID_KEY, dbObject.get(OBJECT_ID_KEY));
        bulk.find(idQuery).upsert().replaceOne(dbObject);
    }
    return bulk.execute();
}
From source file:org.opencb.opencga.storage.mongodb.variant.VariantMongoDBAdaptor.java
License:Apache License
@Override
public QueryResult updateStats(List<VariantStatsWrapper> variantStatsWrappers,
                               StudyConfiguration studyConfiguration, QueryOptions options) {
    DBCollection coll = db.getDb().getCollection(collectionName);
    BulkWriteOperation builder = coll.initializeUnorderedBulkOperation();
    long start = System.nanoTime();
    DBObjectToVariantStatsConverter statsConverter = new DBObjectToVariantStatsConverter(studyConfigurationManager);
    // VariantSource variantSource = queryOptions.get(VariantStorageManager.VARIANT_SOURCE, VariantSource.class);
    DBObjectToVariantConverter variantConverter = getDbObjectToVariantConverter(new Query(), options);
    // TODO: Use the StudyConfiguration to change names to ids
    // TODO make unset of 'st' if already present?
    for (VariantStatsWrapper wrapper : variantStatsWrappers) {
        Map<String, VariantStats> cohortStats = wrapper.getCohortStats();
        Iterator<VariantStats> iterator = cohortStats.values().iterator();
        VariantStats variantStats = iterator.hasNext() ? iterator.next() : null;
        List<DBObject> cohorts = statsConverter.convertCohortsToStorageType(cohortStats,
                studyConfiguration.getStudyId()); // TODO remove when we remove fileId
        // List cohorts = statsConverter.convertCohortsToStorageType(cohortStats, variantSource.getStudyId()); // TODO use when we remove fileId

        // add cohorts, overwriting old values if that cid, fid and sid already exists: remove and then add
        // db.variants.update(
        //     {_id:<id>},
        //     {$pull:{st:{cid:{$in:["Cohort 1","cohort 2"]}, fid:{$in:["file 1", "file 2"]}, sid:{$in:["study 1", "study 2"]}}}}
        // )
        // db.variants.update(
        //     {_id:<id>},
        //     {$push:{st:{$each: [{cid:"Cohort 1", fid:"file 1", ... , defaultValue:3},{cid:"Cohort 2", ... , defaultValue:3}] }}}
        // )
        if (!cohorts.isEmpty()) {
            String id = variantConverter.buildStorageId(wrapper.getChromosome(), wrapper.getPosition(),
                    variantStats.getRefAllele(), variantStats.getAltAllele());
            List<Integer> cohortIds = new ArrayList<>(cohorts.size());
            List<Integer> studyIds = new ArrayList<>(cohorts.size());
            for (DBObject cohort : cohorts) {
                cohortIds.add((Integer) cohort.get(DBObjectToVariantStatsConverter.COHORT_ID));
                studyIds.add((Integer) cohort.get(DBObjectToVariantStatsConverter.STUDY_ID));
            }

            DBObject find = new BasicDBObject("_id", id);
            DBObject update = new BasicDBObject("$pull",
                    new BasicDBObject(DBObjectToVariantConverter.STATS_FIELD,
                            new BasicDBObject()
                                    .append(DBObjectToVariantStatsConverter.STUDY_ID, new BasicDBObject("$in", studyIds))
                                    // .append(DBObjectToVariantStatsConverter.FILE_ID, new BasicDBObject("$in", fileIds))
                                    .append(DBObjectToVariantStatsConverter.COHORT_ID, new BasicDBObject("$in", cohortIds))));
            builder.find(find).updateOne(update);

            DBObject push = new BasicDBObject("$push",
                    new BasicDBObject(DBObjectToVariantConverter.STATS_FIELD, new BasicDBObject("$each", cohorts)));
            builder.find(find).update(push);
        }
    }
    // TODO handle if the variant didn't had that studyId in the files array
    // TODO check the substitution is done right if the stats are already present
    BulkWriteResult writeResult = builder.execute();
    int writes = writeResult.getModifiedCount();

    return new QueryResult<>("", ((int) (System.nanoTime() - start)), writes, writes, "", "",
            Collections.singletonList(writeResult));
}
From source file:org.opencb.opencga.storage.mongodb.variant.VariantMongoDBAdaptor.java
License:Apache License
@Override
public QueryResult updateAnnotations(List<VariantAnnotation> variantAnnotations, QueryOptions queryOptions) {
    DBCollection coll = db.getDb().getCollection(collectionName);
    BulkWriteOperation builder = coll.initializeUnorderedBulkOperation();
    long start = System.nanoTime();
    DBObjectToVariantConverter variantConverter = getDbObjectToVariantConverter(new Query(), queryOptions);
    // Stage one $set per annotation, addressing each variant document by its storage id.
    for (VariantAnnotation variantAnnotation : variantAnnotations) {
        String id = variantConverter.buildStorageId(variantAnnotation.getChromosome(), variantAnnotation.getStart(),
                variantAnnotation.getReferenceAllele(), variantAnnotation.getAlternateAllele());
        DBObject find = new BasicDBObject("_id", id);
        DBObjectToVariantAnnotationConverter converter = new DBObjectToVariantAnnotationConverter();
        DBObject convertedVariantAnnotation = converter.convertToStorageType(variantAnnotation);
        DBObject update = new BasicDBObject("$set",
                new BasicDBObject(DBObjectToVariantConverter.ANNOTATION_FIELD + ".0", convertedVariantAnnotation));
        builder.find(find).updateOne(update);
    }
    BulkWriteResult writeResult = builder.execute();

    return new QueryResult<>("", ((int) (System.nanoTime() - start)), 1, 1, "", "",
            Collections.singletonList(writeResult));
}
From source file:org.opencb.opencga.storage.mongodb.variant.VariantMongoDBAdaptor.java
License:Apache License
@Override
@Deprecated
public QueryResult updateStats(List<VariantStatsWrapper> variantStatsWrappers, int studyId, QueryOptions queryOptions) {
    DBCollection coll = db.getDb().getCollection(collectionName);
    BulkWriteOperation builder = coll.initializeUnorderedBulkOperation();
    long start = System.nanoTime();
    DBObjectToVariantStatsConverter statsConverter = new DBObjectToVariantStatsConverter(studyConfigurationManager);
    // VariantSource variantSource = queryOptions.get(VariantStorageManager.VARIANT_SOURCE, VariantSource.class);
    int fileId = queryOptions.getInt(VariantStorageManager.Options.FILE_ID.key());
    DBObjectToVariantConverter variantConverter = getDbObjectToVariantConverter(new Query(queryOptions), queryOptions);
    // TODO: Use the StudyConfiguration to change names to ids
    // TODO make unset of 'st' if already present?
    for (VariantStatsWrapper wrapper : variantStatsWrappers) {
        Map<String, VariantStats> cohortStats = wrapper.getCohortStats();
        Iterator<VariantStats> iterator = cohortStats.values().iterator();
        VariantStats variantStats = iterator.hasNext() ? iterator.next() : null;
        List<DBObject> cohorts = statsConverter.convertCohortsToStorageType(cohortStats, studyId); // TODO remove when we remove fileId
        // List cohorts = statsConverter.convertCohortsToStorageType(cohortStats, variantSource.getStudyId()); // TODO use when we remove fileId

        // add cohorts, overwriting old values if that cid, fid and sid already exists: remove and then add
        // db.variants.update(
        //     {_id:<id>},
        //     {$pull:{st:{cid:{$in:["Cohort 1","cohort 2"]}, fid:{$in:["file 1", "file 2"]}, sid:{$in:["study 1", "study 2"]}}}}
        // )
        // db.variants.update(
        //     {_id:<id>},
        //     {$push:{st:{$each: [{cid:"Cohort 1", fid:"file 1", ... , defaultValue:3},{cid:"Cohort 2", ... , defaultValue:3}] }}}
        // )
        if (!cohorts.isEmpty()) {
            String id = variantConverter.buildStorageId(wrapper.getChromosome(), wrapper.getPosition(),
                    variantStats.getRefAllele(), variantStats.getAltAllele());
            List<String> cohortIds = new ArrayList<>(cohorts.size());
            List<Integer> fileIds = new ArrayList<>(cohorts.size());
            List<Integer> studyIds = new ArrayList<>(cohorts.size());
            for (DBObject cohort : cohorts) {
                cohortIds.add((String) cohort.get(DBObjectToVariantStatsConverter.COHORT_ID));
                // fileIds.add((Integer) cohort.get(DBObjectToVariantStatsConverter.FILE_ID));
                studyIds.add((Integer) cohort.get(DBObjectToVariantStatsConverter.STUDY_ID));
            }

            DBObject find = new BasicDBObject("_id", id);
            DBObject update = new BasicDBObject("$pull",
                    new BasicDBObject(DBObjectToVariantConverter.STATS_FIELD,
                            new BasicDBObject()
                                    .append(DBObjectToVariantStatsConverter.STUDY_ID, new BasicDBObject("$in", studyIds))
                                    // .append(DBObjectToVariantStatsConverter.FILE_ID, new BasicDBObject("$in", fileIds))
                                    .append(DBObjectToVariantStatsConverter.COHORT_ID, new BasicDBObject("$in", cohortIds))));
            builder.find(find).updateOne(update);

            DBObject push = new BasicDBObject("$push",
                    new BasicDBObject(DBObjectToVariantConverter.STATS_FIELD, new BasicDBObject("$each", cohorts)));
            builder.find(find).update(push);
        }
    }
    // TODO handle if the variant didn't had that studyId in the files array
    // TODO check the substitution is done right if the stats are already present
    BulkWriteResult writeResult = builder.execute();
    int writes = writeResult.getModifiedCount();

    return new QueryResult<>("", ((int) (System.nanoTime() - start)), writes, writes, "", "",
            Collections.singletonList(writeResult));
}
From source file:org.sglover.entities.dao.mongo.MongoEntitiesDAO.java
License:Open Source License
private void addEntities(Node node, String type, String key, Collection<Entity<String>> entities) {
    // Insert one document per entity (with its nested location objects) through an unordered bulk operation,
    // then compare the acknowledged insert count against the number of staged inserts.
    BulkWriteOperation bulk = entitiesData.initializeUnorderedBulkOperation();

    String nodeId = node.getNodeId();
    long nodeInternalId = node.getNodeInternalId();
    String nodeVersion = node.getVersionLabel();

    if (entities.size() > 0) {
        int expected = entities.size();
        for (Entity<String> nameEntity : entities) {
            List<EntityLocation> locations = nameEntity.getLocations();
            List<DBObject> locs = new LinkedList<>();
            for (EntityLocation location : locations) {
                long start = location.getBeginOffset();
                long end = location.getEndOffset();
                String context = location.getContext();
                double probability = location.getProbability();
                DBObject locDBObject = BasicDBObjectBuilder.start("s", start).add("e", end)
                        .add("p", probability).add("c", context).get();
                locs.add(locDBObject);
            }
            DBObject dbObject = BasicDBObjectBuilder.start("n", nodeId).add("ni", nodeInternalId)
                    .add("v", nodeVersion).add("t", type).add(key, nameEntity.getEntity())
                    .add("c", nameEntity.getCount()).add("locs", locs).get();
            bulk.insert(dbObject);
        }

        BulkWriteResult result = bulk.execute();
        int inserted = result.getInsertedCount();
        if (expected != inserted) {
            throw new RuntimeException("Mongo write failed");
        }
    }
}
From source file:org.springframework.integration.mongodb.store.MongoDbMessageStore.java
License:Apache License
private void bulkRemove(Object groupId, Collection<UUID> ids) {
    // Stage one remove per message id within the group, then execute them as a single ordered bulk.
    BulkWriteOperation bulkOp = this.template.getCollection(this.collectionName).initializeOrderedBulkOperation();
    for (UUID id : ids) {
        bulkOp.find(whereMessageIdIsAndGroupIdIs(id, groupId).getQueryObject()).remove();
    }
    bulkOp.execute();
}
From source file:uk.ac.ebi.eva.pipeline.io.writers.VariantMongoWriter.java
License:Apache License
private void executeBulk(BulkWriteOperation bulk, int currentBulkSize) {
    // Only call execute() when at least one operation has been staged; empty bulks are skipped.
    if (currentBulkSize != 0) {
        logger.trace("Execute bulk. BulkSize : " + currentBulkSize);
        bulk.execute();
    }
}
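Two of the examples above point at practical details worth keeping together: the VariantMongoWriter snippet only calls execute() when the bulk is non-empty, and the MongoEntitiesDAO snippet compares getInsertedCount() against the number of staged inserts. The helper below is a minimal sketch that combines both checks; the class name, the stagedOperations/expectedInserts parameters, and the null-on-empty convention are assumptions for illustration, not part of any of the projects above.

import com.mongodb.BulkWriteOperation;
import com.mongodb.BulkWriteResult;

// Hypothetical helper combining two patterns shown above: skip empty bulks and verify the insert count.
public final class BulkWrites {

    private BulkWrites() {
    }

    // Executes the bulk only if at least one operation was staged, and checks that every
    // staged insert was acknowledged. Returns the result, or null when the bulk was empty.
    public static BulkWriteResult executeIfNotEmpty(BulkWriteOperation bulk, int stagedOperations, int expectedInserts) {
        if (stagedOperations == 0) {
            return null; // nothing staged: avoid calling execute() on an empty bulk
        }
        BulkWriteResult result = bulk.execute();
        if (result.getInsertedCount() != expectedInserts) {
            throw new IllegalStateException("Expected " + expectedInserts
                    + " inserts but the server acknowledged " + result.getInsertedCount());
        }
        return result;
    }
}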