Example usage for com.mongodb.bulk BulkWriteResult getModifiedCount

List of usage examples for com.mongodb.bulk BulkWriteResult getModifiedCount

Introduction

On this page you can find example usage for com.mongodb.bulk BulkWriteResult getModifiedCount.

Prototype

public abstract int getModifiedCount();

Document

Returns the number of documents modified by the write operation.
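Before the project examples below, here is a minimal, self-contained sketch of getModifiedCount() in action. It assumes the MongoDB Java sync driver (3.7+), a local mongod, and an illustrative "test.people" collection; all names are placeholders.

import java.util.Arrays;

import org.bson.Document;

import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.UpdateOneModel;

public class ModifiedCountExample {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> coll = client.getDatabase("test").getCollection("people");

            // Bundle two $set updates into a single bulk write
            BulkWriteResult res = coll.bulkWrite(Arrays.asList(
                    new UpdateOneModel<>(new Document("_id", 1),
                            new Document("$set", new Document("status", "active"))),
                    new UpdateOneModel<>(new Document("_id", 2),
                            new Document("$set", new Document("status", "inactive")))));

            // The modified count is only meaningful for acknowledged writes
            if (res.wasAcknowledged()) {
                System.out.println("Modified: " + res.getModifiedCount());
            }
        }
    }
}

Note that a document matched by the filter but left unchanged does not increase the modified count, which is why several examples below report getMatchedCount() and getUpserts() alongside it; the RESTHeart examples also guard with isModifiedCountAvailable() before reading the value.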

Usage

From source file:com.erudika.para.persistence.MongoDBDAO.java

License:Apache License

@Override
public <P extends ParaObject> void updateAll(String appid, List<P> objects) {
    if (StringUtils.isBlank(appid) || objects == null) {
        return;
    }
    try {
        ArrayList<WriteModel<Document>> updates = new ArrayList<WriteModel<Document>>();
        List<String> ids = new ArrayList<String>(objects.size());
        for (P object : objects) {
            if (object != null) {
                object.setUpdated(Utils.timestamp());
                Document id = new Document(_ID, object.getId());
                Document data = new Document("$set", toRow(object, Locked.class, true));
                UpdateOneModel<Document> um = new UpdateOneModel<Document>(id, data);
                updates.add(um);
                ids.add(object.getId());
            }
        }
        BulkWriteResult res = getTable(appid).bulkWrite(updates, new BulkWriteOptions().ordered(true));
        logger.debug("Updated: " + res.getModifiedCount() + ", keys: " + ids);
    } catch (Exception e) {
        logger.error(null, e);
    }
    logger.debug("DAO.updateAll() {}", (objects == null) ? 0 : objects.size());
}

From source file:com.streamsets.pipeline.stage.destination.mongodb.MongoDBTarget.java

License:Apache License

@Override
public void write(Batch batch) throws StageException {
    Iterator<Record> records = batch.getRecords();
    List<WriteModel<Document>> documentList = new ArrayList<>();
    List<Record> recordList = new ArrayList<>();
    while (records.hasNext()) {
        Record record = records.next();
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream(DEFAULT_CAPACITY);
            DataGenerator generator = generatorFactory.getGenerator(baos);
            generator.write(record);
            generator.close();
            Document document = Document.parse(new String(baos.toByteArray()));

            //create a write model based on record header
            if (!record.getHeader().getAttributeNames().contains(OPERATION_KEY)) {
                LOG.error(Errors.MONGODB_15.getMessage(), record.getHeader().getSourceId());
                throw new OnRecordErrorException(Errors.MONGODB_15, record.getHeader().getSourceId());
            }

            String operation = record.getHeader().getAttribute(OPERATION_KEY);
            switch (operation) {
            case INSERT:
                documentList.add(new InsertOneModel<>(document));
                recordList.add(record);
                break;
            case UPSERT:
                validateUniqueKey(operation, record);
                recordList.add(record);
                documentList.add(new ReplaceOneModel<>(
                        new Document(removeLeadingSlash(mongoTargetConfigBean.uniqueKeyField),
                                record.get(mongoTargetConfigBean.uniqueKeyField).getValueAsString()),
                        document, new UpdateOptions().upsert(true)));
                break;
            case DELETE:
                recordList.add(record);
                documentList.add(new DeleteOneModel<Document>(document));
                break;
            default:
                LOG.error(Errors.MONGODB_14.getMessage(), operation, record.getHeader().getSourceId());
                throw new StageException(Errors.MONGODB_14, operation, record.getHeader().getSourceId());
            }
        } catch (IOException | StageException e) {
            errorRecordHandler.onError(new OnRecordErrorException(record, Errors.MONGODB_13, e.toString(), e));
        }
    }

    if (!documentList.isEmpty()) {
        try {
            BulkWriteResult bulkWriteResult = coll.bulkWrite(documentList);
            if (bulkWriteResult.wasAcknowledged()) {
                LOG.trace("Wrote batch with {} inserts, {} updates and {} deletes",
                        bulkWriteResult.getInsertedCount(), bulkWriteResult.getModifiedCount(),
                        bulkWriteResult.getDeletedCount());
            }
        } catch (MongoException e) {
            for (Record record : recordList) {
                errorRecordHandler
                        .onError(new OnRecordErrorException(record, Errors.MONGODB_17, e.toString(), e));
            }
        }
    }
}

From source file:org.eclipse.ditto.services.thingsearch.persistence.write.streaming.SearchUpdaterStream.java

License:Open Source License

private static String logResult(final BulkWriteResult bulkWriteResult) {
    return String.format("BulkWriteResult[matched=%d,upserts=%d,inserted=%d,modified=%d,deleted=%d]",
            bulkWriteResult.getMatchedCount(), bulkWriteResult.getUpserts().size(),
            bulkWriteResult.getInsertedCount(), bulkWriteResult.getModifiedCount(),
            bulkWriteResult.getDeletedCount());
}

From source file:org.opencb.cellbase.lib.db.variation.VariationMongoDBAdaptor.java

License:Apache License

private int updatePopulationFrequencies(List<Document> variantDocumentList) {

    List<Bson> queries = new ArrayList<>(variantDocumentList.size());
    List<Bson> updates = new ArrayList<>(variantDocumentList.size());

    for (Document variantDBObject : variantDocumentList) {
        Document annotationDBObject = (Document) variantDBObject.get("annotation");
        Document push = new Document(POP_FREQUENCIES_FIELD, annotationDBObject.get("populationFrequencies"));

        // Remove annotation object from the DBObject so that push and setOnInsert do not update the same fields:
        // i.e. annotation.populationFrequencies and annotation
        variantDBObject.remove("annotation");
        addChunkId(variantDBObject);

        Document update = new Document().append("$pushAll", push).append("$setOnInsert", variantDBObject);

        updates.add(update);

        String chunkId = getChunkIdPrefix((String) variantDBObject.get("chromosome"),
                (int) variantDBObject.get("start"), variationChunkSize);
        queries.add(new Document("_chunkIds", chunkId).append("chromosome", variantDBObject.get("chromosome"))
                .append("start", variantDBObject.get("start"))
                .append("reference", variantDBObject.get("reference"))
                .append("alternate", variantDBObject.get("alternate")));
    }

    BulkWriteResult bulkWriteResult;
    if (!queries.isEmpty()) {
        logger.info("updating object");
        QueryOptions options = new QueryOptions("upsert", true);
        options.put("multi", false);
        try {
            bulkWriteResult = mongoDBCollection.update(queries, updates, options).first();
        } catch (BulkWriteException e) {
            throw e;
        }
        logger.info("{} object updated",
                bulkWriteResult.getUpserts().size() + bulkWriteResult.getModifiedCount());
        return bulkWriteResult.getUpserts().size() + bulkWriteResult.getModifiedCount();
    }
    logger.info("no object updated");
    return 0;

}

From source file:org.opencb.opencga.storage.mongodb.variant.adaptors.VariantMongoDBAdaptor.java

License:Apache License

@Override
public QueryResult updateStats(List<VariantStatsWrapper> variantStatsWrappers,
        StudyConfiguration studyConfiguration, QueryOptions options) {
    //        MongoCollection<Document> coll = db.getDb().getCollection(collectionName);
    //        BulkWriteOperation pullBuilder = coll.initializeUnorderedBulkOperation();
    //        BulkWriteOperation pushBuilder = coll.initializeUnorderedBulkOperation();

    List<Bson> pullQueriesBulkList = new LinkedList<>();
    List<Bson> pullUpdatesBulkList = new LinkedList<>();

    List<Bson> pushQueriesBulkList = new LinkedList<>();
    List<Bson> pushUpdatesBulkList = new LinkedList<>();

    long start = System.nanoTime();
    DocumentToVariantStatsConverter statsConverter = new DocumentToVariantStatsConverter(
            studyConfigurationManager);
    //        VariantSource variantSource = queryOptions.get(VariantStorageEngine.VARIANT_SOURCE, VariantSource.class);
    DocumentToVariantConverter variantConverter = getDocumentToVariantConverter(new Query(), options);
    boolean overwrite = options.getBoolean(VariantStorageEngine.Options.OVERWRITE_STATS.key(), false);
    //TODO: Use the StudyConfiguration to change names to ids

    // TODO make unset of 'st' if already present?
    for (VariantStatsWrapper wrapper : variantStatsWrappers) {
        Map<String, VariantStats> cohortStats = wrapper.getCohortStats();
        Iterator<VariantStats> iterator = cohortStats.values().iterator();
        VariantStats variantStats = iterator.hasNext() ? iterator.next() : null;
        List<Document> cohorts = statsConverter.convertCohortsToStorageType(cohortStats,
                studyConfiguration.getStudyId()); // TODO
        // remove when we remove fileId
        //            List cohorts = statsConverter.convertCohortsToStorageType(cohortStats, variantSource.getStudyId());   // TODO use when we
        // remove fileId

        // add cohorts, overwriting old values if that cid, fid and sid already exists: remove and then add
        // db.variants.update(
        //      {_id:<id>},
        //      {$pull:{st:{cid:{$in:["Cohort 1","cohort 2"]}, fid:{$in:["file 1", "file 2"]}, sid:{$in:["study 1", "study 2"]}}}}
        // )
        // db.variants.update(
        //      {_id:<id>},
        //      {$push:{st:{$each: [{cid:"Cohort 1", fid:"file 1", ... , defaultValue:3},{cid:"Cohort 2", ... , defaultValue:3}] }}}
        // )

        if (!cohorts.isEmpty()) {
            String id = variantConverter.buildStorageId(wrapper.getChromosome(), wrapper.getPosition(),
                    variantStats.getRefAllele(), variantStats.getAltAllele());

            Document find = new Document("_id", id);
            if (overwrite) {
                List<Document> idsList = new ArrayList<>(cohorts.size());
                for (Document cohort : cohorts) {
                    Document ids = new Document()
                            .append(DocumentToVariantStatsConverter.COHORT_ID,
                                    cohort.get(DocumentToVariantStatsConverter.COHORT_ID))
                            .append(DocumentToVariantStatsConverter.STUDY_ID,
                                    cohort.get(DocumentToVariantStatsConverter.STUDY_ID));
                    idsList.add(ids);
                }
                Document pull = new Document("$pull",
                        new Document(DocumentToVariantConverter.STATS_FIELD, new Document("$or", idsList)));
                pullQueriesBulkList.add(find);
                pullUpdatesBulkList.add(pull);
            }

            Document push = new Document("$push",
                    new Document(DocumentToVariantConverter.STATS_FIELD, new Document("$each", cohorts)));
            pushQueriesBulkList.add(find);
            pushUpdatesBulkList.add(push);
        }
    }

    // TODO handle if the variant didn't have that studyId in the files array
    // TODO check the substitution is done right if the stats are already present
    if (overwrite) {
        variantsCollection.update(pullQueriesBulkList, pullUpdatesBulkList, new QueryOptions());
    }
    BulkWriteResult writeResult = variantsCollection
            .update(pushQueriesBulkList, pushUpdatesBulkList, new QueryOptions()).first();
    int writes = writeResult.getModifiedCount();

    return new QueryResult<>("", ((int) (System.nanoTime() - start)), writes, writes, "", "",
            Collections.singletonList(writeResult));
}

From source file:org.opencb.opencga.storage.mongodb.variant.adaptors.VariantMongoDBAdaptor.java

License:Apache License

/**
 * Two-step insertion:
 * First, check that the variant and study exist by making an update.
 * For those that don't exist, push a study with the file and genotype information.
 * <p>
 * The documents that throw a "dup key" exception are those variants that exist and have the study.
 * Then, only for those variants, make a second update.
 * <p>
 * *An interesting idea would be to invert these actions depending on the number of already inserted variants.
 *
 * @param data                        Variants to insert
 * @param fileId                      File ID
 * @param variantConverter            Variant converter to be used
 * @param variantSourceEntryConverter Variant source converter to be used
 * @param studyConfiguration          Configuration for the study
 * @param loadedSampleIds             Other loaded sampleIds EXCEPT those that are going to be loaded
 * @return QueryResult object
 */
QueryResult<MongoDBVariantWriteResult> insert(List<Variant> data, int fileId,
        DocumentToVariantConverter variantConverter,
        DocumentToStudyVariantEntryConverter variantSourceEntryConverter, StudyConfiguration studyConfiguration,
        List<Integer> loadedSampleIds) {

    MongoDBVariantWriteResult writeResult = new MongoDBVariantWriteResult();
    long startTime = System.currentTimeMillis();
    if (data.isEmpty()) {
        return new QueryResult<>("insertVariants", 0, 1, 1, "", "", Collections.singletonList(writeResult));
    }
    List<Bson> queries = new ArrayList<>(data.size());
    List<Bson> updates = new ArrayList<>(data.size());
    // Use a multiset instead of a normal set, to keep tracking of duplicated variants
    Multiset<String> nonInsertedVariants = HashMultiset.create();
    String fileIdStr = Integer.toString(fileId);

    //        List<String> extraFields = studyConfiguration.getAttributes().getAsStringList(VariantStorageEngine.Options.EXTRA_GENOTYPE_FIELDS
    //                .key());
    boolean excludeGenotypes = studyConfiguration.getAttributes().getBoolean(
            VariantStorageEngine.Options.EXCLUDE_GENOTYPES.key(),
            VariantStorageEngine.Options.EXCLUDE_GENOTYPES.defaultValue());

    long nanoTime = System.nanoTime();
    Map missingSamples = Collections.emptyMap();
    String defaultGenotype = studyConfiguration.getAttributes().getString(DEFAULT_GENOTYPE.key(), "");
    if (defaultGenotype.equals(DocumentToSamplesConverter.UNKNOWN_GENOTYPE)) {
        logger.debug("Do not need fill gaps. DefaultGenotype is UNKNOWN_GENOTYPE({}).",
                DocumentToSamplesConverter.UNKNOWN_GENOTYPE);
    } else if (excludeGenotypes) {
        logger.debug("Do not need fill gaps. Excluding genotypes.");
    } else if (!loadedSampleIds.isEmpty()) {
        missingSamples = new Document(DocumentToSamplesConverter.UNKNOWN_GENOTYPE, loadedSampleIds); // ?/?
    }
    //            List<Object> missingOtherValues = new ArrayList<>(loadedSampleIds.size());
    //            for (int i = 0; i < loadedSampleIds.size(); i++) {
    //                missingOtherValues.add(DBObjectToSamplesConverter.UNKNOWN_FIELD);
    //            }
    for (Variant variant : data) {
        if (variant.getType().equals(VariantType.NO_VARIATION)) {
            //Storage-MongoDB is not able to store NON VARIANTS
            writeResult.setSkippedVariants(writeResult.getSkippedVariants() + 1);
            continue;
        } else if (variant.getType().equals(VariantType.SYMBOLIC)) {
            logger.warn("Skip symbolic variant " + variant.toString());
            writeResult.setSkippedVariants(writeResult.getSkippedVariants() + 1);
            continue;
        }
        String id = variantConverter.buildStorageId(variant);
        for (StudyEntry studyEntry : variant.getStudies()) {
            if (studyEntry.getFiles().size() == 0
                    || !studyEntry.getFiles().get(0).getFileId().equals(fileIdStr)) {
                continue;
            }
            int studyId = studyConfiguration.getStudyId();
            Document study = variantSourceEntryConverter.convertToStorageType(variant, studyEntry);
            Document genotypes = study.get(DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD,
                    Document.class);
            if (genotypes != null) { //If genotypes is null, genotypes are not supposed to be loaded
                genotypes.putAll(missingSamples); //Add missing samples
                //                        for (String extraField : extraFields) {
                //                            List<Object> otherFieldValues = (List<Object>) study.get(extraField.toLowerCase());
                //                            otherFieldValues.addAll(0, missingOtherValues);
                //                        }
            }
            Document push = new Document(DocumentToVariantConverter.STUDIES_FIELD, study);
            Document update = new Document().append("$push", push).append("$setOnInsert",
                    variantConverter.convertToStorageType(variant));
            if (variant.getIds() != null && !variant.getIds().isEmpty()
                    && !variant.getIds().iterator().next().isEmpty()) {
                update.put("$addToSet", new Document(DocumentToVariantConverter.IDS_FIELD,
                        new Document("$each", variant.getIds())));
            }
            // { _id: <variant_id>, "studies.sid": {$ne: <studyId> } }
            //If the variant exists and contains the study, this find will fail, will try to do the upsert, and throw a
            // duplicated key exception.
            queries.add(new Document("_id", id).append(
                    DocumentToVariantConverter.STUDIES_FIELD + "."
                            + DocumentToStudyVariantEntryConverter.STUDYID_FIELD,
                    new Document("$ne", studyId)));
            updates.add(update);
        }
    }

    //
    if (!queries.isEmpty()) {
        QueryOptions options = new QueryOptions(UPSERT, true);
        options.put(MULTI, false);
        int newDocuments;
        int updatedObjects;

        try {
            BulkWriteResult bulkWriteResult;
            bulkWriteResult = variantsCollection.update(queries, updates, options).first();
            newDocuments = bulkWriteResult.getUpserts().size();
            updatedObjects = bulkWriteResult.getModifiedCount();
        } catch (MongoBulkWriteException e) {
            BulkWriteResult bulkWriteResult;
            bulkWriteResult = e.getWriteResult();
            newDocuments = bulkWriteResult.getUpserts().size();
            updatedObjects = bulkWriteResult.getModifiedCount();
            for (BulkWriteError writeError : e.getWriteErrors()) {
                if (writeError.getCode() == 11000) { //Dup Key error code
                    Matcher matcher = writeResultErrorPattern.matcher(writeError.getMessage());
                    if (matcher.find()) {
                        String id = matcher.group(1);
                        nonInsertedVariants.add(id);
                    } else {
                        throw e;
                    }
                } else {
                    throw e;
                }
            }
        }

        writeResult.setNewVariants(newDocuments);
        writeResult.setUpdatedVariants(updatedObjects);
        //                writeResult.setNewDocuments(data.size() - nonInsertedVariants.size() - writeResult.getSkippedVariants());
        queries.clear();
        updates.clear();
    }
    writeResult.setNewVariantsNanoTime(System.nanoTime() - nanoTime);
    nanoTime = System.nanoTime();

    for (Variant variant : data) {
        variant.setAnnotation(null);
        String id = variantConverter.buildStorageId(variant);

        if (nonInsertedVariants != null && !nonInsertedVariants.contains(id)) {
            continue; //Already inserted variant
        }

        for (StudyEntry studyEntry : variant.getStudies()) {
            if (studyEntry.getFiles().size() == 0
                    || !studyEntry.getFiles().get(0).getFileId().equals(fileIdStr)) {
                continue;
            }

            Document studyObject = variantSourceEntryConverter.convertToStorageType(variant, studyEntry);
            Document genotypes = studyObject.get(DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD,
                    Document.class);
            Document push = new Document();

            if (!excludeGenotypes) {
                if (genotypes != null) { //If genotypes is null, genotypes are not supposed to be loaded
                    for (String genotype : genotypes.keySet()) {
                        push.put(
                                DocumentToVariantConverter.STUDIES_FIELD + ".$."
                                        + DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD + "." + genotype,
                                new Document("$each", genotypes.get(genotype)));
                    }
                    //                    for (String extraField : extraFields) {
                    //                        List values = (List) studyObject.get(extraField.toLowerCase());
                    //                        push.put(DBObjectToVariantConverter.STUDIES_FIELD + ".$." + extraField.toLowerCase(),
                    //                                new Document("$each", values).append("$position", loadedSampleIds.size()));
                    //                    }
                } else {
                    push.put(
                            DocumentToVariantConverter.STUDIES_FIELD + ".$."
                                    + DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD,
                            Collections.emptyMap());
                }
            }
            push.put(
                    DocumentToVariantConverter.STUDIES_FIELD + ".$."
                            + DocumentToStudyVariantEntryConverter.FILES_FIELD,
                    ((List) studyObject.get(DocumentToStudyVariantEntryConverter.FILES_FIELD)).get(0));
            Document update = new Document(new Document("$push", push));

            queries.add(new Document("_id", id)
                    .append(DocumentToVariantConverter.STUDIES_FIELD + '.'
                            + DocumentToStudyVariantEntryConverter.STUDYID_FIELD,
                            studyConfiguration.getStudyId())
                    .append(DocumentToVariantConverter.STUDIES_FIELD + '.'
                            + DocumentToStudyVariantEntryConverter.FILES_FIELD + '.'
                            + DocumentToStudyVariantEntryConverter.FILEID_FIELD, new Document("$ne", fileId)));
            updates.add(update);

        }
    }
    writeResult.setExistingVariantsNanoTime(System.nanoTime() - nanoTime);

    if (!queries.isEmpty()) {
        QueryOptions options = new QueryOptions(UPSERT, false);
        options.put(MULTI, false);
        QueryResult<BulkWriteResult> update = variantsCollection.update(queries, updates, options);
        // Can happen that nonInsertedVariantsNum != queries.size() != nonInsertedVariants.size() if there was
        // a duplicated variant.
        writeResult.setNonInsertedVariants(nonInsertedVariants.size() - update.first().getMatchedCount());
        writeResult.setUpdatedVariants(writeResult.getUpdatedVariants() + update.first().getModifiedCount());
    }

    return new QueryResult<>("insertVariants", ((int) (System.currentTimeMillis() - startTime)), 1, 1, "", "",
            Collections.singletonList(writeResult));
}

From source file:org.opencb.opencga.storage.mongodb.variant.load.stage.MongoDBVariantStageLoader.java

License:Apache License

/**
 * Given a map of id -> binary[], inserts the binary objects in the stage collection.
 *
 * {
 *     <studyId> : {
 *         <fileId> : [ BinData(), BinData() ]
 *     }
 * }
 *
 * The field <fileId> is an array to detect duplicated variants within the same file.
 *
 * It may happen that an update with upsert:true fails if two different threads try to
 * update the same non-existing document.
 * See https://jira.mongodb.org/browse/SERVER-14322
 *
 * In that case, the non inserted values will be returned.
 *
 * @param values        Map with all the values to insert
 * @param result        MongoDBVariantWriteResult to fill
 * @param retryIds      List of IDs to retry. If not null, only documents within this set will be updated
 * @return              List of non updated documents.
 * @throws MongoBulkWriteException if the exception was not a DuplicatedKeyException (e:11000)
 */
private Set<String> updateMongo(ListMultimap<Document, Binary> values, MongoDBVariantWriteResult result,
        Set<String> retryIds) {

    Set<String> nonInsertedIds = Collections.emptySet();
    if (values.isEmpty()) {
        return nonInsertedIds;
    }
    List<Bson> queries = new LinkedList<>();
    List<Bson> updates = new LinkedList<>();
    for (Document id : values.keySet()) {
        if (retryIds == null || retryIds.contains(id.getString("_id"))) {
            List<Binary> binaryList = values.get(id);
            queries.add(eq("_id", id.getString("_id")));
            if (binaryList.size() == 1) {
                updates.add(combine(
                        resumeStageLoad ? addToSet(fieldName, binaryList.get(0))
                                : push(fieldName, binaryList.get(0)),
                        setOnInsert(END_FIELD, id.get(END_FIELD)), setOnInsert(REF_FIELD, id.get(REF_FIELD)),
                        setOnInsert(ALT_FIELD, id.get(ALT_FIELD))));
            } else {
                updates.add(combine(
                        resumeStageLoad ? addEachToSet(fieldName, binaryList) : pushEach(fieldName, binaryList),
                        setOnInsert(END_FIELD, id.get(END_FIELD)), setOnInsert(REF_FIELD, id.get(REF_FIELD)),
                        setOnInsert(ALT_FIELD, id.get(ALT_FIELD))));
            }
        }
    }

    try {
        final BulkWriteResult mongoResult = collection.update(queries, updates, QUERY_OPTIONS).first();
        result.setNewVariants(mongoResult.getInsertedCount())
                .setUpdatedVariants(mongoResult.getModifiedCount());
    } catch (MongoBulkWriteException e) {
        result.setNewVariants(e.getWriteResult().getInsertedCount())
                .setUpdatedVariants(e.getWriteResult().getModifiedCount());

        if (retryIds != null) {
            // If retryIds != null, this was the second attempt to update. In this case, fail.
            LOGGER.error("BulkWriteErrors when retrying the updates");
            throw e;
        }

        nonInsertedIds = new HashSet<>();
        for (BulkWriteError writeError : e.getWriteErrors()) {
            if (ErrorCategory.fromErrorCode(writeError.getCode()).equals(ErrorCategory.DUPLICATE_KEY)) { //Dup Key error code
                Matcher matcher = DUP_KEY_WRITE_RESULT_ERROR_PATTERN.matcher(writeError.getMessage());
                if (matcher.find()) {
                    String id = matcher.group(1);
                    nonInsertedIds.add(id);
                    LOGGER.warn("Catch error : {}", writeError.toString());
                    LOGGER.warn("DupKey exception inserting '{}'. Retry!", id);
                } else {
                    LOGGER.error("WriteError with code {} does not match with the pattern {}",
                            writeError.getCode(), DUP_KEY_WRITE_RESULT_ERROR_PATTERN.pattern());
                    throw e;
                }
            } else {
                throw e;
            }
        }
    }
    return nonInsertedIds;
}

From source file:org.restheart.handlers.bulk.BulkResultRepresentationFactory.java

License:Open Source License

private void addBulkResult(final BulkOperationResult result, final RequestContext context,
        final Representation rep, final String requestPath) {
    Representation nrep = new Representation();

    BulkWriteResult wr = result.getBulkResult();

    if (wr.wasAcknowledged()) {
        if (wr.getUpserts() != null) {
            nrep.addProperty("inserted", new BsonInt32(wr.getUpserts().size()));

            // add links to new, upserted documents
            wr.getUpserts().stream().forEach(update -> {
                nrep.addLink(
                        new Link("rh:newdoc", URLUtils.getReferenceLink(context, requestPath, update.getId())),
                        true);
            });
        }

        nrep.addProperty("deleted", new BsonInt32(wr.getDeletedCount()));

        if (wr.isModifiedCountAvailable()) {
            nrep.addProperty("modified", new BsonInt32(wr.getModifiedCount()));
        }

        nrep.addProperty("matched", new BsonInt32(wr.getMatchedCount()));

        rep.addRepresentation("rh:result", nrep);
    }
}

From source file:org.restheart.handlers.bulk.BulkResultRepresentationFactory.java

License:Open Source License

private void addWriteResult(final BulkWriteResult wr, final Representation rep, final String requestPath) {
    Representation nrep = new Representation();

    if (wr.wasAcknowledged()) {
        if (wr.getUpserts() != null) {
            nrep.addProperty("inserted", new BsonInt32(wr.getUpserts().size()));

            // add links to new, upserted documents
            wr.getUpserts().stream().forEach(update -> {
                nrep.addLink(new Link("rh:newdoc", URLUtils.getReferenceLink(requestPath, update.getId())),
                        true);
            });
        }

        nrep.addProperty("deleted", new BsonInt32(wr.getDeletedCount()));

        if (wr.isModifiedCountAvailable()) {
            nrep.addProperty("modified", new BsonInt32(wr.getModifiedCount()));
        }

        nrep.addProperty("matched", new BsonInt32(wr.getMatchedCount()));

        rep.addRepresentation("rh:result", nrep);
    }
}

From source file:uk.ac.ebi.eva.dbmigration.mongodb.ExtractAnnotationFromVariant.java

License:Apache License

@ChangeSet(order = "003", id = "reduceAnnotationFromVariants", author = "EVA")
public void reduceAnnotationFromVariants(MongoDatabase mongoDatabase) {
    final MongoCollection<Document> variantsCollection = mongoDatabase
            .getCollection(databaseParameters.getDbCollectionsVariantsName());
    logger.info("3) reduce annotation field from collection {}", variantsCollection.getNamespace());

    long annotationsReadCount = 0;
    long annotationsUpdatedCount = 0;
    BulkWriteOptions unorderedBulk = new BulkWriteOptions().ordered(false);
    Document onlyAnnotatedVariants = new Document(ANNOT_FIELD, EXISTS);
    try (MongoCursor<Document> cursor = variantsCollection.find(onlyAnnotatedVariants).iterator()) {
        while (true) {
            List<UpdateOneModel<Document>> annotationsToUpdate = getBatch(cursor, BULK_SIZE).stream()
                    .map(this::buildUpdateDocument).collect(toList());

            if (annotationsToUpdate.isEmpty()) {
                break;
            }
            annotationsReadCount += annotationsToUpdate.size();
            BulkWriteResult bulkInsert = variantsCollection.bulkWrite(annotationsToUpdate, unorderedBulk);
            annotationsUpdatedCount += bulkInsert.getModifiedCount();
        }
    }
    if (annotationsReadCount != annotationsUpdatedCount) {
        throw new RuntimeException("The number of processed Variants (" + annotationsReadCount
                + ") is different from the number of annotations updated (" + annotationsUpdatedCount + ").");
    }
}