Example usage for com.mongodb WriteResult getN

List of usage examples for com.mongodb WriteResult getN

Introduction

On this page you can find example usages for com.mongodb WriteResult getN.

Prototype

public int getN() 

Document

Gets the "n" field, which contains the number of documents affected in the write operation.

Usage

From source file:com.tomtom.speedtools.mongodb.DaoUtils.java

License:Apache License

/**
 * Update a document in a collection.
 *
 * @param collection         Collection that contains the document.
 * @param query              Query to find the document.
 * @param update             Update for document.
 * @param updateLastModified True if the last modified time needs to be adjusted as well (to now).
 * @throws EntityStoreException    New document cannot be stored. The error will have been logged.
 * @throws EntityNotFoundException The document was not found. The error will have been logged.
 */
public static void update(@Nonnull final DBCollection collection, @Nonnull final MongoDBQuery query,
        @Nonnull final MongoDBUpdate update, final boolean updateLastModified)
        throws EntityStoreException, EntityNotFoundException {
    assert collection != null;
    assert query != null;
    assert update != null;
    try {

        // Update last modified time.
        if (updateLastModified) {
            update.setRaw(MongoDBKeyNames.LAST_MODIFIED_KEY, UTCTime.now().toDate());
        }

        // Make sure upsert is set to false to not create new records on the fly.
        final WriteResult result = collection.update(query.toDBObject(), update.toDBObject(), NO_UPSERT,
                NO_MULTI, writeConcern);
        if (result.getN() == 0) {
            final String message = "Couldn't find entity to update, query: " + query + ", update: " + update
                    + ", " + "collection: " + collection.getName() + '.';
            LOG.error("update: {}", message);
            throw new EntityNotFoundException(message);
        }
    } catch (final MapperException | MongoException e) {
        final String message = "Couldn't map entity to update, query: " + query + ", update: " + update + ", "
                + "collection: " + collection.getName() + '.';
        LOG.error("update: " + message, e);
        throw new EntityStoreException(message, e);
    }
}
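
A hedged sketch of a call site for the helper above; the personCollection, query, and update values are assumed to be prepared elsewhere, and the names are invented for illustration:

try {
    DaoUtils.update(personCollection, query, update, true);
    // Success: getN() was non-zero, so the matched document was updated.
} catch (final EntityNotFoundException e) {
    // Thrown when result.getN() == 0, i.e. the query matched no document.
} catch (final EntityStoreException e) {
    // Mapping or MongoDB failure; DaoUtils has already logged it.
}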

From source file:com.tomtom.speedtools.mongodb.DaoUtils.java

License:Apache License

/**
 * Update or insert a document in a collection.
 *
 * @param collection         Collection that contains the document.
 * @param query              Query to find the document.
 * @param value              Update (or initial value) for document.
 * @param updateLastModified True if the last modified time needs to be adjusted as well (to now).
 * @return Number of records updated (0 if inserted, 1 if updated).
 * @throws EntityStoreException New document cannot be stored. The error will have been logged.
 */
public static int upsert(@Nonnull final DBCollection collection, @Nonnull final MongoDBQuery query,
        @Nonnull final DBObject value, final boolean updateLastModified) throws EntityStoreException {
    assert collection != null;
    assert query != null;
    assert value != null;
    try {

        // Update last modified time.
        if (updateLastModified) {
            value.put(MongoDBKeyNames.LAST_MODIFIED_KEY, UTCTime.now().toDate());
        }

        // Make sure upsert is set to true to create the object if it is not found.
        final WriteResult result = collection.update(query.toDBObject(), value, UPSERT, NO_MULTI, writeConcern);
        final int nr = result.getN();
        if (nr == 0) {
            LOG.debug("upsert: Inserted new object, query={}, collection={}", query, collection.getName());
        }
        return nr;
    } catch (final MapperException | MongoException e) {
        final String message = "Couldn't map entity to update, query: " + query + ", update: " + value + ", "
                + "collection: " + collection.getName() + '.';
        LOG.error("upsert: " + message, e);
        throw new EntityStoreException(message, e);
    }
}
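
Since the return value forwards getN(), a caller can distinguish the two outcomes documented in the Javadoc above. A sketch under the same assumptions (names invented):

final int updated = DaoUtils.upsert(personCollection, query, value, true);
if (updated == 0) {
    // Per the contract above: nothing matched, so a new document was inserted.
} else {
    // 'updated' existing document(s) were modified in place.
}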

From source file:de.adorsys.tanserver.repository.TANForAccountAndRequestIdRepository.java

License:Apache License

public boolean deleteTAN(String accountId, String requestId, String activationTAN) {
    WriteResult result = deleteByQuery(getDatastore().createQuery(TANForAccountAndRequestId.class)
            .filter("accountId", accountId).filter("requestId", requestId).filter("tan", activationTAN));
    return result.getN() > 0;
}
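
For removes, getN() reports the number of documents deleted, which is what makes the boolean test above work. The equivalent check with the plain legacy driver might look like this (the tanCollection variable is an assumption):

final WriteResult result = tanCollection.remove(
        new BasicDBObject("accountId", accountId)
                .append("requestId", requestId)
                .append("tan", activationTAN));
final boolean deleted = result.getN() > 0; // true iff at least one document was removed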

From source file:eu.mondo.driver.mongo.MongoGraphDriver.java

License:Open Source License

@Override
public void insertVertex(final String vertexType, final String vertexURI) throws IOException {
    String rdfType = RDF_PREFIX + "type";

    WriteResult insert = collection.insert(
            "{ \"subject\": #, \"predicate\": #, \"object\": #, \"subjectBI\": #, \"predicateBI\": #, \"objectBI\": # }",
            vertexURI, rdfType, vertexType, saveNode(vertexURI), saveNode(rdfType), saveNode(vertexType));

    if (isShowCommandOutput()) {
        System.out.println("INSERTED vertex " + vertexURI + " type: " + vertexType + " with " + insert.getN()
                + " modifications.");
    }
}
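
Note that what getN() reports for an insert depends on the driver and wire protocol in use: the legacy getLastError reply reports n as 0 for inserts, while the newer insert command reports the number of documents inserted, so the count printed here may well be 0.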

From source file:eu.mondo.driver.mongo.MongoGraphDriver.java

License:Open Source License

@Override
public void insertEdge(final String sourceVertexURI, final String destinationVertexURI, final String edgeURI)
        throws IOException {
    WriteResult insert = collection.insert(
            "{ \"subject\": #, \"predicate\": #, \"object\": #, \"subjectBI\": #, \"predicateBI\": #, \"objectBI\": # }",
            sourceVertexURI, edgeURI, destinationVertexURI, saveNode(sourceVertexURI), saveNode(edgeURI),
            saveNode(destinationVertexURI));

    if (isShowCommandOutput()) {
        System.out.println("INSERTED edge between " + sourceVertexURI + " and " + destinationVertexURI
                + ", type: " + edgeURI + " with " + insert.getN() + " modifications.");
    }
}

From source file:eu.mondo.driver.mongo.MongoGraphDriver.java

License:Open Source License

@Override
public void deleteVertex(final String vertexURI) throws IOException {
    // delete "properties", "outgoing edges" and the vertex itself
    WriteResult remove1 = collection.remove("{ \"subject\": # }", vertexURI);

    // "delete incoming edges"
    WriteResult remove2 = collection.remove("{ \"object\": # }", vertexURI);

    if (isShowCommandOutput()) {
        System.out.println("REMOVED " + vertexURI + " with " + remove1.getN() + remove2.getN() + " tuples.");
    }
}

From source file:eu.mondo.driver.mongo.MongoGraphDriver.java

License:Open Source License

@Override
public void deleteEdge(final String sourceVertexURI, final String destinationVertexURI, final String edgeURI)
        throws IOException {

    WriteResult remove = collection.remove("{ \"subject\": #, \"predicate\": #, \"object\": # }",
            sourceVertexURI, edgeURI, destinationVertexURI);

    if (isShowCommandOutput()) {
        System.out.println("DELETED " + sourceVertexURI + " -- " + edgeURI + " -> " + destinationVertexURI
                + " with " + remove.getN() + " removals.");
    }

}

From source file:ezbake.data.mongo.EzMongoHandler.java

License:Apache License

@Override
public int remove(String collectionName, String jsonQuery, EzSecurityToken security)
        throws TException, EzMongoBaseException {
    try {
        HashMap<String, String> auditParamsMap = new HashMap<>();
        auditParamsMap.put("action", "remove");
        auditParamsMap.put("collectionName", collectionName);
        auditParamsMap.put("jsonQuery", jsonQuery);
        auditLog(security, AuditEventType.FileObjectDelete, auditParamsMap);

        TokenUtils.validateSecurityToken(security, this.getConfigurationProperties());

        if (StringUtils.isEmpty(collectionName)) {
            throw new EzMongoBaseException("collectionName is required.");
        }

        final String finalCollectionName = getCollectionName(collectionName);

        int removedCount = 0;

        // see if we are able to remove the data in db with user's classification in the user token
        final List<DBObject> results = mongoFindHelper.findElements(collectionName, jsonQuery, "{ _id: 1}",
                null, 0, 0, false, security, false, WRITE_OPERATION);
        if (results.size() > 0) {

            // construct a list of Objects to use as the filter
            final List<Object> idList = new ArrayList<Object>();
            for (final DBObject result : results) {
                appLog.info("can remove DBObject (_id): {}", result);

                idList.add(result.get("_id"));
            }

            final DBObject inClause = new BasicDBObject("$in", idList);
            final DBObject query = new BasicDBObject("_id", inClause);

            Timer.Context context = getMetricRegistry().getTimers().get(REMOVE_TIMER_NAME).time();

            try {
                final WriteResult writeResult = db.getCollection(finalCollectionName).remove(query);

                appLog.info("removed - write result: {}", writeResult.toString());

                removedCount = writeResult.getN();
            } finally {
                context.stop();
            }
        } else {
            appLog.info("Did not find any documents to remove with the query {}", jsonQuery);
        }

        appLog.info("after remove, removedCount: {}", removedCount);

        return removedCount;
    } catch (final Exception e) {
        throw enrichException("remove", e);
    }
}

From source file:ezbake.data.mongo.helper.MongoUpdateHelper.java

License:Apache License

public int updateContent(String finalCollectionName, DBObject query, DBObject content, boolean upsert,
        boolean multi) {

    final WriteResult writeResult = ezMongoHandler.getDb().getCollection(finalCollectionName).update(query,
            content, upsert, multi);

    appLog.info("updated - write result: {}", writeResult.toString());

    return writeResult.getN();
}
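
When the multi flag is true, getN() counts every document affected by the update, not just the first. A hedged sketch of calling this helper; the mongoUpdateHelper instance, collection name, and fields are invented for illustration:

// Archive every draft in one statement; with multi=true, getN() is the total count.
final int affected = mongoUpdateHelper.updateContent(
        "documents",
        new BasicDBObject("status", "draft"),
        new BasicDBObject("$set", new BasicDBObject("archived", true)),
        false, // upsert: do not create a document if none matches
        true); // multi: update every matching document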

From source file:fr.cirad.web.controller.gigwa.base.AbstractVariantController.java

License:Open Source License

/**
 * Find variants.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the proj id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the gt code
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param wantedFields the wanted fields
 * @param page the page
 * @param size the size
 * @param sortBy the sort by
 * @param sortDir the sort dir
 * @param processID the process id
 * @return true, if successful
 * @throws Exception the exception
 */
@RequestMapping(variantFindURL)
/**
 * This method builds a list of variants in a temporary collection that may be used later for browsing or exporting results.
 */
protected @ResponseBody boolean findVariants(HttpServletRequest request, @RequestParam("module") String sModule,
        @RequestParam("project") int projId, @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") int genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") int readDepthThreshold,
        @RequestParam("missingData") double missingData, @RequestParam("minmaf") Float minmaf,
        @RequestParam("maxmaf") Float maxmaf, @RequestParam("minposition") Long minposition,
        @RequestParam("maxposition") Long maxposition, @RequestParam("alleleCount") String alleleCount,
        @RequestParam("geneName") String geneName, @RequestParam("variantEffects") String variantEffects,
        @RequestParam("wantedFields") String wantedFields, @RequestParam("page") int page,
        @RequestParam("size") int size, @RequestParam("sortBy") String sortBy,
        @RequestParam("sortDir") String sortDir, @RequestParam("processID") String processID) throws Exception {
    long before = System.currentTimeMillis();

    String token = processID.substring(1 + processID.indexOf('|'));

    final ProgressIndicator progress = new ProgressIndicator(token, new String[0]);
    ProgressIndicator.registerProgressIndicator(progress);
    progress.addStep("Loading results");

    String actualSequenceSelection = selectedSequences;
    if (actualSequenceSelection.length() == 0) {
        ArrayList<String> externallySelectedSeqs = getSequenceIDsBeingFilteredOn(request, sModule);
        if (externallySelectedSeqs != null)
            actualSequenceSelection = StringUtils.join(externallySelectedSeqs, ";");
    }

    List<String> selectedSequenceList = actualSequenceSelection.length() == 0 ? null
            : Arrays.asList(actualSequenceSelection.split(";"));
    String queryKey = getQueryKey(request, sModule, projId, selectedVariantTypes, selectedSequences,
            selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData, minmaf,
            maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects);

    final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    DBCollection cachedCountCollection = mongoTemplate.getCollection(MgdbDao.COLLECTION_NAME_CACHED_COUNTS);
    DBCursor countCursor = cachedCountCollection.find(new BasicDBObject("_id", queryKey));

    final DBCollection variantColl = mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantData.class));
    final Object[] partialCountArray = !countCursor.hasNext() ? null
            : ((BasicDBList) countCursor.next().get(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE)).toArray();

    final DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, progress.getProcessId(), false);

    String sRegexOrAggregationOperator = GenotypingDataQueryBuilder.getGenotypePatternToQueryMap()
            .get(gtPattern);
    boolean fNeedToFilterOnGenotypingData = needToFilterOnGenotypingData(sModule, projId,
            sRegexOrAggregationOperator, genotypeQualityThreshold, readDepthThreshold, missingData, minmaf,
            maxmaf, geneName, variantEffects);
    final BasicDBList variantQueryDBList = buildVariantDataQuery(sModule, projId,
            selectedVariantTypes.length() == 0 ? null : Arrays.asList(selectedVariantTypes.split(";")),
            selectedSequenceList, minposition, maxposition,
            alleleCount.length() == 0 ? null : Arrays.asList(alleleCount.split(";")));

    if (!variantQueryDBList.isEmpty()
            && tmpVarColl.count() == 0 /* otherwise we kept the preliminary list from the count procedure */) { // apply filter on variant features
        progress.setProgressDescription("Filtering variants for display...");
        long beforeAggQuery = System.currentTimeMillis();
        List<DBObject> pipeline = new ArrayList<DBObject>();
        pipeline.add(new BasicDBObject("$match", new BasicDBObject("$and", variantQueryDBList)));
        BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
        projectObject.put(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE);
        projectObject.put(
                VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE);
        projectObject.put(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_END_SITE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_END_SITE);
        projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
        projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST,
                "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
        pipeline.add(new BasicDBObject("$project", projectObject));

        pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
        variantColl.aggregate(pipeline);

        LOG.debug("Variant preliminary query found " + tmpVarColl.count() + " results in "
                + (System.currentTimeMillis() - beforeAggQuery) / 1000f + "s");
        progress.setProgressDescription(null);
    } else if (fNeedToFilterOnGenotypingData && tmpVarColl.count() > 0)
        LOG.debug(
                "Re-using " + tmpVarColl.count() + " results from count procedure's variant preliminary query");

    if (progress.hasAborted())
        return false;

    if (fNeedToFilterOnGenotypingData) { // now filter on genotyping data
        final ConcurrentLinkedQueue<Thread> queryThreadsToWaitFor = new ConcurrentLinkedQueue<Thread>(),
                removalThreadsToWaitFor = new ConcurrentLinkedQueue<Thread>();
        final AtomicInteger finishedThreadCount = new AtomicInteger(0);
        final ConcurrentSkipListSet<Comparable> allVariantsThatPassRunFilter = new ConcurrentSkipListSet<Comparable>();

        GigwaSearchVariantsExportRequest gsvr = new GigwaSearchVariantsExportRequest();
        gsvr.setAlleleCount(alleleCount);
        if (minposition != null)
            gsvr.setStart(minposition);
        if (maxposition != null)
            gsvr.setEnd(maxposition);
        gsvr.setGeneName(geneName);
        gsvr.setReferenceName(selectedSequences);
        gsvr.setSelectedVariantTypes(selectedVariantTypes);
        gsvr.setVariantEffect(variantEffects);
        gsvr.setVariantSetId(sModule + ServiceInterface.ID_SEPARATOR + projId);

        gsvr.setMissingData(missingData);
        gsvr.setMinmaf(minmaf);
        gsvr.setMaxmaf(maxmaf);
        gsvr.setGtPattern(gtPattern);
        HashMap<String, Integer> annotationFieldThresholds = new HashMap<String, Integer>();
        annotationFieldThresholds.put(VCFConstants.GENOTYPE_QUALITY_KEY, genotypeQualityThreshold);
        annotationFieldThresholds.put(VCFConstants.DEPTH_KEY, readDepthThreshold);
        gsvr.setAnnotationFieldThresholds(annotationFieldThresholds);
        gsvr.setCallSetIds(selectedIndividuals == null || selectedIndividuals.length() == 0
                ? getIndividualsInDbOrder(sModule, projId)
                : Arrays.asList(selectedIndividuals.split(";")));

        final GenotypingDataQueryBuilder genotypingDataQueryBuilder = new GenotypingDataQueryBuilder(gsvr,
                tmpVarColl);
        genotypingDataQueryBuilder.keepTrackOfPreFilters(!variantQueryDBList.isEmpty());
        try {
            final int nChunkCount = genotypingDataQueryBuilder.getNumberOfQueries();
            if (nChunkCount != partialCountArray.length) {
                LOG.error("Different number of chunks between counting and listing variant rows!");
                progress.setError("Different number of chunks between counting and listing variant rows!");
                return false;
            }
            if (nChunkCount > 1)
                LOG.debug("Query split into " + nChunkCount);

            ArrayList<List<DBObject>> genotypingDataPipelines = new ArrayList<>();
            while (genotypingDataQueryBuilder.hasNext())
                genotypingDataPipelines.add(genotypingDataQueryBuilder.next());

            ArrayList<Integer> chunkIndices = new ArrayList<Integer>();
            for (int i = 0; i < genotypingDataPipelines.size(); i++)
                chunkIndices.add(i);
            Collections.shuffle(chunkIndices);

            for (int i = 0; i < chunkIndices.size(); i++) {
                final int chunkIndex = chunkIndices.get(i);
                final List<DBObject> genotypingDataPipeline = genotypingDataPipelines.get(chunkIndex);

                if (progress.hasAborted()) {
                    genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                    return false;
                }

                Thread t = new Thread() {
                    public void run() {
                        Cursor genotypingDataCursor = mongoTemplate
                                .getCollection(
                                        MongoTemplateManager.getMongoCollectionName(VariantRunData.class))
                                .aggregate(genotypingDataPipeline,
                                        AggregationOptions.builder().allowDiskUse(true).build());
                        final ArrayList<Comparable> variantsThatPassedRunFilter = new ArrayList<Comparable>();
                        while (genotypingDataCursor.hasNext())
                            variantsThatPassedRunFilter
                                    .add((Comparable) genotypingDataCursor.next().get("_id"));

                        if (variantQueryDBList.isEmpty()) // otherwise we won't need it
                            allVariantsThatPassRunFilter.addAll(variantsThatPassedRunFilter);
                        else { // mark the results we want to keep
                            final List<Comparable> lastUsedPreFilter = genotypingDataQueryBuilder
                                    .getPreFilteredIDsForChunk(chunkIndex);

                            Thread removalThread = new Thread() {
                                public void run() {
                                    genotypingDataPipeline.clear(); // release memory (VERY IMPORTANT)

                                    long beforeTempCollUpdate = System.currentTimeMillis();
                                    if (variantsThatPassedRunFilter.size() == lastUsedPreFilter.size())
                                        return; // none to remove

                                    Collection<Comparable> filteredOutVariants = variantsThatPassedRunFilter
                                            .size() == 0 ? lastUsedPreFilter
                                                    : CollectionUtils.subtract(lastUsedPreFilter,
                                                            variantsThatPassedRunFilter);
                                    BasicDBObject removalQuery = GenotypingDataQueryBuilder
                                            .tryAndShrinkIdList("_id", filteredOutVariants, 4);
                                    WriteResult wr = tmpVarColl.remove(removalQuery);
                                    LOG.debug("Chunk N." + (chunkIndex) + ": " + wr.getN()
                                            + " filtered-out temp records removed in "
                                            + (System.currentTimeMillis() - beforeTempCollUpdate) / 1000d
                                            + "s");

                                    progress.setCurrentStepProgress(
                                            (short) (finishedThreadCount.incrementAndGet() * 100
                                                    / nChunkCount));
                                }
                            };
                            removalThreadsToWaitFor.add(removalThread);
                            removalThread.start();
                        }
                    }
                };

                if (i % NUMBER_OF_SIMULTANEOUS_QUERY_THREADS == (NUMBER_OF_SIMULTANEOUS_QUERY_THREADS - 1))
                    t.run(); // sometimes run synchronously so that all queries are not sent at the same time (also helps smooth progress display)
                else {
                    queryThreadsToWaitFor.add(t);
                    t.start(); // run asynchronously for better speed
                }
            }

            // wait for all threads before moving to next phase
            for (Thread t : queryThreadsToWaitFor)
                t.join();
            for (Thread t : removalThreadsToWaitFor)
                t.join();
        } catch (Exception e) {
            genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
            throw e;
        }

        if (progress.hasAborted())
            return false;

        progress.addStep("Updating temporary results");
        progress.moveToNextStep();
        final long beforeTempCollUpdate = System.currentTimeMillis();
        mongoTemplate.getDb().setWriteConcern(WriteConcern.ACKNOWLEDGED);
        if (variantQueryDBList.isEmpty()) { // we filtered on runs only: keep track of the final dataset
            List<BasicDBObject> pipeline = new ArrayList<>();
            pipeline.add(new BasicDBObject("$match",
                    GenotypingDataQueryBuilder.tryAndShrinkIdList("_id", allVariantsThatPassRunFilter, 4)));
            BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
            projectObject.put(
                    VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                    "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                            + ReferencePosition.FIELDNAME_SEQUENCE);
            projectObject.put(
                    VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE,
                    "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                            + ReferencePosition.FIELDNAME_START_SITE);
            projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
            projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST,
                    "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
            projectObject.put(VariantData.FIELDNAME_VERSION, "$" + VariantData.FIELDNAME_VERSION);
            pipeline.add(new BasicDBObject("$project", projectObject));
            pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
            variantColl.aggregate(pipeline);
            LOG.debug(tmpVarColl.count() + " temp records created in "
                    + (System.currentTimeMillis() - beforeTempCollUpdate) / 1000d + "s");
        }
    }

    progress.markAsComplete();
    LOG.info("findVariants took " + (System.currentTimeMillis() - before) / 1000d + "s");
    return true;
}