Example usage for com.mongodb DBCollection save

List of usage examples for com.mongodb DBCollection save

Introduction

On this page you can find example usage for com.mongodb DBCollection save.

Prototype

public WriteResult save(final DBObject document) 

Document

Updates an existing document or inserts a new one, depending on whether the given document contains an _id field.
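
A minimal sketch of this behavior, using the legacy driver API and hypothetical host, database, and collection names: without an _id, save() performs an insert; with an _id, it replaces the matching document (upserting if none exists).

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

public class SaveExample {
    public static void main(String[] args) {
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        DB db = mongoClient.getDB("testdb");
        DBCollection collection = db.getCollection("example");

        // No _id yet: save() behaves like insert() and the driver generates
        // an ObjectId, writing it back into the DBObject.
        DBObject doc = new BasicDBObject("name", "alice");
        collection.save(doc);

        // The document now carries the generated _id, so saving it again
        // replaces the stored document instead of inserting a second one.
        doc.put("status", "updated");
        collection.save(doc);

        mongoClient.close();
    }
}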

Usage

From source file:com.tomtom.speedtools.mongodb.migratedb.MongoDBMigrator.java

License:Apache License

/**
 * Migrate the database to the requested toVersion.
 *
 * @param db     Database to migrate.
 * @param dryRun In dry-run mode, no modifications will be made to the database.
 * @return True if the database was modified (other than updating the schema version).
 * @throws MigrationException If an error was encountered during migration.
 */
public boolean migrate(@Nonnull final MongoDB db, final boolean dryRun) throws MigrationException {
    assert db != null;
    LOG.info("MigrateDB starting..." + (dryRun ? " (dryRun mode)" : ""));

    final Map<String, MongoDBMigration> migrationMap = new HashMap<>();
    for (final MongoDBMigration migration : migrations) {
        if (migrationMap.put(migration.getFromVersion(), migration) != null) {
            throw new MigrationException(
                    "Multiple migrations found with 'from'-version: " + migration.getFromVersion());
        }
    }

    // Read the current version from the database.
    final DBCollection collection = db.getCollection(MIGRATOR_COLLECTION_NAME);
    DBObject info = collection.findOne(new BasicDBObject("_id", INFO_ID));
    if (info == null) {
        info = new BasicDBObject("_id", INFO_ID);
    }
    Object currentVersionObj = info.get(CURRENT_VERSION);
    if (currentVersionObj == null) {
        currentVersionObj = getFirstVersion();
        info.put(CURRENT_VERSION, currentVersionObj);
    }
    final String currentVersion = currentVersionObj.toString().trim();

    // Check whether a previous migration was in progress.
    if (info.get(BUSY) != null) {
        throw new MigrationException("Previous migration was unsuccessful. Please restore database.");
    }

    // Indicate that migration is in progress.
    info.put(BUSY, "true");
    if (!dryRun) {
        info.put(CURRENT_VERSION, getTargetVersion());
        collection.save(info);
    }

    // Create migration path to toVersion.
    final List<MongoDBMigration> migrationPath = new ArrayList<>();
    String version = currentVersion;

    // Create a migration path.
    while (!version.equals(getTargetVersion())) {
        final MongoDBMigration migration = migrationMap.get(version);
        if (migration == null) {
            throw new MigrationException(
                    "No migration possible from version: " + version + " to version " + getTargetVersion());
        }
        migrationPath.add(migration);
        version = migration.getToVersion();
    }

    // Start migrating.
    boolean databaseChanged = false;
    List<MongoDBMigrationProblem> problems = Collections.emptyList();
    for (final MongoDBMigration migration : migrationPath) {
        LOG.info("Migrating database from version " + migration.getFromVersion() + " to version "
                + migration.getToVersion());
        try {
            migration.setDryRun(dryRun); // Do not change order:
            databaseChanged = migration.migrateChangedDatabase(db) || databaseChanged; // Always execute migrate!
            problems = migration.flush();
            if (!problems.isEmpty()) {
                break;
            }
        } catch (final MigrationException e) {
            LOG.error("Migration failed, please restore database from backup: " + e.getMessage());
            throw e;
        } catch (final RuntimeException e) {
            LOG.error("Migration failed, please restore database from backup: " + e.getMessage());
            if (e.getCause() instanceof MigrationException) {
                throw (MigrationException) e.getCause();
            }
            throw new MigrationException(e);
        }
    }

    // Close migration.
    info.put(CURRENT_VERSION, getTargetVersion());
    info.removeField(BUSY);
    if (!dryRun) {
        collection.save(info);
    }

    // Show problems.
    if (!problems.isEmpty()) {
        final StringBuilder problemString = new StringBuilder();
        problemString.append("Migration problems encountered:");
        for (final MongoDBMigrationProblem problem : problems) {
            problemString.append("\n  ").append(problem.getPath()).append(" - ").append(problem.getProblem());
        }
        final String str = problemString.toString();
        LOG.error(str);
    } else {
        LOG.info("Migration OK");
    }

    // Dry-run info.
    if (dryRun) {
        LOG.info("Migration was run in dry-run mode. No modifications were made to the database.");
        return false;
    }

    // Real mode.
    if (databaseChanged) {
        LOG.info("Database records have been modified (and schema version was updated).");
    } else {
        LOG.info("No database records have been modified (but schema version was updated).");
    }

    // Now, throw an exception if something went wrong.
    if (!problems.isEmpty()) {
        throw new MigrationException("Migration was not successful. Please restore database.");
    }
    return databaseChanged;
}

From source file:com.wordnik.system.mongodb.RestoreUtil.java

License:Open Source License

protected void write(CollectionInfo info, DBObject dbo) throws Exception {
    DB db = MongoDBConnectionManager.getConnection("TARGET", DATABASE_HOST, DATABASE_NAME, DATABASE_USER_NAME,
            DATABASE_PASSWORD, SchemaType.READ_WRITE());

    DBCollection coll = db.getCollection(info.getName());
    coll.save(dbo);
}

From source file:de.flapdoodle.mongoom.datastore.Datastore.java

License:Apache License

private <T> void store(Operation operation, T entity) {
    IEntityTransformation<T> converter = _transformations.transformation((Class<T>) entity.getClass());
    DBCollection dbCollection = _db.getCollection(converter.collection().name());
    Object idValue = converter.getId(entity);
    Object versionValue = converter.getVersion(entity);

    //      if (idValue == null)
    //         throw new MappingException(entity.getClass(), "Key is NULL");
    //      DBObject convertedEntity = converter.convertTo(entity);

    BasicDBObject key = new BasicDBObject();
    key.put(Const.ID_FIELDNAME, idValue);
    if (versionValue != null)
        key.put(Const.VERSION_FIELDNAME, versionValue);

    boolean reReadId = true;
    boolean mustHaveObjectId = false;
    boolean update = false;

    switch (operation) {
    case Delete:
        mustHaveObjectId = true;
        reReadId = false;
        break;
    case Save:
        mustHaveObjectId = true;
        break;
    case Update:
        reReadId = false;
        update = true;
        if (idValue == null)
            throw new MappingException(entity.getClass(), "Can not update Entities with Id not set");
        break;
    }

    try {
        _db.requestStart();
        if (mustHaveObjectId) {
            if ((idValue != null) && (!(idValue instanceof ObjectId))) {
                throw new MappingException(entity.getClass(), "Can not save Entities with custom Id");
            }
        }

        converter.newVersion(entity);
        DBObject convertedEntity = converter.asObject(entity);

        switch (operation) {
        case Insert:
            _logger.fine("Insert: " + convertedEntity);
            if (idValue != null) {
                _logger.log(Level.WARNING, "Insert with Id set: " + idValue, new Exception());
            }
            dbCollection.insert(convertedEntity);
            break;
        case Update:
            _logger.fine("Update: " + convertedEntity + " (Id: " + idValue + ")");
            //               BasicDBObject updateQuery=new BasicDBObject();
            //               updateQuery.put(Const.ID_FIELDNAME, idValue);
            dbCollection.update(key, convertedEntity, false, false);
            break;
        case Save:
            _logger.fine("Save: " + convertedEntity);
            dbCollection.save(convertedEntity);
            break;
        case Delete:
            _logger.fine("Delete: " + key);
            dbCollection.remove(key);
            break;
        default:
            throw new ObjectMapperException("Operation not supported: " + operation);
        }

        if (reReadId) {
            Object savedIdValue = convertedEntity.get(Const.ID_FIELDNAME);
            converter.setId(entity, savedIdValue);
        }

        Errors.checkError(_db, operation);

        if (operation == Operation.Delete) {
            converter.setId(entity, null);
        }
    } finally {
        _db.requestDone();
    }

}

From source file:de.unimannheim.infor.swt.uim.actions.MogoDBCreate.java

License:Open Source License

public static void entityupdate(String entitychildFQN, String entityfatherFQN) {
    try {
        MongoClient mongoClient = new MongoClient(mdlocalhost, mdport);
        DB db = mongoClient.getDB(tmtextdb);
        boolean auth = db.authenticate(tmtextuser, tmtextpassword.toCharArray());
        DBCollection collection = db.getCollection("entity");

        DBCursor curs = collection.find();
        while (curs.hasNext()) {
            DBObject dbObject = curs.next();
            String FQN = ((BasicBSONObject) dbObject).getString("FQN");
            String exdirecttype = ((BasicBSONObject) dbObject).getString("directtype");
            if (FQN.equals(entityfatherFQN)) {

                dbObject.put("directtype", entitychildFQN);
                collection.save(dbObject);
            }

        }
    } catch (UnknownHostException e) {
        e.printStackTrace();
    } catch (MongoException e) {
        e.printStackTrace();
    }
}

From source file:de.unimannheim.infor.swt.uim.actions.MogoDBCreate.java

License:Open Source License

public static void Binaryconnectionupdate(String coninstanceFQN, String conFQN) {

    try {
        MongoClient mongoClient = new MongoClient(mdlocalhost, mdport);
        DB db = mongoClient.getDB(tmtextdb);
        boolean auth = db.authenticate(tmtextuser, tmtextpassword.toCharArray());
        DBCollection collection = db.getCollection("binaryconnection");

        DBCursor curs = collection.find();
        while (curs.hasNext()) {
            DBObject dbObject = curs.next();
            String FQN = ((BasicBSONObject) dbObject).getString("FQN");
            String exdirecttype = ((BasicBSONObject) dbObject).getString("directtype");
            if (FQN.equals(coninstanceFQN)) {

                dbObject.put("directtype", conFQN);
                collection.save(dbObject);
            }

        }
    } catch (UnknownHostException e) {
        e.printStackTrace();
    } catch (MongoException e) {
        e.printStackTrace();
    }
}

From source file:edu.wayne.cs.fms.controller.CRUD.java

public static void Save(DBObject doc, String tempColName, MongoClient mongoClient) {
    //MongoClient mongoClient = Connector.connect("localhost", 27017);
    DB db = mongoClient.getDB("project");
    DBCollection temp = db.getCollection(tempColName);
    temp.save(doc);
    //mongoClient.close();
}

From source file:eu.delving.core.storage.impl.UserRepoImpl.java

License:EUPL

private DBCollection users() {
    PortalTheme portalTheme = ThemeFilter.getTheme();
    String collectionName = portalTheme == null ? USERS_COLLECTION
            : String.format("%s_%s", USERS_COLLECTION, portalTheme.getName());
    DBCollection collection = mongoFactory.getMongo().getDB(databaseName).getCollection(collectionName);
    if (collection.count() == 0) {
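        // First access: seed the empty collection with a default supervisor account.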
        DBObject object = mob(User.EMAIL, "supervisor@delving.eu", User.ENABLED, true, User.FIRST_NAME,
                "Delving", User.LAST_NAME, "Melvin", User.USER_NAME, "supervisor", User.ROLE,
                User.Role.ROLE_GOD.toString());
        UserImpl user = new UserImpl(object);
        user.setPassword("melvin");
        collection.save(object);
    }
    return collection;
}

From source file:eu.eubrazilcc.lvl.storage.mongodb.MongoDBConnector.java

License:EUPL

/**
 * Updates an object previously stored in a collection.
 * @param obj - value used to update the object
 * @param query - statement that is used to find the object in the collection
 * @param collection - collection where the object is searched
 */
public void update(final DBObject obj, final DBObject query, final String collection) {
    checkArgument(obj != null, "Uninitialized object");
    checkArgument(isNotBlank(collection), "Uninitialized or invalid collection");
    final DB db = client().getDB(CONFIG_MANAGER.getDbName());
    final DBCollection dbcol = db.getCollection(collection);
    final BasicDBObject current = (BasicDBObject) dbcol.findOne(query);
    checkState(current != null, "Object not found");
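    // Copy the stored document's _id onto the replacement so save() performs an update rather than an insert.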
    dbcol.save(BasicDBObjectBuilder.start(obj.toMap()).append("_id", current.get("_id")).get());
}

From source file:ezbake.example.ezmongo.EzMongoSampleClient.java

License:Apache License

public static void main(String[] args)
        throws VisibilityParseException, ClassificationConversionException, EzMongoBaseException {

    init();

    DBObject helloWorld = new BasicDBObject("hello", "world");
    DBObject objToInsert = new BasicDBObject("text", helloWorld);

    // Here, we would insert the security tagging fields into the DBObject by calling a utility class (RedactHelper.java).
    Visibility vis = new Visibility();
    // Convert CAPCO to Accumulo-style boolean expression string and set it in the Visibility object.
    String booleanExpressionString = ClassificationUtils.getAccumuloVisibilityStringFromCAPCO("SECRET");
    vis.setFormalVisibility(booleanExpressionString);
    RedactHelper.setSecurityFieldsInDBObject(objToInsert, vis, "testAppId");

    // Call the Provenance service to get a unique ID for the document -
    //   the unique ID would be used for the Purge feature.

    // Get the mongo database & collection
    DB db = null;
    try {
        db = getDatabase("testdb");
    } catch (EzConfigurationLoaderException e) {
        e.printStackTrace();
    }
    DBCollection collection = db.getCollection("testCollection1");

    // Save to MongoDB
    collection.save(objToInsert);

    // Retrieve from Mongo
    DBCursor cursor = collection.find(objToInsert);
    if (cursor.hasNext()) {
        DBObject obj = cursor.next();
        System.out.println("obj: " + obj);
    }
}

From source file:fr.cirad.web.controller.gigwa.base.AbstractVariantController.java

License:Open Source License

/**
 * This method returns the number of variants that match provided parameters.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the proj id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the gt code
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param processID the process id
 * @return the long
 * @throws Exception the exception
 */
@RequestMapping(variantCountURL)
protected @ResponseBody long countVariants(HttpServletRequest request, @RequestParam("module") String sModule,
        @RequestParam("project") int projId, @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") Integer genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") Integer readDepthThreshold,
        @RequestParam("missingData") Double missingData,
        @RequestParam(value = "minmaf", required = false) Float minmaf,
        @RequestParam(value = "maxmaf", required = false) Float maxmaf,
        @RequestParam("minposition") Long minposition, @RequestParam("maxposition") Long maxposition,
        @RequestParam("alleleCount") String alleleCount, @RequestParam("geneName") String geneName,
        @RequestParam("variantEffects") String variantEffects,
        @RequestParam("processID") final String processID) throws Exception {
    final ProgressIndicator progress = new ProgressIndicator(processID.substring(1 + processID.indexOf('|')),
            new String[0]);
    ProgressIndicator.registerProgressIndicator(progress);

    DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, progress.getProcessId(),
            true /*empty it*/);
    try {
        String queryKey = getQueryKey(request, sModule, projId, selectedVariantTypes, selectedSequences,
                selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData,
                minmaf, maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects);

        final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        DBCollection cachedCountcollection = mongoTemplate.getCollection(MgdbDao.COLLECTION_NAME_CACHED_COUNTS);
        //         cachedCountcollection.drop();
        DBCursor countCursor = cachedCountcollection.find(new BasicDBObject("_id", queryKey));
        Long count = null;
        if (countCursor.hasNext()) {
            count = 0L;
            for (Object aPartialCount : ((BasicDBList) countCursor.next()
                    .get(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE)).toArray())
                count += (Long) aPartialCount;
        }
        LOG.debug((count == null ? "new" : "existing") + " queryKey hash: " + queryKey);
        if (count == null) {
            long before = System.currentTimeMillis();

            progress.addStep("Counting matching variants");
            String sRegexOrAggregationOperator = GenotypingDataQueryBuilder.getGenotypePatternToQueryMap()
                    .get(gtPattern);

            List<String> alleleCountList = alleleCount.length() == 0 ? null
                    : Arrays.asList(alleleCount.split(";"));

            GenotypingProject genotypingProject = mongoTemplate.findById(projId, GenotypingProject.class);
            if (genotypingProject.getAlleleCounts().size() != 1
                    || genotypingProject.getAlleleCounts().iterator().next() != 2) { // Project does not only have bi-allelic data: make sure we can apply MAF filter on selection
                boolean fExactlyOneNumberOfAllelesSelected = alleleCountList != null
                        && alleleCountList.size() == 1;
                boolean fBiAllelicSelected = fExactlyOneNumberOfAllelesSelected
                        && "2".equals(alleleCountList.get(0));
                boolean fMafRequested = (maxmaf != null && maxmaf < 50) || (minmaf != null && minmaf > 0);
                if (fMafRequested && !fBiAllelicSelected) {
                    progress.setError("MAF is only supported on biallelic data!");
                    return 0L;
                }
            }

            String actualSequenceSelection = selectedSequences;
            if (actualSequenceSelection.length() == 0) {
                ArrayList<String> externallySelectedSeqs = getSequenceIDsBeingFilteredOn(request, sModule);
                if (externallySelectedSeqs != null)
                    actualSequenceSelection = StringUtils.join(externallySelectedSeqs, ";");
            }

            boolean fNeedToFilterOnGenotypingData = needToFilterOnGenotypingData(sModule, projId,
                    sRegexOrAggregationOperator, genotypeQualityThreshold, readDepthThreshold, missingData,
                    minmaf, maxmaf, geneName, variantEffects);

            BasicDBList variantQueryDBList = buildVariantDataQuery(sModule, projId,
                    selectedVariantTypes.length() == 0 ? null : Arrays.asList(selectedVariantTypes.split(";")),
                    actualSequenceSelection.length() == 0 ? null
                            : Arrays.asList(actualSequenceSelection.split(";")),
                    minposition, maxposition, alleleCountList);
            if (variantQueryDBList.isEmpty()) {
                if (!fNeedToFilterOnGenotypingData && mongoTemplate.count(null, GenotypingProject.class) == 1)
                    count = mongoTemplate.count(new Query(), VariantData.class); // no filter whatsoever
            } else {
                if (!fNeedToFilterOnGenotypingData) { // filtering on variant features only: we just need a count
                    count = mongoTemplate.getCollection(mongoTemplate.getCollectionName(VariantData.class))
                            .count(new BasicDBObject("$and", variantQueryDBList));
                } else { // filtering on variant features and genotyping data: we need a list of variant IDs to restrict the genotyping data search to
                    long beforeAggQuery = System.currentTimeMillis();
                    progress.setProgressDescription("Filtering variants for count...");

                    DBCollection variantColl = mongoTemplate
                            .getCollection(mongoTemplate.getCollectionName(VariantData.class));
                    List<DBObject> pipeline = new ArrayList<DBObject>();
                    pipeline.add(new BasicDBObject("$match", new BasicDBObject("$and", variantQueryDBList)));
                    BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
                    projectObject.put(
                            VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_SEQUENCE,
                            "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_SEQUENCE);
                    projectObject.put(
                            VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_START_SITE,
                            "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_START_SITE);
                    projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
                    projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST,
                            "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
                    pipeline.add(new BasicDBObject("$project", projectObject));
                    pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
                    variantColl.aggregate(pipeline);

                    mongoTemplate.getDb().setWriteConcern(WriteConcern.ACKNOWLEDGED);
                    LOG.debug("Variant preliminary query found " + tmpVarColl.count() + " results in "
                            + (System.currentTimeMillis() - beforeAggQuery) / 1000f + "s");

                    progress.setProgressDescription(null);
                    if (tmpVarColl.count() == 0)
                        count = 0L; // no need to search any further
                }
            }

            if (count != null) {
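                // Cache the freshly computed count under the query key so identical follow-up requests can skip the scan.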
                BasicDBObject dbo = new BasicDBObject("_id", queryKey);
                dbo.append(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE, new Long[] { count });
                cachedCountcollection.save(dbo);
            } else { // now filter on genotyping data
                List<String> selectedIndividualList = selectedIndividuals.length() == 0 ? null
                        : Arrays.asList(selectedIndividuals.split(";"));
                if (selectedIndividualList == null)
                    selectedIndividualList = getIndividualsInDbOrder(sModule, projId);

                GigwaSearchVariantsExportRequest gsvr = new GigwaSearchVariantsExportRequest();
                gsvr.setAlleleCount(alleleCount);
                if (minposition != null)
                    gsvr.setStart(minposition);
                if (maxposition != null)
                    gsvr.setEnd(maxposition);
                gsvr.setGeneName(geneName);
                gsvr.setReferenceName(selectedSequences);
                gsvr.setSelectedVariantTypes(selectedVariantTypes);
                gsvr.setVariantEffect(variantEffects);
                gsvr.setVariantSetId(sModule + ServiceInterface.ID_SEPARATOR + projId);

                gsvr.setMissingData(missingData);
                gsvr.setMinmaf(minmaf);
                gsvr.setMaxmaf(maxmaf);
                gsvr.setGtPattern(gtPattern);

                HashMap<String, Integer> annotationFieldThresholds = new HashMap<String, Integer>();
                annotationFieldThresholds.put(VCFConstants.GENOTYPE_QUALITY_KEY, genotypeQualityThreshold);
                annotationFieldThresholds.put(VCFConstants.DEPTH_KEY, readDepthThreshold);
                gsvr.setAnnotationFieldThresholds(annotationFieldThresholds);
                gsvr.setCallSetIds(selectedIndividualList);

                GenotypingDataQueryBuilder genotypingDataQueryBuilder = new GenotypingDataQueryBuilder(gsvr,
                        tmpVarColl);
                try {
                    final int nChunkCount = genotypingDataQueryBuilder.getNumberOfQueries();
                    if (nChunkCount > 1)
                        LOG.debug("Query split into " + nChunkCount);

                    final Long[] partialCountArray = new Long[nChunkCount];
                    final Builder aggOpts = AggregationOptions.builder().allowDiskUse(false);
                    final ArrayList<Thread> threadsToWaitFor = new ArrayList<Thread>();
                    final AtomicInteger finishedThreadCount = new AtomicInteger(0);

                    ArrayList<List<DBObject>> genotypingDataPipelines = new ArrayList<>();
                    while (genotypingDataQueryBuilder.hasNext())
                        genotypingDataPipelines.add(genotypingDataQueryBuilder.next());

                    ArrayList<Integer> chunkIndices = new ArrayList<Integer>();
                    for (int i = 0; i < genotypingDataPipelines.size(); i++)
                        chunkIndices.add(i);
                    Collections.shuffle(chunkIndices);

                    for (int i = 0; i < chunkIndices.size()/*/2*/; i++) {
                        final List<DBObject> genotypingDataPipeline = genotypingDataPipelines
                                .get(chunkIndices.get(i));

                        // Now the $group operation, used for counting
                        DBObject groupFields = new BasicDBObject("_id", null);
                        groupFields.put("count", new BasicDBObject("$sum", 1));
                        genotypingDataPipeline.add(new BasicDBObject("$group", groupFields));

                        if (i == 0 && tmpVarColl.count() <= 5)
                            LOG.debug(genotypingDataPipeline);

                        if (progress.hasAborted()) {
                            genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                            return 0L;
                        }

                        final int chunkIndex = i;

                        Thread t = new Thread() {
                            public void run() {
                                //                               long b4 = System.currentTimeMillis();
                                Cursor it = mongoTemplate
                                        .getCollection(MongoTemplateManager
                                                .getMongoCollectionName(VariantRunData.class))
                                        .aggregate(genotypingDataPipeline, aggOpts.build());
                                partialCountArray[chunkIndex] = it.hasNext()
                                        ? ((Number) it.next().get("count")).longValue()
                                        : 0;
                                progress.setCurrentStepProgress(
                                        (short) (finishedThreadCount.incrementAndGet() * 100 / nChunkCount));
                                //                           System.out.println("chunk " + chunkIndex + " took " + (System.currentTimeMillis() - b4));
                                genotypingDataPipeline.clear(); // release memory (VERY IMPORTANT)
                            }
                        };

                        if (i % NUMBER_OF_SIMULTANEOUS_QUERY_THREADS == (NUMBER_OF_SIMULTANEOUS_QUERY_THREADS
                                - 1)) {
                            t.run(); // run synchronously
                        } else {
                            threadsToWaitFor.add(t);
                            t.start(); // run asynchronously for better speed
                        }
                    }

                    for (Thread t : threadsToWaitFor) // wait for all threads before moving to next phase
                        t.join();

                    progress.setCurrentStepProgress(100);

                    count = 0L;
                    for (Long partialCount : partialCountArray)
                        count += partialCount;

                    BasicDBObject dbo = new BasicDBObject("_id", queryKey);
                    dbo.append(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE, partialCountArray);
                    cachedCountcollection.save(dbo);
                } catch (Exception e) {
                    genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                    throw e;
                }
            }
            LOG.info("countVariants found " + count + " results in "
                    + (System.currentTimeMillis() - before) / 1000d + "s");
        }

        progress.markAsComplete();
        if (progress.hasAborted())
            return 0L;

        return count;
    } finally {
        //         getTemporaryVariantCollection(sModule, progress.getProcessId(), true);   // always empty it
    }
}