Example usage for com.mongodb BasicDBObjectBuilder start

List of usage examples for com.mongodb BasicDBObjectBuilder start

Introduction

On this page you can find an example usage for com.mongodb BasicDBObjectBuilder start.

Prototype

public static BasicDBObjectBuilder start(final String key, final Object val) 

Source Link

Document

Creates a builder initialized with the given key/value.

Usage

From source file:org.opencb.opencga.catalog.db.mongodb.CatalogMongoDBAdaptor.java

License:Apache License

@Override
public QueryResult setFileAcl(int fileId, Acl newAcl) throws CatalogDBException {
    long startTime = startQuery();
    String userId = newAcl.getUserId();
    if (!userDBAdaptor.userExists(userId)) {
        throw new CatalogDBException("Can not set ACL to non-existent user: " + userId);
    }

    DBObject newAclObject = getDbObject(newAcl, "ACL");

    // If the user has no ACL entry on this file yet, push a new array element;
    // otherwise use the positional operator ($) to overwrite the matched entry.
    List<Acl> aclList = getFileAcl(fileId, userId).getResult();
    DBObject match;
    DBObject updateOperation;
    if (aclList.isEmpty()) {
        match = new BasicDBObject(_ID, fileId);
        updateOperation = new BasicDBObject("$push", new BasicDBObject("acl", newAclObject));
    } else {
        match = BasicDBObjectBuilder.start(_ID, fileId).append("acl.userId", userId).get();
        updateOperation = new BasicDBObject("$set", new BasicDBObject("acl.$", newAclObject));
    }
    // The write result is intentionally not inspected; this method only reports timing.
    fileCollection.update(match, updateOperation, null);
    return endQuery("set file acl", startTime);
}

From source file:org.opencb.opencga.catalog.db.mongodb.CatalogMongoDBAdaptor.java

License:Apache License

@Override
public QueryResult<Dataset> createDataset(int studyId, Dataset dataset, QueryOptions options)
        throws CatalogDBException {
    long startTime = startQuery();
    checkStudyId(studyId);

    // A dataset name must be unique within its study; reject duplicates up front.
    DBObject duplicateQuery = BasicDBObjectBuilder.start(_ID, studyId)
            .append("datasets.name", dataset.getName()).get();
    QueryResult<Long> count = studyCollection.count(duplicateQuery);
    if (count.getResult().get(0) > 0) {
        throw new CatalogDBException(
                "Dataset { name: \"" + dataset.getName() + "\" } already exists in this study.");
    }

    int newId = getNewId();
    dataset.setId(newId);

    // Append the serialized dataset to the study's "datasets" array.
    DBObject datasetObject = getDbObject(dataset, "Dataset");
    DBObject pushOperation = new BasicDBObject("$push", new BasicDBObject("datasets", datasetObject));
    QueryResult<WriteResult> update =
            studyCollection.update(new BasicDBObject(_ID, studyId), pushOperation, null);

    // No document modified means the study id did not match anything.
    if (update.getResult().get(0).getN() == 0) {
        throw CatalogDBException.idNotFound("Study", studyId);
    }

    return endQuery("createDataset", startTime, getDataset(newId, options));
}

From source file:org.opencb.opencga.catalog.db.mongodb.CatalogMongoDBAdaptor.java

License:Apache License

/**
 * Tool methods/*from  w  w w . j a v a2 s.com*/
 * ***************************
 */

@Override
public QueryResult<Tool> createTool(String userId, Tool tool) throws CatalogDBException {
    long startTime = startQuery();

    if (!userDBAdaptor.userExists(userId)) {
        throw new CatalogDBException("User {id:" + userId + "} does not exist");
    }

    // Check if tools.alias already exists.
    DBObject countQuery = BasicDBObjectBuilder.start(_ID, userId).append("tools.alias", tool.getAlias()).get();
    QueryResult<Long> count = userCollection.count(countQuery);
    if (count.getResult().get(0) != 0) {
        throw new CatalogDBException("Tool {alias:\"" + tool.getAlias() + "\"} already exists in this user");
    }

    tool.setId(getNewId());

    DBObject toolObject = getDbObject(tool, "tool");
    DBObject query = new BasicDBObject(_ID, userId);
    query.put("tools.alias", new BasicDBObject("$ne", tool.getAlias()));
    DBObject update = new BasicDBObject("$push", new BasicDBObject("tools", toolObject));

    //Update object
    QueryResult<WriteResult> queryResult = userCollection.update(query, update, null);

    if (queryResult.getResult().get(0).getN() == 0) { // Check if the project has been inserted
        throw new CatalogDBException("Tool {alias:\"" + tool.getAlias() + "\"} already exists in this user");
    }

    return endQuery("Create Job", startTime, getTool(tool.getId()).getResult());
}

From source file:org.opencb.opencga.catalog.db.mongodb.CatalogMongoDBAdaptor.java

License:Apache License

@Override
public int getToolId(String userId, String toolAlias) throws CatalogDBException {
    // Find the user document owning a tool with this alias, projecting
    // only the matching array element via $elemMatch.
    DBObject query = BasicDBObjectBuilder.start(_ID, userId).append("tools.alias", toolAlias).get();
    DBObject projection = new BasicDBObject("tools",
            new BasicDBObject("$elemMatch", new BasicDBObject("alias", toolAlias)));

    QueryResult<DBObject> queryResult = userCollection.find(query, projection, null);
    if (queryResult.getNumResults() != 1) {
        throw new CatalogDBException("Tool {alias:" + toolAlias + "} no exists");
    }
    // The projection returns exactly one tool in the list, so index 0 is safe.
    User user = parseUser(queryResult);
    return user.getTools().get(0).getId();
}

From source file:org.opencb.opencga.storage.mongodb.alignment.IndexedAlignmentDBAdaptor.java

License:Apache License

@Override
public QueryResult getAllIntervalFrequencies(Region region, QueryOptions options) {
    // Aggregates per-chunk coverage averages into fixed-size intervals over a region.
    int size = options.getInt(QO_INTERVAL_SIZE, 2000);
    String fileId = options.getString(QO_FILE_ID);
    int chunkSize = options.getInt(QO_COVERAGE_CHUNK_SIZE, 200);

    if (size % chunkSize != 0) {
        size -= size % chunkSize; // round the interval size down to a multiple of chunkSize
    }

    // Aggregation pipeline:
    //   $match  — chunks of the right size, on the region's chromosome, with start inside the region
    //   $unwind — one document per file entry within each chunk
    //   $match  — keep only the requested file's entries
    //   $group  — bucket by interval index ((start - start % size) / size), summing averages
    //   $sort   — ascending interval index
    List<DBObject> operations = new LinkedList<>();
    operations.add(new BasicDBObject("$match", new BasicDBObject("$and", Arrays.asList(
            new BasicDBObject(CoverageMongoDBWriter.START_FIELD, new BasicDBObject("$gt", region.getStart())),
            new BasicDBObject(CoverageMongoDBWriter.START_FIELD, new BasicDBObject("$lt", region.getEnd())),
            new BasicDBObject(CoverageMongoDBWriter.CHR_FIELD, region.getChromosome()),
            new BasicDBObject(CoverageMongoDBWriter.SIZE_FIELD, chunkSize)))));
    operations.add(new BasicDBObject("$unwind", "$" + CoverageMongoDBWriter.FILES_FIELD));
    operations.add(new BasicDBObject("$match", new BasicDBObject(
            CoverageMongoDBWriter.FILES_FIELD + "." + CoverageMongoDBWriter.FILE_ID_FIELD, fileId)));
    String startField = "$" + CoverageMongoDBWriter.START_FIELD;
    String averageField = "$" + CoverageMongoDBWriter.FILES_FIELD + "." + CoverageMongoDBWriter.AVERAGE_FIELD;
    operations
            .add(new BasicDBObject("$group",
                    BasicDBObjectBuilder
                            .start("_id",
                                    new BasicDBObject("$divide",
                                            Arrays.asList(
                                                    new BasicDBObject("$subtract",
                                                            Arrays.asList(startField,
                                                                    new BasicDBObject("$mod",
                                                                            Arrays.asList(startField, size)))),
                                                    size)))
                            .append(FEATURES_COUNT, new BasicDBObject("$sum", averageField
                            //                                new BasicDBObject(
                            //                                        "$divide",
                            //                                        Arrays.asList(
                            //                                                averageField,
                            //                                                size / chunkSize
                            //                                        )
                            //                                )
                            )).get()));
    operations.add(new BasicDBObject("$sort", new BasicDBObject("_id", 1)));
    // Debug aid: print a mongo-shell equivalent of the pipeline so it can be replayed manually.
    StringBuilder mongoAggregate = new StringBuilder("db.")
            .append(CoverageMongoDBWriter.COVERAGE_COLLECTION_NAME + ".aggregate( [");
    for (DBObject operation : operations) {
        mongoAggregate.append(operation.toString()).append(" , ");
    }
    mongoAggregate.append("])");
    System.out.println("use " + mongoDataStore.getDatabaseName());
    System.out.println(mongoAggregate.toString());

    /*************/ //TODO: This should work, but doesn't
    System.out.println("dbName" + mongoDataStore.getDb().getName().toString());
    MongoDBCollection collection = mongoDataStore.getCollection(CoverageMongoDBWriter.COVERAGE_COLLECTION_NAME);
    QueryResult<DBObject> aggregate = collection.aggregate(operations, null);
    /*************/

    /*************/ //TODO: What's going on?
    //        long startTime = System.currentTimeMillis();
    //        AggregationOutput aggregationOutput = mongoDataStore.getDb().getCollection(CoverageMongoDBWriter.COVERAGE_COLLECTION_NAME).aggregate(operations);
    //
    //        List<DBObject> results = new LinkedList<>();
    //        for (DBObject object : aggregationOutput.results()) {
    //            results.add(object);
    //        }
    //
    //        long endTime = System.currentTimeMillis();
    //        QueryResult<DBObject> aggregate = new QueryResult<>(fileId, ((int) (endTime - startTime)), results.size(), results.size(), "", "", results);
    /*************/

    //        System.out.println(collection.find(new BasicDBObject(), new QueryOptions("limit", 2), null));
    //        System.out.println(collection.find(new BasicDBObject("files.id", "34"), new QueryOptions("limit", 2), null));
    //        System.out.println(collection.find(new BasicDBObject("files.id", "7"), new QueryOptions("limit", 2), null));
    //        System.out.println(collection.find(new BasicDBObject("files.id", "4"), new QueryOptions("limit", 2), null));

    // Post-process each bucket: translate interval index back to genomic coordinates
    // and normalize the summed averages by the number of chunks covering the interval.
    for (DBObject object : aggregate.getResult()) {
        int id = getInt(object, "_id");
        int start = id * size + 1;
        int end = id * size + size;
        object.put("chromosome", region.getChromosome());
        object.put("start", start);
        object.put("end", end);
        double featuresCount = getDouble(object, FEATURES_COUNT);
        //            object.put("features_count_old", featureCount);
        // NOTE(review): integer-division expression presumably counts the chunks whose
        // start falls inside [start, end] — confirm against the chunking scheme.
        featuresCount /= 1 + (end - 1) / chunkSize - (start + chunkSize - 2) / chunkSize;
        object.put(FEATURES_COUNT, featuresCount);
        //            object.put("div1", end/chunkSize - start/chunkSize);
        //            object.put("div2", end/chunkSize - (start+chunkSize)/chunkSize);
        //            object.put("div3", (end-1)/chunkSize - (start+chunkSize-2)/chunkSize);
    }

    aggregate.setId(fileId);

    return aggregate;
}

From source file:org.opendaylight.controller.samples.onftappingapp.TappingApp.java

License:Apache License

/**
 * Loads log entries whose timestamp lies within [startDate, endDate] (inclusive).
 *
 * @param startDate lower bound of the time range
 * @param endDate   upper bound of the time range
 * @return the logged messages produced by {@code processLogMessageCursor}
 */
public List<LoggedMessage> loadLoggedMessagesFromDatabase(final Date startDate, final Date endDate) {
    DBCollection logTable = database.getCollection(DatabaseNames.getLoggerTableName());

    // Range query on the timestamp field: $gte startDate AND $lte endDate.
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("timestamp", BasicDBObjectBuilder.start("$gte", startDate).add("$lte", endDate).get());

    DBCursor cursor = logTable.find(searchQuery);
    try {
        // Close the cursor even if processing throws (the original leaked it on failure).
        return processLogMessageCursor(cursor);
    } finally {
        cursor.close();
    }
}

From source file:org.sglover.checksum.dao.mongo.MongoChecksumDAO.java

License:Open Source License

/**
 * Serializes a checksum using abbreviated field names:
 * i = block index, h = hash, a = adler32, m = md5.
 */
private DBObject toDBObject(Checksum checksum) {
    BasicDBObjectBuilder builder = BasicDBObjectBuilder.start("i", checksum.getBlockIndex());
    builder.add("h", checksum.getHash());
    builder.add("a", checksum.getAdler32());
    builder.add("m", checksum.getMd5());
    return builder.get();
}

From source file:org.sglover.entities.dao.mongo.MongoEntitiesDAO.java

License:Open Source License

private void init() {
    // Bidirectional mapping between full entity-type names and their
    // abbreviated Mongo field names.
    map.put("name", "nm");
    map.put("nm", "name");
    map.put("location", "l");
    map.put("l", "location");
    map.put("misc", "mi");
    map.put("mi", "misc");
    map.put("money", "m");
    map.put("m", "money");
    map.put("date", "d");
    map.put("d", "date");
    map.put("org", "o");
    map.put("o", "org");

    // The full set of entity types this DAO handles.
    allTypes.add("name");
    allTypes.add("location");
    allTypes.add("org");
    allTypes.add("misc");
    allTypes.add("date");
    allTypes.add("money");

    if (db == null) {
        throw new RuntimeException("Mongo DB must not be null");
    }

    this.entitiesData = getCollection(db, entitiesCollectionName, WriteConcern.ACKNOWLEDGED);

    // Compound index on node id ("n") and version ("v") backing the main lookup path.
    {
        DBObject keys = BasicDBObjectBuilder.start("n", 1).add("v", 1).get();
        this.entitiesData.ensureIndex(keys, "main", false);
    }
}

From source file:org.sglover.entities.dao.mongo.MongoEntitiesDAO.java

License:Open Source License

private void addEntities(Node node, String type, String key, Collection<Entity<String>> entities) {
    // Batch-insert one document per entity via an unordered bulk write.
    BulkWriteOperation bulk = entitiesData.initializeUnorderedBulkOperation();

    String nodeId = node.getNodeId();
    long nodeInternalId = node.getNodeInternalId();
    String nodeVersion = node.getVersionLabel();

    if (entities.size() > 0) {
        int expected = entities.size();
        for (Entity<String> entity : entities) {
            // Serialize every location the entity was found at:
            // s = begin offset, e = end offset, p = probability, c = context.
            List<DBObject> serializedLocations = new LinkedList<>();
            for (EntityLocation location : entity.getLocations()) {
                DBObject serialized = BasicDBObjectBuilder
                        .start("s", location.getBeginOffset())
                        .add("e", location.getEndOffset())
                        .add("p", location.getProbability())
                        .add("c", location.getContext())
                        .get();
                serializedLocations.add(serialized);
            }

            // Entity document: n = node id, ni = internal id, v = version,
            // t = type, <key> = entity value, c = count, locs = locations.
            DBObject entityDoc = BasicDBObjectBuilder
                    .start("n", nodeId)
                    .add("ni", nodeInternalId)
                    .add("v", nodeVersion)
                    .add("t", type)
                    .add(key, entity.getEntity())
                    .add("c", entity.getCount())
                    .add("locs", serializedLocations)
                    .get();
            bulk.insert(entityDoc);
        }

        BulkWriteResult result = bulk.execute();
        int inserted = result.getInsertedCount();

        if (expected != inserted) {
            throw new RuntimeException("Mongo write failed");
        }
    }
}

From source file:org.sglover.entities.dao.mongo.MongoEntitiesDAO.java

License:Open Source License

/**
 * Streams the "name" entities stored for the given node and version.
 * Results are sorted ascending by the abbreviated name field ("nm").
 */
@Override
// TODO use skip and maxItems
public Stream<Entity<String>> getNames(Node node, int skip, int maxItems) {
    String nodeId = node.getNodeId();
    String nodeVersion = node.getVersionLabel();

    Collection<Entity<String>> ret = new LinkedList<>();

    // Match documents for this node id ("n") and version ("v").
    QueryBuilder queryBuilder = QueryBuilder.start("n").is(nodeId).and("v").is(nodeVersion);
    DBObject query = queryBuilder.get();

    BasicDBObjectBuilder orderByBuilder = BasicDBObjectBuilder.start("nm", 1);
    DBObject orderBy = orderByBuilder.get();

    // "nm" always resolves to the same entity type; look it up once instead of per row.
    String type = map.get("nm");

    DBCursor cursor = entitiesData.find(query).sort(orderBy);
    try {
        for (DBObject dbObject : cursor) {
            String name = (String) dbObject.get("nm");
            int count = (Integer) dbObject.get("c");
            ret.add(new Entity<>(EntityType.valueOf(type), name, count));
        }
    } finally {
        // cursor is assigned before the try block, so it is never null here.
        cursor.close();
    }

    return ret.stream();
}