Example usage for com.mongodb QueryBuilder.start()

List of usage examples for com.mongodb QueryBuilder.start()

Introduction

On this page you can find usage examples for com.mongodb QueryBuilder.start().

Prototype

public static QueryBuilder start() 

Document

Returns a new QueryBuilder.
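
For orientation, here is a minimal, self-contained sketch of the typical pattern (the "status" and "age" fields and their values are hypothetical, not from any example below): start() returns an empty builder, field conditions are chained onto it, and get() yields the DBObject that a find call accepts.

import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;

public class QueryBuilderStartExample {
    public static void main(String[] args) {
        // start() returns an empty builder; chain field conditions,
        // then call get() to obtain the DBObject that find(...) accepts.
        DBObject query = QueryBuilder.start()
                .put("status").is("active")   // hypothetical field and value
                .and("age").greaterThan(18)   // hypothetical field and value
                .get();
        System.out.println(query); // roughly: { "status" : "active" , "age" : { "$gt" : 18 } }
    }
}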

Usage

From source file:com.mobileman.kuravis.core.services.user.impl.UserServiceImpl.java

License:Apache License

/** 
 * {@inheritDoc}
 */
@Override
public Map<String, DBObject> findUsersData(Collection<String> usersId, String... properties) {
    if (usersId.isEmpty()) {
        return Collections.emptyMap();
    }

    Map<String, DBObject> result = new HashMap<String, DBObject>();
    DBObject filter = QueryBuilder.start().put(EntityUtils.ID).in(usersId).get();
    DBObject props = new BasicDBObject();
    for (String property : properties) {
        props.put(property, 1);
    }
    DBCursor cursor = getCollection().find(filter, props);
    while (cursor.hasNext()) {
        DBObject user = cursor.next();
        result.put(EntityUtils.getEntityId(user), user);
    }

    return result;
}

From source file:com.stratio.deep.examples.java.extractor.ReadingCellFromMongoDB.java

License:Apache License

public static void doMain(String[] args) {
    String job = "java:readingCellFromMongoDB";

    String host = "127.0.0.1:27017";

    String database = "test";
    String inputCollection = "input";

    // Creating the Deep Context where args are Spark Master and Job Name
    ContextProperties p = new ContextProperties(args);
    DeepSparkContext deepContext = new DeepSparkContext(p.getCluster(), job, p.getSparkHome(), p.getJars());

    // Empty builder; this example filters via Filter objects below instead
    QueryBuilder query = QueryBuilder.start();
    // query.and("number").greaterThan(27).lessThan(30);

    BSONObject bsonSort = new BasicBSONObject();
    bsonSort.put("number", 1);

    BSONObject bsonFields = new BasicBSONObject();
    bsonFields.put("number", 1);
    bsonFields.put("text", 1);
    bsonFields.put("_id", 0);

    Filter filter = new Filter("number");
    filter.greaterThan(1);
    Filter filter2 = new Filter("number");
    filter2.lessThanEquals(10);
    Filter[] filters = new Filter[] { filter, filter2 };

    ExtractorConfig<Cells> config = new ExtractorConfig();

    config.setExtractorImplClass(MongoCellExtractor.class);
    Map<String, Serializable> values = new HashMap<>();
    values.put(ExtractorConstants.DATABASE, database);
    values.put(ExtractorConstants.FILTER_QUERY, filters);
    values.put(ExtractorConstants.COLLECTION, inputCollection);
    values.put(ExtractorConstants.HOST, host);

    config.setValues(values);

    RDD<Cells> inputRDDEntity = deepContext.createRDD(config);

    LOG.info("count : " + inputRDDEntity.count());

    LOG.info("prints first cell  : " + inputRDDEntity.first());

    deepContext.stop();
}

From source file:com.stratio.deep.examples.java.factory.ReadingCellFromMongoDB.java

License:Apache License

public static void doMain(String[] args) {
    String job = "java:readingCellFromMongoDB";

    String host = "127.0.0.1:27017";

    String database = "test";
    String inputCollection = "input";

    // Creating the Deep Context where args are Spark Master and Job Name
    ContextProperties p = new ContextProperties(args);
    DeepSparkContext deepContext = new DeepSparkContext(p.getCluster(), job, p.getSparkHome(), p.getJars());

    QueryBuilder query = QueryBuilder.start();
    query.and("number").greaterThan(27).lessThan(30);

    DBObject bsonSort = new BasicDBObject();
    bsonSort.put("number", 1);

    DBObject bsonFields = new BasicDBObject();
    bsonFields.put("number", 1);
    bsonFields.put("text", 1);
    bsonFields.put("_id", 0);

    MongoDeepJobConfig inputConfigEntity = MongoConfigFactory.createMongoDB().host(host).database(database)
            .collection(inputCollection).createInputSplit(false).filterQuery(query).sort(bsonSort)
            .fields(bsonFields);

    RDD inputRDDEntity = deepContext.createRDD(inputConfigEntity);

    LOG.info("count : " + inputRDDEntity.count());
    LOG.info("prints first cell  : " + inputRDDEntity.first());

    deepContext.stop();
}

From source file:com.stratio.deep.mongodb.config.MongoDeepJobConfig.java

License:Apache License

/**
 * Filter query.
 *
 * @param filters the filters
 * @return the mongo deep job config
 */
public MongoDeepJobConfig<T> filterQuery(Filter[] filters) {

    if (filters.length > 0) {
        List<BasicDBObject> list = new ArrayList<>();

        QueryBuilder queryBuilder = QueryBuilder.start();
        for (int i = 0; i < filters.length; i++) {
            BasicDBObject bsonObject = new BasicDBObject();

            Filter filter = filters[i];
            if (filter.getFilterType().equals(FilterType.EQ)) {
                bsonObject.put(filter.getField(), filter.getValue());
            } else {
                bsonObject.put(filter.getField(), new BasicDBObject(
                        "$".concat(filter.getFilterType().getFilterTypeId().toLowerCase()), filter.getValue()));
            }

            list.add(bsonObject);
        }
        queryBuilder.and(list.toArray(new BasicDBObject[list.size()]));

        filterQuery(queryBuilder);
    }
    return this;

}

From source file:com.stratio.deep.mongodb.reader.MongoReader.java

License:Apache License

/**
 * Generate filter query.
 *
 * @param partition the partition
 * @return the dB object
 */
private DBObject generateFilterQuery(MongoPartition partition) {

    if (mongoDeepJobConfig.getQuery() != null) {
        QueryBuilder queryBuilder = QueryBuilder.start();

        queryBuilder.and(createQueryPartition(partition), mongoDeepJobConfig.getQuery());

        LOG.debug("mongodb query " + queryBuilder.get());

        return queryBuilder.get();
    }

    return createQueryPartition(partition);

}

From source file:com.streamreduce.storm.MongoClient.java

License:Apache License

/**
 * Returns the list of events in the Nodeable datastore between the dates specified by the since and until
 * parameters.
 *
 * @param since the date (exclusive) to get the events after.  A null means this parameter is ignored.
 * @param until the date (inclusive) to get the events before.  A null means this parameter is ignored.
 * @return the list of events or an empty list if there are none
 */
public List<BasicDBObject> getEvents(Date since, Date until) {
    DB connectionsDb = getDB("nodeablemsgdb");

    QueryBuilder queryBuilder = QueryBuilder.start();
    if (since != null) {
        queryBuilder.and("timestamp").greaterThan(since.getTime());
    }
    if (until != null) {
        queryBuilder.and("timestamp").lessThanEquals(until.getTime());
    }

    DBObject query = queryBuilder.get();
    return asList(connectionsDb.getCollection("eventStream").find(query));
}

From source file:de.otto.mongodb.profiler.op.OpProfileDataFetcher.java

License:Apache License

private DBCursor getCursor() {

    synchronized (cursorMutex) {

        // Close a dead cursor (a cursor id of 0 means the server has exhausted it)
        if (cursor != null && cursor.getCursorId() == 0L) {
            cursor.close();
            cursor = null;
        }

        // Create new cursor
        if (cursor == null && db.collectionExists(COLLECTION)) {

            if (lastTs == null) {
                lastTs = DateTime.now(DateTimeZone.UTC);
            }

            final DBCollection collection = db.getCollection(COLLECTION);
            final DBObject query = QueryBuilder.start()
                    .and(QueryBuilder.start("ns").notEquals(collection.getFullName()).get(),
                            QueryBuilder.start("ts").greaterThan(lastTs.toDate()).get())
                    .get();
            final DBObject sortBy = new BasicDBObject("$natural", 1);
            final DBCursor cursor = collection.find(query).sort(sortBy).batchSize(100)
                    .addOption(Bytes.QUERYOPTION_TAILABLE).addOption(Bytes.QUERYOPTION_AWAITDATA);
            this.cursor = cursor;
        }
    }

    return cursor;
}

From source file:fr.wseduc.rack.services.RackServiceMongoImpl.java

License:Open Source License

public void listRack(UserInfos user, Handler<Either<String, JsonArray>> handler) {
    QueryBuilder query = QueryBuilder.start().or(QueryBuilder.start("to").is(user.getUserId()).get(),
            QueryBuilder.start("from").is(user.getUserId()).get()).and("file").exists(true);

    mongo.find(collection, MongoQueryBuilder.build(query), MongoDbResult.validResultsHandler(handler));
}

From source file:io.hawkcd.services.PipelineService.java

License:Apache License

@Override
@Authorization(scope = PermissionScope.PIPELINE, type = PermissionType.OPERATOR)
public ServiceResult add(Pipeline pipeline) {
    PipelineDefinition pipelineDefinition = (PipelineDefinition) this.pipelineDefinitionService
            .getById(pipeline.getPipelineDefinitionId()).getEntity();
    pipeline.setPipelineDefinitionName(pipelineDefinition.getName());

    Pipeline lastPipeline = null;
    switch (super.DATABASE_TYPE) {
    case REDIS:
        List<Pipeline> pipelines = (List<Pipeline>) this.getAll().getEntity();
        lastPipeline = pipelines.stream()
                .filter(p -> p.getPipelineDefinitionId().equals(pipeline.getPipelineDefinitionId()))
                .sorted((p1, p2) -> Integer.compare(p2.getExecutionId(), p1.getExecutionId())).findFirst()
                .orElse(null);
        break;
    case MONGODB:
        BasicDBObject query = (BasicDBObject) QueryBuilder.start().put("pipelineDefinitionId")
                .is(pipeline.getPipelineDefinitionId()).get();
        BasicDBObject sortingFilter = new BasicDBObject("executionId", -1);
        Integer skip = 0;
        Integer limit = 1;
        lastPipeline = (Pipeline) ((ArrayList) this.getPipelineMongoService()
                .QueryExecutor(query, sortingFilter, skip, limit).getEntity()).get(0);
        break;
    }

    if (lastPipeline == null) {
        pipeline.setExecutionId(1);
    } else {
        pipeline.setExecutionId(lastPipeline.getExecutionId() + 1);
        pipelineDefinition.setRevisionCount(pipelineDefinition.getRevisionCount() + 1);
        List<EnvironmentVariable> environmentVariables = pipelineDefinition.getEnvironmentVariables();
        // Assumes a "COUNT" environment variable is present on the pipeline definition
        EnvironmentVariable environmentVariable = environmentVariables.stream()
                .filter(e -> e.getKey().equals("COUNT")).findFirst().orElse(null);

        int envAutoIncrement = Integer.parseInt(environmentVariable.getValue()) + 1;

        environmentVariable.setValue(String.valueOf(envAutoIncrement));
        environmentVariables.stream().filter(env -> env.getKey().equals(environmentVariable.getKey()))
                .forEach(env -> {
                    env.setValue(environmentVariable.getValue());
                });
        pipelineDefinition.setEnvironmentVariables(environmentVariables);
        ServiceResult result = this.pipelineDefinitionService.update(pipelineDefinition);
    }

    this.addMaterialsToPipeline(pipeline);
    this.addStagesToPipeline(pipeline);

    return super.add(pipeline);
}

From source file:io.hawkcd.services.PipelineService.java

License:Apache License

@Override
@Authorization(scope = PermissionScope.PIPELINE, type = PermissionType.VIEWER)
public ServiceResult getAllByDefinitionId(String pipelineDefinitionId) {
    ServiceResult result = null;

    switch (super.DATABASE_TYPE) {
    case REDIS:
        result = this.getAll();
        List<Pipeline> pipelines = (List<Pipeline>) result.getEntity();

        List<Pipeline> filteredPipelines = pipelines.stream()
                .filter(p -> p.getPipelineDefinitionId().equals(pipelineDefinitionId))
                .collect(Collectors.toList());

        result.setEntity(filteredPipelines);
        break;
    case MONGODB:
        BasicDBObject query = (BasicDBObject) QueryBuilder.start().put("pipelineDefinitionId")
                .is(pipelineDefinitionId).get();
        result = this.getPipelineMongoService().QueryExecutor(query);
        break;
    }

    return result;
}