Example usage for com.mongodb QueryBuilder start

Introduction

On this page you can find example usages of com.mongodb QueryBuilder.start().

Prototype

public static QueryBuilder start() 

Document

Returns a new QueryBuilder.

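As a quick illustration of the prototype above, here is a minimal, self-contained sketch; the field names "status" and "stars" are made up for the example.

import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;

public class QueryBuilderStartExample {
    public static void main(String[] args) {
        // start() returns an empty builder; conditions are then chained onto it.
        DBObject query = QueryBuilder.start()
                .and("status").is("active")             // hypothetical field
                .and("stars").greaterThanEquals(100)    // hypothetical field
                .get();

        // Prints roughly: { "status" : "active" , "stars" : { "$gte" : 100 } }
        System.out.println(query);
    }
}
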
Usage

From source file: org.ossmeter.platform.client.api.SparkResource.java

License: Open Source License

public Representation doRepresent() {
    // Check cache
    String sd = SparkCache.getSparkCache().getSparkData(getRequest().getResourceRef().toString());

    if (sd != null) {
        JsonNode obj;
        try {
            System.out.println("SD: " + sd);
            obj = mapper.readTree(sd);
            return Util.createJsonRepresentation(obj);
        } catch (Exception e) {
            e.printStackTrace(); // FIXME
            getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
            ObjectNode node = mapper.createObjectNode();
            node.put("status", "error");
            node.put("msg", "Error whilst retrieving sparkline.");
            return Util.createJsonRepresentation(node);
        }
    }

    // Miss. Hit database.
    String projectId = (String) getRequest().getAttributes().get("projectid");
    String metric = (String) getRequest().getAttributes().get("metricid");

    String[] metrics = metric.split("\\+");
    System.err.println("metrics to get: " + metrics);
    ArrayNode sparks = mapper.createArrayNode();
    for (String metricId : metrics) {

        String agg = getQueryValue("agg");
        String start = getQueryValue("startDate");
        String end = getQueryValue("endDate");

        QueryBuilder builder = QueryBuilder.start();
        if (agg != null && agg != "") {
            //         builder.... // TODO
        }
        try {
            if (start != null && start != "") {
                builder.and("__datetime").greaterThanEquals(new Date(start).toJavaDate());
            }
            if (end != null && end != "") {
                builder.and("__datetime").lessThanEquals(new Date(end).toJavaDate());
            }
        } catch (ParseException e) {
            getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
            ObjectNode node = mapper.createObjectNode();
            node.put("status", "error");
            node.put("msg", "Invalid date. Format must be YYYYMMDD.");
            node.put("request", generateRequestJson(projectId, metricId));
            return Util.createJsonRepresentation(node);
        }

        BasicDBObject query = (BasicDBObject) builder.get();

        ProjectRepository projectRepo = platform.getProjectRepositoryManager().getProjectRepository();
        Project project = projectRepo.getProjects().findOneByShortName(projectId);
        if (project == null) {
            getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
            return Util.createJsonRepresentation(
                    generateErrorMessage(mapper, "No project matched that requested.", projectId, metricId));

        }

        MetricVisualisationExtensionPointManager manager = MetricVisualisationExtensionPointManager
                .getInstance();
        Map<String, MetricVisualisation> registeredVisualisations = manager.getRegisteredVisualisations();
        System.out.println("registered visualisations: " + registeredVisualisations.keySet());
        MetricVisualisation vis = manager.findVisualisationById(metricId);

        if (vis == null) {
            getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
            return Util.createJsonRepresentation(generateErrorMessage(mapper,
                    "No visualiser found with specified ID.", projectId, metricId));
        }

        DB db = platform.getMetricsRepository(project).getDb();

        System.setProperty("java.awt.headless", "true");

        byte[] sparky;
        try {
            sparky = vis.getSparky(db, query);
            ObjectNode sparkData = vis.getSparkData();

            if (sparky != null) {
                String uuid = UUID.randomUUID().toString();
                SparkCache.getSparkCache().putSpark(uuid, sparky);
                sparkData.put("spark", "/spark/" + uuid);
            }
            sparkData.put("metricId", metricId);
            sparkData.put("projectId", projectId);

            // And add to the return list
            sparks.add(sparkData);
        } catch (ParseException e) {
            //            getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
            // TODO Log this as series - needs investigating by admin
            sparks.add(generateErrorMessage(mapper, "Error whilst generating sparkle. Unable to parse data.",
                    projectId, metricId));
        } catch (UnsparkableVisualisationException e) {
            //            getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
            sparks.add(generateErrorMessage(mapper,
                    "Visualisation not sparkable. Metrics must be time series in order to be sparkable.",
                    projectId, metricId));
        } catch (IOException e) {
            e.printStackTrace(); // FIXME
            //            getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
            sparks.add(generateErrorMessage(mapper, "Error whilst generating sparkle.", projectId, metricId));
        }
    }

    // Put in the cache

    if (sparks.size() == 1) {
        SparkCache.getSparkCache().putSparkData(getRequest().getResourceRef().toString(),
                (ObjectNode) sparks.get(0));
        return Util.createJsonRepresentation(sparks.get(0));
    } else {
        SparkCache.getSparkCache().putSparkData(getRequest().getResourceRef().toString(), sparks);
        return Util.createJsonRepresentation(sparks);
    }
}

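The date-range handling above reduces to a small reusable pattern: calling and("__datetime") twice on the same builder merges the $gte and $lte operators into a single range condition. A minimal sketch, using plain java.util.Date instead of the platform's own Date wrapper (an assumption made here for illustration):

import java.util.Date;

import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;

// Builds roughly { "__datetime" : { "$gte" : <start> , "$lte" : <end> } };
// either bound may be null, in which case it is simply left out.
static DBObject dateRangeQuery(Date start, Date end) {
    QueryBuilder builder = QueryBuilder.start();
    if (start != null) {
        builder.and("__datetime").greaterThanEquals(start);
    }
    if (end != null) {
        builder.and("__datetime").lessThanEquals(end);
    }
    return builder.get();
}
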
From source file: org.pentaho.di.trans.dataservice.optimization.mongod.MongodbPredicate.java

License: Apache License

protected DBObject conditionAsDBObject() throws PushDownOptimizationException {
    return buildMongoCondition(condition, QueryBuilder.start()).get();
}

From source file: org.pentaho.di.trans.dataservice.optimization.mongod.MongodbPredicate.java

License: Apache License

private DBObject[] conditionListToDBObjectArray(List<Condition> conditions)
        throws PushDownOptimizationException {
    BasicDBList basicDbList = new BasicDBList();
    for (Condition condition : conditions) {
        QueryBuilder childContainer = QueryBuilder.start();
        buildMongoCondition(condition, childContainer);
        basicDbList.add(childContainer.get());
    }
    return basicDbList.toArray(new DBObject[basicDbList.size()]);
}