Example usage for com.mongodb.util JSON parse

List of usage examples for com.mongodb.util JSON parse

Introduction

On this page you can find example usage for com.mongodb.util JSON parse.

Prototype

public static Object parse(final String jsonString) 

Document

Parses a JSON string and returns a corresponding Java object.
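
Before the per-project examples below, here is a minimal, self-contained sketch of the typical call pattern. The sample document and the JsonParseSketch class name are illustrative assumptions: in the legacy driver a top-level JSON object parses to a BasicDBObject (which implements DBObject), and malformed input makes parse throw a JSONParseException.

import com.mongodb.DBObject;
import com.mongodb.util.JSON;
import com.mongodb.util.JSONParseException;

public class JsonParseSketch {
    public static void main(String[] args) {
        // parse a JSON string into a driver object and read a field back
        String json = "{ \"name\" : \"alice\", \"age\" : 30 }";
        DBObject doc = (DBObject) JSON.parse(json);
        System.out.println(doc.get("name")); // prints: alice

        // malformed JSON causes parse to throw a JSONParseException
        try {
            JSON.parse("{ \"age\" : }");
        } catch (JSONParseException e) {
            System.out.println("could not parse input: " + e.getMessage());
        }
    }
}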

Usage

From source file:com.socialsky.mods.MongoPersistor.java

License:Apache License

private void runCommand(Message<JsonObject> message) {
    JsonObject reply = new JsonObject();

    String command = getMandatoryString("command", message);

    if (command == null) {
        return;
    }

    DBObject commandObject = (DBObject) JSON.parse(command);
    CommandResult result = db.command(commandObject);

    reply.putObject("result", new JsonObject(result.toMap()));
    sendOK(message, reply);
}

From source file:com.socialsky.mods.MongoPersistor.java

License:Apache License

private void runMapReduce(Message<JsonObject> message) {
    JsonObject reply = new JsonObject();

    String command = getMandatoryString("command", message);

    if (command == null) {
        return;
    }

    DBObject commandObject = (DBObject) JSON.parse(command);
    CommandResult result = db.command(commandObject);

    reply.putObject("result", new JsonObject(result.toMap()));
    sendOK(message, reply);
}

From source file:com.softinstigate.restheart.db.CollectionDAO.java

License:Open Source License

/**
 * Returns the number of documents in the given collection (taking into
 * account the filters, if any).
 *
 * @param coll the MongoDB DBCollection object.
 * @param filters the filters to apply; a Deque of MongoDB query conditions.
 * @return the number of documents in the given collection (taking into
 * account the filters, if any)
 */
public static long getCollectionSize(DBCollection coll, Deque<String> filters) {
    final BasicDBObject query = new BasicDBObject(DOCUMENTS_QUERY);

    if (filters != null) {
        try {
            filters.stream().forEach(f -> {
                query.putAll((BSONObject) JSON.parse(f)); // this can throw JSONParseException for invalid filter parameters
            });
        } catch (JSONParseException jpe) {
            logger.warn("****** error parsing filter expression {}", filters, jpe);
        }
    }

    return coll.count(query);
}

From source file:com.softinstigate.restheart.db.CollectionDAO.java

License:Open Source License

/**
 * Returns the documents of the collection, applying sorting, pagination
 * and filtering.
 *
 * @param coll the MongoDB DBCollection object
 * @param page the page number
 * @param pagesize the size of the page
 * @param sortBy the Deque of fields to use for sorting (prepend the
 * field name with - for a descending sort)
 * @param filters the filters to apply; a Deque of MongoDB query conditions.
 * @return the documents of the collection as a list of DBObjects
 * @throws JSONParseException if a filter is not valid JSON
 */
public static ArrayList<DBObject> getCollectionData(DBCollection coll, int page, int pagesize,
        Deque<String> sortBy, Deque<String> filters) throws JSONParseException {
    // apply sort_by
    DBObject sort = new BasicDBObject();

    if (sortBy == null || sortBy.isEmpty()) {
        sort.put("_id", 1);
    } else {
        sortBy.stream().forEach((sf) -> {
            sf = sf.replaceAll("_lastupdated_on", "_etag"); // _lastupdated_on is not stored; it is derived from the _etag ObjectId

            if (sf.startsWith("-")) {
                sort.put(sf.substring(1), -1);
            } else if (sf.startsWith("+")) {
                sort.put(sf.substring(1), 1); // "+" prefix means ascending
            } else {
                sort.put(sf, 1);
            }
        });
    }

    // apply filter
    final BasicDBObject query = new BasicDBObject(DOCUMENTS_QUERY);

    if (filters != null) {
        filters.stream().forEach((String f) -> {
            BSONObject filterQuery = (BSONObject) JSON.parse(f);
            query.putAll(filterQuery); // this can throw JSONParseException for invalid filter parameters
        });
    }

    ArrayList<DBObject> data = getDataFromCursor(
            coll.find(query).sort(sort).limit(pagesize).skip(pagesize * (page - 1)));

    data.forEach(row -> {
        Object etag = row.get("_etag");

        if (etag != null && ObjectId.isValid("" + etag)) {
            ObjectId _etag = new ObjectId("" + etag);

            row.put("_lastupdated_on", Instant.ofEpochSecond(_etag.getTimestamp()).toString());
        }
    });

    return data;
}

From source file:com.softinstigate.restheart.handlers.injectors.BodyInjectorHandler.java

License:Open Source License

/**
 *
 * @param exchange
 * @param context
 * @throws Exception
 */
@Override
public void handleRequest(HttpServerExchange exchange, RequestContext context) throws Exception {
    if (context.getMethod() == RequestContext.METHOD.GET || context.getMethod() == RequestContext.METHOD.OPTIONS
            || context.getMethod() == RequestContext.METHOD.DELETE) {
        next.handleRequest(exchange, context);
        return;
    }

    // check content type
    HeaderValues contentTypes = exchange.getRequestHeaders().get(Headers.CONTENT_TYPE);

    if (contentTypes == null || contentTypes.isEmpty() || contentTypes.stream().noneMatch(
            ct -> ct.startsWith(Representation.HAL_JSON_MEDIA_TYPE) || ct.startsWith(JSON_MEDIA_TYPE))) // content type header can be also: Content-Type: application/json; charset=utf-8
    {
        ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_UNSUPPORTED_MEDIA_TYPE,
                "Contet-Type must be either " + Representation.HAL_JSON_MEDIA_TYPE + " or " + JSON_MEDIA_TYPE);
        return;
    }

    String _content = ChannelReader.read(exchange.getRequestChannel());

    DBObject content;

    try {
        content = (DBObject) JSON.parse(_content);
    } catch (JSONParseException ex) {
        ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_NOT_ACCEPTABLE, "invalid data", ex);
        return;
    }

    HashSet<String> keysToRemove = new HashSet<>();

    if (content == null) {
        context.setContent(null);
    } else {
        // filter out reserved keys
        content.keySet().stream().filter(key -> key.startsWith("_") && !key.equals("_id")).forEach(key -> {
            keysToRemove.add(key);
        });

        keysToRemove.stream().map(keyToRemove -> {
            content.removeField(keyToRemove);
            return keyToRemove;
        }).forEach(keyToRemove -> {
            context.addWarning("the reserved field " + keyToRemove + " was filtered out from the request");
        });

        // inject the request content in the context
        context.setContent(content);
    }

    next.handleRequest(exchange, context);
}

From source file:com.softinstigate.restheart.handlers.injectors.RequestContextInjectorHandler.java

License:Open Source License

/**
 *
 * @param exchange
 * @param context
 * @throws Exception
 */
@Override
public void handleRequest(HttpServerExchange exchange, RequestContext context) throws Exception {
    RequestContext rcontext = new RequestContext(exchange, whereUri, whatUri);

    Deque<String> __pagesize = exchange.getQueryParameters().get("pagesize");

    int page = 1; // default page
    int pagesize = 100; // default pagesize

    if (__pagesize != null && !(__pagesize.isEmpty())) {
        try {
            pagesize = Integer.parseInt(__pagesize.getFirst());
        } catch (NumberFormatException ex) {
            ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_BAD_REQUEST,
                    "illegal pagesize paramenter, it is not a number", ex);
            return;
        }
    }

    if (pagesize < 1 || pagesize > 1000) {
        ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_BAD_REQUEST,
                "illegal page parameter, pagesize must be >= 0 and <= 1000");
        return;
    } else {
        rcontext.setPagesize(pagesize);
    }

    Deque<String> __page = exchange.getQueryParameters().get("page");

    if (__page != null && !(__page.isEmpty())) {
        try {
            page = Integer.parseInt(__page.getFirst());
        } catch (NumberFormatException ex) {
            ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_BAD_REQUEST,
                    "illegal page paramenter, it is not a number", ex);
            return;
        }
    }

    if (page < 1) {
        ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_BAD_REQUEST,
                "illegal page paramenter, it is < 1");
        return;
    } else {
        rcontext.setPage(page);
    }

    Deque<String> __count = exchange.getQueryParameters().get("count");

    if (__count != null) {
        rcontext.setCount(true);
    }
    // get and check sort_by parameter
    Deque<String> sort_by = exchange.getQueryParameters().get("sort_by");

    if (sort_by != null) {
        if (sort_by.stream().anyMatch(s -> s == null || s.isEmpty())) {
            ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_BAD_REQUEST,
                    "illegal sort_by paramenter");
            return;
        }

        rcontext.setSortBy(exchange.getQueryParameters().get("sort_by"));
    }

    // get and check filter parameter
    Deque<String> filters = exchange.getQueryParameters().get("filter");

    if (filters != null) {
        if (filters.stream().anyMatch(f -> {
            if (f == null || f.isEmpty()) {
                ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_BAD_REQUEST,
                        "illegal filter paramenter (empty)");
                return true;
            }

            try {
                JSON.parse(f);
            } catch (Throwable t) {
                ResponseHelper.endExchangeWithMessage(exchange, HttpStatus.SC_BAD_REQUEST,
                        "illegal filter paramenter: " + f, t);
                return true;
            }

            return false;
        })) {
            return; // an error occurred
        }

        rcontext.setFilter(exchange.getQueryParameters().get("filter"));
    }

    next.handleRequest(exchange, rcontext);
}

From source file:com.softinstigate.restheart.integrationtest.AbstactIT.java

License:Open Source License

private void createTestData() {
    DBDAO.upsertDB(dbName, dbProps, new ObjectId(), false);
    CollectionDAO.upsertCollection(dbName, collection1Name, coll1Props, new ObjectId(), false, false);
    CollectionDAO.upsertCollection(dbName, collection2Name, coll2Props, new ObjectId(), false, false);
    CollectionDAO.upsertCollection(dbName, docsCollectionName, docsCollectionProps, new ObjectId(), false,
            false);

    for (String index : docsCollectionIndexesStrings) {
        IndexDAO.createIndex(dbName, docsCollectionName, ((DBObject) JSON.parse(index)), null);
    }

    DocumentDAO.upsertDocument(dbName, collection1Name, document1Id, document1Props, new ObjectId(), false);
    DocumentDAO.upsertDocument(dbName, collection2Name, document2Id, document2Props, new ObjectId(), false);

    for (String doc : docsPropsStrings) {
        DocumentDAO.upsertDocument(dbName, docsCollectionName, new ObjectId().toString(),
                ((DBObject) JSON.parse(doc)), new ObjectId(), false);
    }
    LOG.info("test data created");
}

From source file:com.strategicgains.docussandra.controller.perf.remote.mongo.MongoLoader.java

License:Apache License

public static void loadMongoData(MongoClientURI uri, final int NUM_WORKERS, Database database,
        final int numDocs, final PerfTestParent clazz) {
    logger.info("------------Loading Data into: " + database.name() + " with MONGO!------------");
    try {
        try {
            MongoClient mongoClient = new MongoClient(uri);
            mongoClient.setWriteConcern(WriteConcern.MAJORITY);
            DB db = mongoClient.getDB(database.name());
            final DBCollection coll = db.getCollection(database.name());
            ArrayList<Thread> workers = new ArrayList<>(NUM_WORKERS + 1);
            int docsPerWorker = numDocs / NUM_WORKERS;
            try {
                List<Document> docs = clazz.getDocumentsFromFS();
                ArrayList<List<Document>> documentQueues = new ArrayList<>(NUM_WORKERS + 1);
                int numDocsAssigned = 0;
                while ((numDocsAssigned + 1) < numDocs) {
                    int start = numDocsAssigned;
                    int end = numDocsAssigned + docsPerWorker;
                    if (end > numDocs) {
                        end = numDocs - 1;
                    }
                    documentQueues.add(new ArrayList<>(docs.subList(start, end)));
                    numDocsAssigned = end;
                }
                for (final List<Document> queue : documentQueues) {
                    workers.add(new Thread() {
                        @Override
                        public void run() {
                            for (Document d : queue) {
                                DBObject o = (DBObject) JSON.parse(d.object());
                                coll.save(o);
                            }
                            logger.info("Thread " + Thread.currentThread().getName() + " is done. It processed "
                                    + queue.size() + " documents.");
                        }
                    });
                }
            } catch (UnsupportedOperationException e)//we can't read everything in at once
            {
                //all we need to do in this block is find a way to set "workers"
                for (int i = 0; i < NUM_WORKERS; i++) {
                    workers.add(new Thread() {
                        private final int chunk = (int) (Math.random() * 100) + 150; // pick a random chunk size so the workers do not all hit the FS at the same time and cause a bottleneck

                        @Override
                        public void run() {
                            ThreadLocal<Integer> counter = new ThreadLocal<>();
                            counter.set(new Integer(0));
                            try {
                                List<Document> docs = clazz.getDocumentsFromFS(chunk);//grab a handful of documents
                                while (docs.size() > 0) {
                                    for (Document d : docs)//process the documents we grabbed
                                    {
                                        DBObject o = (DBObject) JSON.parse(d.object());
                                        coll.save(o);
                                        counter.set(counter.get() + 1);
                                    }
                                    docs = clazz.getDocumentsFromFS(chunk);//grab another handful of documents
                                }
                                logger.info("Thread " + Thread.currentThread().getName()
                                        + " is done. It processed " + counter.get() + " documents.");
                            } catch (IOException | ParseException e) {
                                logger.error("Couldn't read from document", e);
                            }
                        }
                    });
                }
            }

            long start = new Date().getTime();
            //start your threads!
            for (Thread t : workers) {
                t.start();
            }
            logger.info("All threads started, waiting for completion.");
            boolean allDone = false;
            boolean first = true;
            while (!allDone || first) {
                first = false;
                boolean done = true;
                for (Thread t : workers) {
                    if (t.isAlive()) {
                        done = false;
                        logger.info("Thread " + t.getName() + " is still running.");
                        break;
                    }
                }
                if (done) {
                    allDone = true;
                } else {
                    logger.info("We still have workers running...");
                    try {
                        Thread.sleep(10000);
                    } catch (InterruptedException e) {
                    }
                }
            }
            long end = new Date().getTime();
            long milliseconds = end - start;
            double seconds = (double) milliseconds / 1000d;
            output.info("Done loading data using " + NUM_WORKERS + " workers. Took: " + seconds + " seconds");
            double tpms = (double) numDocs / (double) milliseconds;
            double tps = tpms * 1000;
            double transactionTime = (double) milliseconds / (double) numDocs;
            output.info(database.name() + " Mongo Average Transactions Per Second: " + tps);
            output.info(
                    database.name() + " Mongo Average Transaction Time (in milliseconds): " + transactionTime);

        } catch (UnknownHostException e) {
            logger.error("Couldn't connect to Mongo Server", e);
        }
    } catch (IOException | ParseException e) {
        logger.error("Couldn't read data.", e);
    }
}

From source file:com.stratio.qa.specs.WhenGSpec.java

License:Apache License

/**
 * Execute a query of a given type on a (Mongo) database collection.
 *
 * @param query         path to the query
 * @param type          type of data in the query (string or json)
 * @param database      the Mongo database to connect to
 * @param collection    collection in the database
 * @param modifications modifications to perform on the query
 */
@When("^I execute a query '(.+?)' of type '(json|string)' in mongo '(.+?)' database using collection '(.+?)' with:$")
public void sendQueryOfType(String query, String type, String database, String collection,
        DataTable modifications) throws Exception {
    try {
        commonspec.setResultsType("mongo");
        String retrievedData = commonspec.retrieveData(query, type);
        String modifiedData = commonspec.modifyData(retrievedData, type, modifications);
        commonspec.getMongoDBClient().connectToMongoDBDataBase(database);
        DBCollection dbCollection = commonspec.getMongoDBClient().getMongoDBCollection(collection);
        DBObject dbObject = (DBObject) JSON.parse(modifiedData);
        DBCursor cursor = dbCollection.find(dbObject);
        commonspec.setMongoResults(cursor);
    } catch (Exception e) {
        commonspec.getExceptions().add(e);
    }
}

From source file:com.stratio.qa.utils.MongoDBUtils.java

License:Apache License

/**
 * Inserts a document into a MongoDB collection.
 *
 * @param collection the collection to insert into
 * @param document   the JSON document to insert
 */
public void insertDocIntoMongoDBCollection(String collection, String document) {
    DBObject dbObject = (DBObject) JSON.parse(document);
    this.dataBase.getCollection(collection).insert(dbObject);
}