List of usage examples for com.mongodb.util.JSON.serialize
public static String serialize(final Object object)
Serializes an object into its JSON form.
This method delegates serialization to the serializer returned by JSONSerializers.getLegacy(), so the output uses the driver's legacy JSON format.
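A minimal stand-alone sketch of the call, before the source-file examples below (field names and values are invented for illustration):

    import com.mongodb.BasicDBObject;
    import com.mongodb.util.JSON;

    public class JsonSerializeExample {
        public static void main(String[] args) {
            // Build a simple document and serialize it to its JSON string form.
            BasicDBObject doc = new BasicDBObject("name", "alice").append("age", 30);
            String json = JSON.serialize(doc);
            System.out.println(json); // e.g. { "name" : "alice" , "age" : 30 }
        }
    }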
From source file:org.alfresco.bm.api.v1.ResultsRestAPI.java
License:Open Source License
@GET @Path("/eventResults") @Produces(MediaType.APPLICATION_JSON)/*ww w.j ava 2 s. c o m*/ public String getEventResults( @DefaultValue(ALL_EVENT_NAMES) @QueryParam("filterEventName") String filterEventName, @DefaultValue("All") @QueryParam("filterSuccess") String filterSuccess, @DefaultValue("0") @QueryParam("skipResults") int skipResults, @DefaultValue("10") @QueryParam("numberOfResults") int numberOfResults) { EventResultFilter filter = getFilter(filterSuccess); final ResultService resultService = getResultService(); String nameFilterString = filterEventName.equals(ALL_EVENT_NAMES) ? "" : filterEventName; // get event details List<EventDetails> details = resultService.getEventDetails(filter, nameFilterString, skipResults, numberOfResults); // serialize back .... BasicDBList retList = new BasicDBList(); for (EventDetails detail : details) { retList.add(detail.toDBObject()); } return JSON.serialize(retList); }
From source file:org.alfresco.bm.api.v1.StatusAPI.java
License:Open Source License
@GET @Path("/logs") @Produces(MediaType.APPLICATION_JSON)/* w w w.ja v a 2 s . c o m*/ public String getLogs(@QueryParam("driverId") String driverId, @QueryParam("test") String test, @QueryParam("run") String run, @DefaultValue("INFO") @QueryParam("level") String levelStr, @DefaultValue("0") @QueryParam("from") Long from, @DefaultValue("" + Long.MAX_VALUE) @QueryParam("to") Long to, @DefaultValue("0") @QueryParam("skip") int skip, @DefaultValue("50") @QueryParam("count") int count) { if (logger.isDebugEnabled()) { logger.debug("Inbound: " + "[driverId:" + driverId + ",test:" + test + ",run:" + run + ",level:" + levelStr + ",from:" + new Date(from) + ",to:" + new Date(to) + ",skip:" + skip + ",count:" + count + "]"); } LogLevel level = LogLevel.INFO; try { level = LogLevel.valueOf(levelStr); } catch (Exception e) { // Just allow this } DBCursor cursor = null; try { String json = "[]"; cursor = logService.getLogs(driverId, test, run, level, from, to, skip, count); if (cursor.count() > 0) { json = JSON.serialize(cursor); } if (logger.isDebugEnabled()) { logger.debug("Outbound: " + json); } return json; } catch (WebApplicationException e) { throw e; } catch (Exception e) { throwAndLogException(Status.INTERNAL_SERVER_ERROR, e); return null; } finally { if (cursor != null) { try { cursor.close(); } catch (Exception e) { } } } }
From source file:org.apache.camel.component.gridfs.GridFsConsumer.java
License:Apache License
@Override
public void run() {
    DBCursor c = null;
    java.util.Date fromDate = null;

    QueryStrategy s = endpoint.getQueryStrategy();
    boolean usesTimestamp = (s != QueryStrategy.FileAttribute);
    boolean persistsTimestamp = (s == QueryStrategy.PersistentTimestamp
            || s == QueryStrategy.PersistentTimestampAndFileAttribute);
    boolean usesAttribute = (s == QueryStrategy.FileAttribute || s == QueryStrategy.TimeStampAndFileAttribute
            || s == QueryStrategy.PersistentTimestampAndFileAttribute);

    DBCollection ptsCollection = null;
    DBObject persistentTimestamp = null;
    if (persistsTimestamp) {
        ptsCollection = endpoint.getDB().getCollection(endpoint.getPersistentTSCollection());
        // ensure standard indexes as long as collections are small
        try {
            if (ptsCollection.count() < 1000) {
                ptsCollection.createIndex(new BasicDBObject("id", 1));
            }
        } catch (MongoException e) {
            // TODO: Logging
        }
        persistentTimestamp = ptsCollection.findOne(new BasicDBObject("id", endpoint.getPersistentTSObject()));
        if (persistentTimestamp == null) {
            persistentTimestamp = new BasicDBObject("id", endpoint.getPersistentTSObject());
            fromDate = new java.util.Date();
            persistentTimestamp.put("timestamp", fromDate);
            ptsCollection.save(persistentTimestamp);
        }
        fromDate = (java.util.Date) persistentTimestamp.get("timestamp");
    } else if (usesTimestamp) {
        fromDate = new java.util.Date();
    }

    try {
        Thread.sleep(endpoint.getInitialDelay());
        while (isStarted()) {
            if (c == null || c.getCursorId() == 0) {
                if (c != null) {
                    c.close();
                }
                String queryString = endpoint.getQuery();
                DBObject query;
                if (queryString == null) {
                    query = new BasicDBObject();
                } else {
                    query = (DBObject) JSON.parse(queryString);
                }
                if (usesTimestamp) {
                    query.put("uploadDate", new BasicDBObject("$gt", fromDate));
                }
                if (usesAttribute) {
                    query.put(endpoint.getFileAttributeName(), null);
                }
                c = endpoint.getFilesCollection().find(query);
            }
            boolean dateModified = false;
            while (c.hasNext() && isStarted()) {
                GridFSDBFile file = (GridFSDBFile) c.next();
                GridFSDBFile forig = file;
                if (usesAttribute) {
                    file.put(endpoint.getFileAttributeName(), "processing");
                    DBObject q = BasicDBObjectBuilder.start("_id", file.getId()).append("camel-processed", null)
                            .get();
                    forig = (GridFSDBFile) endpoint.getFilesCollection().findAndModify(q, null, null, false,
                            file, true, false);
                }
                if (forig != null) {
                    file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));

                    Exchange exchange = endpoint.createExchange();
                    exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA,
                            JSON.serialize(file.getMetaData()));
                    exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
                    exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
                    exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
                    exchange.getIn().setBody(file.getInputStream(), InputStream.class);
                    try {
                        getProcessor().process(exchange);
                        // System.out.println("Processing " + file.getFilename());
                        if (usesAttribute) {
                            forig.put(endpoint.getFileAttributeName(), "done");
                            endpoint.getFilesCollection().save(forig);
                        }
                        if (usesTimestamp) {
                            if (file.getUploadDate().compareTo(fromDate) > 0) {
                                fromDate = file.getUploadDate();
                                dateModified = true;
                            }
                        }
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                }
            }
            if (persistsTimestamp && dateModified) {
                persistentTimestamp.put("timestamp", fromDate);
                ptsCollection.save(persistentTimestamp);
            }
            Thread.sleep(endpoint.getDelay());
        }
    } catch (Throwable e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    if (c != null) {
        c.close();
    }
}
From source file:org.apache.camel.component.gridfs.GridFsProducer.java
License:Apache License
public void process(Exchange exchange) throws Exception {
    String operation = endpoint.getOperation();
    if (operation == null) {
        operation = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_OPERATION, String.class);
    }
    if (operation == null || "create".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        Long chunkSize = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_CHUNKSIZE, Long.class);

        InputStream ins = exchange.getIn().getMandatoryBody(InputStream.class);
        GridFSInputFile gfsFile = endpoint.getGridFs().createFile(ins, filename, true);
        if (chunkSize != null && chunkSize > 0) {
            gfsFile.setChunkSize(chunkSize);
        }
        final String ct = exchange.getIn().getHeader(Exchange.CONTENT_TYPE, String.class);
        if (ct != null) {
            gfsFile.setContentType(ct);
        }
        String metaData = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_METADATA, String.class);
        DBObject dbObject = (DBObject) JSON.parse(metaData);
        gfsFile.setMetaData(dbObject);
        gfsFile.save();
        exchange.getIn().setHeader(Exchange.FILE_NAME_PRODUCED, gfsFile.getFilename());
    } else if ("remove".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        endpoint.getGridFs().remove(filename);
    } else if ("findOne".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        GridFSDBFile file = endpoint.getGridFs().findOne(filename);
        if (file != null) {
            exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
            exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
            exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
            exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
            exchange.getIn().setBody(file.getInputStream(), InputStream.class);
        } else {
            throw new FileNotFoundException("No GridFS file for " + filename);
        }
    } else if ("listAll".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        DBCursor cursor;
        if (filename == null) {
            cursor = endpoint.getGridFs().getFileList();
        } else {
            cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
        }
        exchange.getIn().setBody(new DBCursorFilenameReader(cursor), Reader.class);
    } else if ("count".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        DBCursor cursor;
        if (filename == null) {
            cursor = endpoint.getGridFs().getFileList();
        } else {
            cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
        }
        exchange.getIn().setBody(cursor.count(), Integer.class);
    }
}
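The producer moves GridFS metadata between a String header and a DBObject with JSON.parse and JSON.serialize. A small sketch of that round trip (the metadata value is invented for illustration):

    import com.mongodb.DBObject;
    import com.mongodb.util.JSON;

    public class MetadataRoundTripExample {
        public static void main(String[] args) {
            String metadataHeader = "{ \"owner\" : \"camel\", \"tags\" : [ \"gridfs\", \"demo\" ] }";
            // String header -> DBObject, as done before gfsFile.setMetaData(...)
            DBObject metadata = (DBObject) JSON.parse(metadataHeader);
            // DBObject -> String header, as done after findOne(...)
            String backToJson = JSON.serialize(metadata);
            System.out.println(backToJson);
        }
    }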
From source file:org.apache.jmeter.protocol.mongodb.mongo.EvalResultHandler.java
License:Apache License
public String handle(DBObject o) {
    return JSON.serialize(o);
}
From source file:org.apereo.openlrs.model.event.Event.java
License:Educational Community License
public String getRaw() {
    if (raw != null) {
        return JSON.serialize(raw);
    }
    return toJSON();
}
From source file:org.axonframework.mongo.serialization.DBObjectToStringContentTypeConverter.java
License:Apache License
@Override
public String convert(DBObject original) {
    return JSON.serialize(original);
}
From source file:org.bananaforscale.cormac.dao.database.DatabaseDataServiceImpl.java
License:Apache License
/**
 * Returns statistics that reflect the use state of a single database.
 *
 * @param databaseName the database
 * @return A JSON string with statistics reflecting the database state.
 * @throws DatasourceException
 * @throws NotFoundException
 */
@Override
public String getDatabaseStats(String databaseName) throws DatasourceException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        // TODO: Find equivalent in java api for MongoDatabase
        DB mongoDatabase = mongoClient.getDB(databaseName);
        DBObject statsObject = mongoDatabase.getStats();
        return JSON.serialize(statsObject);
    } catch (MongoException ex) {
        logger.error("An error occured while retrieving database stats", ex);
        throw new DatasourceException("An error occured while retrieving database stats");
    }
}
From source file:org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java
License:Apache License
/**
 * Returns all the documents in a collection.
 *
 * @param databaseName the database
 * @param collectionName the collection
 * @param query a JSON query param in the style of mongo
 * @param fields fields to return
 * @param skip the amount of documents to skip
 * @param limit the amount of documents to limit the result to
 * @param orderBy order ascending or descending by property
 * @param includeId determines whether to include the Mongo "_id" field
 * @return the documents in a collection
 * @throws DatasourceException
 * @throws NotFoundException
 */
@Override
public List<String> getAll(String databaseName, String collectionName, String query, String fields,
        String skip, String limit, String orderBy, boolean includeId)
        throws DatasourceException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        if (!collectionExists(databaseName, collectionName)) {
            throw new NotFoundException("The collection doesn't exist in the datasource");
        }
        Integer intSkip, intLimit;
        try {
            intSkip = Integer.parseInt(skip);
        } catch (NumberFormatException ex) {
            intSkip = 0;
        }
        try {
            intLimit = Integer.parseInt(limit);
        } catch (NumberFormatException ex) {
            intLimit = 0;
        }
        // 1 or -1 to specify an ascending or descending sort respectively.
        Document orderByObject = null;
        if (orderBy != null && !orderBy.isEmpty()) {
            if (orderBy.contains("ascending")) {
                String[] parts = orderBy.split(":");
                orderByObject = new Document(parts[0], 1);
            } else if (orderBy.contains("descending")) {
                String[] parts = orderBy.split(":");
                orderByObject = new Document(parts[0], -1);
            }
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        MongoCollection collection = mongoDatabase.getCollection(collectionName);
        FindIterable iterable = (query == null || query.isEmpty()) ? collection.find()
                : collection.find(Document.parse(query));
        // TODO: Figure out how to do this in new API
        // if (fields != null && !fields.isEmpty()) {
        //     // expect the form to be field:value,field:value
        //     Document document = new Document();
        //     String[] parts = fields.split(",");
        //     for (String part : parts) {
        //         String[] tempParts = part.split(":");
        //         document.append(tempParts[0], tempParts[1]);
        //     }
        //     iterable.projection(document);
        // }
        iterable.skip(intSkip);
        iterable.limit(intLimit);
        if (orderByObject != null) {
            iterable.sort(orderByObject);
        }
        Iterator<Document> curIter = iterable.iterator();
        List<String> documentList = new ArrayList<>();
        while (curIter.hasNext()) {
            Document current = curIter.next();
            if (!includeId) {
                current.remove("_id");
            }
            documentList.add(JSON.serialize(current));
        }
        return documentList;
    } catch (MongoException ex) {
        logger.error("An error occured while retrieving the document list", ex);
        throw new DatasourceException("An error occured while retrieving the document list");
    }
}
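The loop above hands each org.bson.Document from the newer driver API to the same JSON.serialize call. A minimal sketch of that in isolation (field names are illustrative):

    import org.bson.Document;
    import com.mongodb.util.JSON;

    public class SerializeDocumentExample {
        public static void main(String[] args) {
            Document doc = new Document("title", "example").append("views", 42);
            doc.remove("_id"); // mirrors the includeId handling above; harmless if the key is absent
            System.out.println(JSON.serialize(doc));
        }
    }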
From source file:org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java
License:Apache License
/**
 * Returns the document of the given document identifier.
 *
 * @param databaseName the database
 * @param collectionName the collection
 * @param documentId the document identifier to query for
 * @return the document of the given identifier
 * @throws DatasourceException
 * @throws NotFoundException
 */
@Override
public String getById(String databaseName, String collectionName, String documentId)
        throws DatasourceException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        if (!collectionExists(databaseName, collectionName)) {
            throw new NotFoundException("The collection doesn't exist in the datasource");
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mongoDatabase.getCollection(collectionName);
        Document query = new Document("_id", new ObjectId(documentId));
        if (collection.count(query) == 0) {
            throw new NotFoundException("The document doesn't exist in the datasource");
        }
        Document document = collection.find(query).first();
        document.remove("_id");
        return JSON.serialize(document);
    } catch (MongoException ex) {
        logger.error("An error occured while retrieving the document", ex);
        throw new DatasourceException("An error occured while retrieving the document");
    }
}