List of usage examples for com.mongodb DBCollection findOne
@Nullable public DBObject findOne(final Object id)
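findOne returns a single matching document, or null when nothing matches. Before the project examples below, here is a minimal, self-contained sketch of the common findOne overloads against the legacy com.mongodb driver API; the connection settings, the "testdb"/"users" names, and the example _id and field values are illustrative only and not taken from any of the projects listed.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

public class FindOneExample {
    public static void main(String[] args) {
        // Illustrative connection, database, and collection names
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        DB db = mongoClient.getDB("testdb");
        DBCollection users = db.getCollection("users");

        // findOne(Object id): matches against the _id field; returns null if no document matches
        DBObject byId = users.findOne("some-id");

        // findOne(DBObject query): returns the first document matching an arbitrary query
        DBObject byQuery = users.findOne(new BasicDBObject("name", "alice"));

        // findOne(DBObject query, DBObject projection): limits the fields returned
        DBObject projected = users.findOne(new BasicDBObject("name", "alice"),
                new BasicDBObject("email", 1));

        System.out.println(byId);
        System.out.println(byQuery);
        System.out.println(projected);

        mongoClient.close();
    }
}

Note that findOne returns null rather than throwing when nothing matches; several of the examples below rely on that null check.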
From source file:nl.knaw.huygens.timbuctoo.storage.mongo.MongoStorage.java
License:Open Source License
private <T extends Entity> void addVersion(Class<T> type, String id, JsonNode tree) throws StorageException {
    DBCollection collection = getVersionCollection(type);
    DBObject query = queries.selectById(id);

    if (collection.findOne(query) == null) {
        ObjectNode node = objectMapper.createObjectNode();
        node.put("_id", id);
        node.put("versions", objectMapper.createArrayNode());
        mongoDB.insert(collection, id, toDBObject(node));
    }

    ObjectNode versionNode = objectMapper.createObjectNode();
    versionNode.put("versions", tree);
    ObjectNode update = objectMapper.createObjectNode();
    update.put("$push", versionNode);
    mongoDB.update(collection, query, toDBObject(update));
}
From source file:org.alfresco.extension.wcmdeployment.mongodb.MongoDbDeploymentTarget.java
License:Open Source License
private DBObject findByPath(final DBCollection collection, final String path) {
    DBObject result = null;

    if (collection != null && path != null) {
        DBObject pathQuery = new BasicDBObject();
        collection.ensureIndex("path"); // Make sure we index path, so that listings are efficient
        pathQuery.put("path", path);
        result = collection.findOne(pathQuery);
    }

    return (result);
}
From source file:org.alfresco.extension.wcmdeployment.mongodb.MongoDbDeploymentTarget.java
License:Open Source License
private DBObject findVersionDoc(final DB database) {
    DBObject result = null;

    if (database != null) {
        DBCollection collection = database.getCollection("deploymentSystem");
        result = collection.findOne("version");
    }

    return (result);
}
From source file:org.apache.camel.component.gridfs.GridFsConsumer.java
License:Apache License
@Override
public void run() {
    DBCursor c = null;
    java.util.Date fromDate = null;
    QueryStrategy s = endpoint.getQueryStrategy();
    boolean usesTimestamp = (s != QueryStrategy.FileAttribute);
    boolean persistsTimestamp = (s == QueryStrategy.PersistentTimestamp
            || s == QueryStrategy.PersistentTimestampAndFileAttribute);
    boolean usesAttribute = (s == QueryStrategy.FileAttribute
            || s == QueryStrategy.TimeStampAndFileAttribute
            || s == QueryStrategy.PersistentTimestampAndFileAttribute);
    DBCollection ptsCollection = null;
    DBObject persistentTimestamp = null;
    if (persistsTimestamp) {
        ptsCollection = endpoint.getDB().getCollection(endpoint.getPersistentTSCollection());
        // ensure standard indexes as long as collections are small
        try {
            if (ptsCollection.count() < 1000) {
                ptsCollection.createIndex(new BasicDBObject("id", 1));
            }
        } catch (MongoException e) {
            //TODO: Logging
        }
        persistentTimestamp = ptsCollection.findOne(new BasicDBObject("id", endpoint.getPersistentTSObject()));
        if (persistentTimestamp == null) {
            persistentTimestamp = new BasicDBObject("id", endpoint.getPersistentTSObject());
            fromDate = new java.util.Date();
            persistentTimestamp.put("timestamp", fromDate);
            ptsCollection.save(persistentTimestamp);
        }
        fromDate = (java.util.Date) persistentTimestamp.get("timestamp");
    } else if (usesTimestamp) {
        fromDate = new java.util.Date();
    }
    try {
        Thread.sleep(endpoint.getInitialDelay());
        while (isStarted()) {
            if (c == null || c.getCursorId() == 0) {
                if (c != null) {
                    c.close();
                }
                String queryString = endpoint.getQuery();
                DBObject query;
                if (queryString == null) {
                    query = new BasicDBObject();
                } else {
                    query = (DBObject) JSON.parse(queryString);
                }
                if (usesTimestamp) {
                    query.put("uploadDate", new BasicDBObject("$gt", fromDate));
                }
                if (usesAttribute) {
                    query.put(endpoint.getFileAttributeName(), null);
                }
                c = endpoint.getFilesCollection().find(query);
            }
            boolean dateModified = false;
            while (c.hasNext() && isStarted()) {
                GridFSDBFile file = (GridFSDBFile) c.next();
                GridFSDBFile forig = file;
                if (usesAttribute) {
                    file.put(endpoint.getFileAttributeName(), "processing");
                    DBObject q = BasicDBObjectBuilder.start("_id", file.getId())
                            .append("camel-processed", null).get();
                    forig = (GridFSDBFile) endpoint.getFilesCollection().findAndModify(q, null, null, false,
                            file, true, false);
                }
                if (forig != null) {
                    file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));
                    Exchange exchange = endpoint.createExchange();
                    exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
                    exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
                    exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
                    exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
                    exchange.getIn().setBody(file.getInputStream(), InputStream.class);
                    try {
                        getProcessor().process(exchange);
                        //System.out.println("Processing " + file.getFilename());
                        if (usesAttribute) {
                            forig.put(endpoint.getFileAttributeName(), "done");
                            endpoint.getFilesCollection().save(forig);
                        }
                        if (usesTimestamp) {
                            if (file.getUploadDate().compareTo(fromDate) > 0) {
                                fromDate = file.getUploadDate();
                                dateModified = true;
                            }
                        }
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                }
            }
            if (persistsTimestamp && dateModified) {
                persistentTimestamp.put("timestamp", fromDate);
                ptsCollection.save(persistentTimestamp);
            }
            Thread.sleep(endpoint.getDelay());
        }
    } catch (Throwable e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    if (c != null) {
        c.close();
    }
}
From source file:org.apache.camel.component.mongodb.MongoDbProducer.java
License:Apache License
protected void doFindById(Exchange exchange) throws Exception {
    DBCollection dbCol = calculateCollection(exchange);
    Object o = exchange.getIn().getMandatoryBody();
    DBObject ret;

    DBObject fieldFilter = exchange.getIn().getHeader(MongoDbConstants.FIELDS_FILTER, DBObject.class);
    if (fieldFilter == null) {
        ret = dbCol.findOne(o);
    } else {
        ret = dbCol.findOne(o, fieldFilter);
    }

    Message resultMessage = prepareResponseMessage(exchange, MongoDbOperation.save);
    resultMessage.setBody(ret);
    resultMessage.setHeader(MongoDbConstants.RESULT_TOTAL_SIZE, ret == null ? 0 : 1);
}
From source file:org.apache.camel.component.mongodb.MongoDbProducer.java
License:Apache License
protected void doFindOneByQuery(Exchange exchange) throws Exception {
    DBCollection dbCol = calculateCollection(exchange);
    DBObject o = exchange.getIn().getMandatoryBody(DBObject.class);
    DBObject ret;

    DBObject fieldFilter = exchange.getIn().getHeader(MongoDbConstants.FIELDS_FILTER, DBObject.class);
    if (fieldFilter == null) {
        ret = dbCol.findOne(o);
    } else {
        ret = dbCol.findOne(o, fieldFilter);
    }

    Message resultMessage = prepareResponseMessage(exchange, MongoDbOperation.findOneByQuery);
    resultMessage.setBody(ret);
    resultMessage.setHeader(MongoDbConstants.RESULT_TOTAL_SIZE, ret == null ? 0 : 1);
}
From source file:org.apache.chemistry.opencmis.mongodb.MongodbUtils.java
License:Apache License
public void moveNode(BasicDBObject node, BasicDBObject newParent, DBCollection collection) {
    // Get the left and right values
    Long originalLeft = node.getLong("left");
    Long originalRight = node.getLong("right");
    Long subtreeWidth = originalRight - originalLeft;

    // Compute the new left and right values for the nodeToMove
    Long newLeft = newParent.getLong("right");
    Long newRight = newParent.getLong("right") + subtreeWidth;

    // Make space for the new subtree under the new parent
    collection.update(
            new BasicDBObject().append("right", new BasicDBObject().append("$gte", newParent.get("right"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("right", subtreeWidth + 1)),
            false, true);
    collection.update(
            new BasicDBObject().append("left", new BasicDBObject().append("$gte", newParent.get("right"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("left", subtreeWidth + 1)),
            false, true);

    // Re-fetch the node to move, since the left and right values may have changed
    node = (BasicDBObject) collection.findOne(new BasicDBObject().append("_id", node.get("_id")));
    Long difference = node.getLong("left") - newLeft;

    // Move the old subtree into a new location
    collection.update(
            new BasicDBObject().append("left", new BasicDBObject().append("$gte", node.getLong("left")))
                    .append("right", new BasicDBObject().append("$lte", node.getLong("right"))),
            new BasicDBObject().append("$inc",
                    new BasicDBObject().append("left", 0 - difference).append("right", 0 - difference)),
            false, true);

    // Remove empty space from the parent
    //db.test.update({left:nodeToMove.left-1, right:nodeToMove.right+1}, {right:nodeToMove.left});
    collection.update(
            new BasicDBObject().append("right", new BasicDBObject().append("$gte", node.get("left"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("right", 0 - subtreeWidth - 1)),
            false, true);
    collection.update(
            new BasicDBObject().append("left", new BasicDBObject().append("$gte", node.get("left"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("left", 0 - subtreeWidth - 1)),
            false, true);
}
From source file:org.apache.chemistry.opencmis.mongodb.MongodbUtils.java
License:Apache License
public BasicDBObject getNodeByPath(String path, DBCollection collection) {
    String[] pathSplit = path.split(PATH_SEPARATOR);
    int pathDepth = pathSplit.length - 1;

    DBCursor candidates = collection
            .find(new BasicDBObject().append("title", pathSplit[pathDepth]).append("level", "pathDepth"));

    // if number of candidates is one, then we have found our content item - return it
    if (candidates.size() == 1) {
        return (BasicDBObject) candidates.next();
    }

    // if number of candidates is greater than the path depth, then just follow the depth instead
    if (candidates.size() >= pathSplit.length) {
        int currentLevel = 1;
        DBObject currentNode = collection.findOne(new BasicDBObject().append("title", "root"));
        while (currentLevel <= pathDepth && currentNode != null) {
            currentNode = collection.findOne(
                    new BasicDBObject().append("left", new BasicDBObject("$gt", currentNode.get("left")))
                            .append("right", new BasicDBObject("$gt", currentNode.get("right")))
                            .append("title", pathSplit[currentLevel]).append("level", currentLevel));
            currentLevel++;
        }
        return (BasicDBObject) currentNode;
    } else {
        int pathLevel = pathDepth - 1;
        DBObject candidate = null;
        while (candidates.hasNext() && pathLevel > 0) {
            candidate = candidates.next();
            DBCursor ancestors = collection
                    .find(new BasicDBObject("left", new BasicDBObject().append("$lt", candidate.get("left")))
                            .append("right", new BasicDBObject().append("$gt", candidate.get("right"))))
                    .sort(new BasicDBObject("left", 1));
            DBObject ancestor = ancestors.next();
            while (ancestor != null && ancestor.get("title").equals(pathSplit[pathLevel]) && pathLevel > -1) {
                ancestor = ancestors.next();
                pathLevel--;
            }
        }
        return (BasicDBObject) candidate;
    }
}
From source file:org.apache.isis.objectstore.nosql.db.mongo.MongoClientCommandContext.java
License:Apache License
@Override
public void delete(final ObjectSpecId objectSpecId, final String mongoId, final String version, final Oid oid) {
    final DBCollection instances = db.getCollection(objectSpecId.asString());
    final DBObject object = instances.findOne(mongoId);
    if (!object.get(PropertyNames.VERSION).equals(version)) {
        throw new ConcurrencyException("Could not delete object of different version", oid);
    }
    instances.remove(object);
    LOG.info("removed " + oid);
}
From source file:org.apache.isis.objectstore.nosql.db.mongo.MongoDb.java
License:Apache License
@Override
public String getService(final ObjectSpecId objectSpecId) {
    final DBCollection services = db.getCollection("services");
    final DBObject object = services.findOne(new BasicDBObject().append("name", objectSpecId.asString()));
    if (object == null) {
        return null;
    } else {
        final String id = (String) object.get("key");
        LOG.info("service found " + objectSpecId + ":" + id);
        return id;
    }
}