List of usage examples for com.mongodb.DBObject#containsField
boolean containsField(String s);
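containsField reports whether a key is present in the document, independent of its value: a key stored with a null value still counts as present, so it is not a null check. A minimal, self-contained sketch; the document contents and field names below are invented for illustration:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class ContainsFieldExample {
    public static void main(String[] args) {
        // hypothetical document with one regular field and one field stored as null
        DBObject doc = new BasicDBObject("name", "Alice").append("nickname", null);

        System.out.println(doc.containsField("name"));     // true
        System.out.println(doc.containsField("nickname")); // true: the key exists even though its value is null
        System.out.println(doc.containsField("email"));    // false: the key is absent
    }
}

The examples below, collected from open-source projects, show the same pattern in context: probe with containsField before reading, removing, or overwriting a field.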
From source file:org.hibernate.ogm.datastore.mongodb.index.impl.MongoDBIndexSpec.java
License:LGPL
/**
 * Prepare the options by adding additional information to them.
 */
private DBObject prepareOptions(DBObject options, String indexName, boolean unique) {
    options.put("name", indexName);
    if (unique) {
        options.put("unique", true);
        // MongoDB only allows one null value per unique index, which is not in line with what we usually
        // consider the definition of a unique constraint. Thus, we mark the index as sparse to index only
        // defined values and avoid this issue. We do this only if a partialFilterExpression has not been
        // defined, as partialFilterExpression and sparse are mutually exclusive.
        if (!options.containsField("partialFilterExpression")) {
            options.put("sparse", true);
        }
    }
    if (Boolean.TRUE.equals(options.get("text"))) {
        // "text" is an option we take into account to mark an index as a full-text index, as we cannot put
        // "text" as the order the way MongoDB does: ORM explicitly checks that the order is either asc or desc.
        // We remove the option from the DBObject so that we don't pass it to MongoDB.
        isTextIndex = true;
        options.removeField("text");
    }
    return options;
}
From source file:org.jongo.Update.java
License:Apache License
private void removeIdField(DBObject updateDbo) {
    DBObject pojoAsDbo = (DBObject) updateDbo.get("$set");
    if (pojoAsDbo.containsField("_id")) {
        // Need to materialize lazy objects, which are read-only
        if (pojoAsDbo instanceof LazyBSONObject) {
            BasicDBObject expanded = new BasicDBObject();
            expanded.putAll(pojoAsDbo);
            updateDbo.put("$set", expanded);
            pojoAsDbo = expanded;
        }
        pojoAsDbo.removeField("_id");
    }
}
From source file:org.jspresso.framework.model.persistence.mongo.JspressoEntityReadConverter.java
License:Open Source License
@SuppressWarnings("unchecked")
private void completeComponent(DBObject source, IComponentDescriptor<? extends IComponent> entityDescriptor,
        IComponent component, IEntityRegistry readerRegistry) {
    Class<? extends IComponent> componentContract = component.getComponentContract();
    for (IPropertyDescriptor propertyDescriptor : entityDescriptor.getPropertyDescriptors()) {
        if (propertyDescriptor != null && !propertyDescriptor.isComputed()) {
            String propertyName = propertyDescriptor.getName();
            Class<?> propertyType = propertyDescriptor.getModelType();
            String convertedPropertyName = getConverter().getMappingContext()
                    .getPersistentEntity(componentContract)
                    .getPersistentProperty(PropertyHelper.toJavaBeanPropertyName(propertyName)).getFieldName();
            if (source.containsField(convertedPropertyName)) {
                Object propertyValue = source.get(convertedPropertyName);
                Class<?> componentRefType = null;
                if (propertyDescriptor instanceof IRelationshipEndPropertyDescriptor) {
                    if (propertyDescriptor instanceof IReferencePropertyDescriptor<?>) {
                        componentRefType = ((IReferencePropertyDescriptor<?>) propertyDescriptor)
                                .getReferencedDescriptor().getModelType();
                    } else if (propertyDescriptor instanceof ICollectionPropertyDescriptor<?>) {
                        componentRefType = ((ICollectionPropertyDescriptor<?>) propertyDescriptor)
                                .getCollectionDescriptor().getElementDescriptor().getModelType();
                    }
                }
                if (propertyValue instanceof DBObject) {
                    if (propertyValue instanceof BasicDBList) {
                        if (propertyDescriptor instanceof ICollectionPropertyDescriptor<?>) {
                            Class<? extends Collection<?>> collectionInterface = ((ICollectionPropertyDescriptor) propertyDescriptor)
                                    .getCollectionDescriptor().getCollectionInterface();
                            if (IComponent.class.isAssignableFrom(componentRefType)) {
                                if (IEntity.class.isAssignableFrom(componentRefType)) {
                                    Collection<Serializable> collectionProperty = getCollectionFactory()
                                            .createComponentCollection(collectionInterface);
                                    for (Object element : (BasicDBList) propertyValue) {
                                        collectionProperty.add((Serializable) element);
                                    }
                                    component.straightSetProperty(propertyName,
                                            createProxyCollection(collectionProperty,
                                                    (Class<IEntity>) componentRefType, collectionInterface));
                                } else {
                                    Collection<Object> collectionProperty = getCollectionFactory()
                                            .createComponentCollection(collectionInterface);
                                    for (Object element : (BasicDBList) propertyValue) {
                                        if (element instanceof DBObject) {
                                            collectionProperty.add(convertComponent((DBObject) element,
                                                    (Class<? extends IComponent>) componentRefType,
                                                    readerRegistry));
                                        }
                                    }
                                    component.straightSetProperty(propertyName, collectionProperty);
                                }
                            } else {
                                Collection<Object> collectionProperty = getCollectionFactory()
                                        .createComponentCollection(collectionInterface);
                                for (Object element : (BasicDBList) propertyValue) {
                                    collectionProperty.add(element);
                                }
                                component.straightSetProperty(propertyName, collectionProperty);
                            }
                        } else {
                            component.straightSetProperty(propertyName, propertyValue);
                        }
                    } else if (propertyDescriptor instanceof IReferencePropertyDescriptor<?>) {
                        component.straightSetProperty(propertyName, convertComponent((DBObject) propertyValue,
                                (Class<? extends IComponent>) componentRefType, readerRegistry));
                    } else {
                        Object convertedPropertyValue = getConverter().read(propertyType,
                                (DBObject) propertyValue);
                        component.straightSetProperty(propertyName, convertedPropertyValue);
                    }
                } else if (componentRefType != null && propertyValue instanceof Serializable) {
                    component.straightSetProperty(propertyName, convertEntity((Serializable) propertyValue,
                            (Class<IEntity>) componentRefType, readerRegistry));
                } else {
                    Object convertedPropertyValue = getConverter().getConversionService().convert(propertyValue,
                            propertyType);
                    component.straightSetProperty(propertyName, convertedPropertyValue);
                }
            }
        }
    }
}
From source file:org.lucee.mongodb.DBCollectionImpl.java
License:Open Source License
@Override
public Object call(PageContext pc, Key methodName, Object[] args) throws PageException {
    // aggregate
    if (methodName.equals("aggregate")) {
        boolean hasOptions = false;
        AggregationOptions options = null;
        int len = checkArgLength("aggregate", args, 1, -1); // no length limitation
        List<DBObject> pipeline = new ArrayList<DBObject>();
        // Pipeline array as single argument
        if (len == 1 && decision.isArray(args[0])) {
            Array arr = caster.toArray(args[0]);
            if (arr.size() == 0)
                throw exp.createApplicationException(
                        "the array passed to the function aggregate needs at least 1 element");
            Iterator<Object> it = arr.valueIterator();
            while (it.hasNext()) {
                pipeline.add(toDBObject(it.next()));
            }
        } else {
            // First argument is pipeline of operations, second argument is struct of options --> returns cursor!
            if (len == 2 && decision.isArray(args[0]) && decision.isStruct(args[1])) {
                Array arr = caster.toArray(args[0]);
                Iterator<Object> it = arr.valueIterator();
                while (it.hasNext()) {
                    pipeline.add(toDBObject(it.next()));
                }
                hasOptions = true;
                // options builder
                AggregationOptions.Builder optbuilder = AggregationOptions.builder()
                        .outputMode(AggregationOptions.OutputMode.CURSOR);
                DBObject dboOpts = toDBObject(args[1]);
                if (dboOpts.containsField("allowDiskUse")) {
                    if (!decision.isBoolean(dboOpts.get("allowDiskUse")))
                        throw exp.createApplicationException("allowDiskUse in options must be boolean value");
                    optbuilder = optbuilder.allowDiskUse(caster.toBooleanValue(dboOpts.get("allowDiskUse")));
                }
                if (dboOpts.containsField("cursor")) {
                    if (!decision.isStruct(dboOpts.get("cursor")))
                        throw exp.createApplicationException(
                                "cursor in options must be struct with optional key batchSize");
                    DBObject cursoropts = toDBObject(dboOpts.get("cursor"));
                    if (cursoropts.containsField("batchSize")) {
                        if (!decision.isNumeric(cursoropts.get("batchSize")))
                            throw exp.createApplicationException("cursor.batchSize in options must be integer");
                        optbuilder = optbuilder.batchSize(caster.toIntValue(cursoropts.get("batchSize")));
                    }
                }
                options = optbuilder.build();
            }
            // First argument is first operation, second argument is array of additional operations
            else if (len == 2 && decision.isArray(args[1])) {
                Array arr = caster.toArray(args[1]);
                pipeline.add(toDBObject(args[0]));
                Iterator<Object> it = arr.valueIterator();
                while (it.hasNext()) {
                    pipeline.add(toDBObject(it.next()));
                }
            }
            // N arguments of pipeline operations
            else {
                for (int i = 0; i < len; i++) {
                    pipeline.add(toDBObject(args[i]));
                }
            }
        }
        if (hasOptions) {
            // returns Cursor - requires >= MongoDB 2.6
            return toCFML(coll.aggregate(pipeline, options));
        } else {
            // returns AggregationOutput
            return toCFML(coll.aggregate(pipeline));
        }
    }
    // count
    if (methodName.equals("count")) {
        int len = checkArgLength("count", args, 0, 1);
        if (len == 0) {
            return toCFML(coll.count());
        } else if (len == 1) {
            return toCFML(coll.count(toDBObject(args[0])));
        }
    }
    // dataSize
    if (methodName.equals("dataSize")) {
        checkArgLength("dataSize", args, 0, 0);
        return toCFML(coll.getStats().get("size"));
    }
    // distinct
    if (methodName.equals("distinct")) {
        int len = checkArgLength("distinct", args, 1, 2);
        if (len == 1) {
            return toCFML(coll.distinct(caster.toString(args[0])));
        } else if (len == 2) {
            return toCFML(coll.distinct(caster.toString(args[0]), toDBObject(args[1])));
        }
    }
    // drop
    if (methodName.equals("drop")) {
        checkArgLength("drop", args, 0, 0);
        coll.drop();
        return null;
    }
    // dropIndex
    if (methodName.equals("dropIndex")) {
        checkArgLength("dropIndex", args, 1, 1);
        DBObject dbo = toDBObject(args[0], null);
        if (dbo != null)
            coll.dropIndex(dbo);
        else
            coll.dropIndex(caster.toString(args[0]));
        return null;
    }
    // dropIndexes
    if (methodName.equals("dropIndexes")) {
        int len = checkArgLength("dropIndexes", args, 0, 1);
        if (len == 0) {
            coll.dropIndexes();
            return null;
        } else if (len == 1) {
            coll.dropIndexes(caster.toString(args[0]));
            return null;
        }
    }
    // createIndex
    if (methodName.equals("createIndex") || methodName.equals("ensureIndex")) {
        int len = checkArgLength("createIndex", args, 1, 3);
        if (len == 1) {
            DBObject dbo = toDBObject(args[0], null);
            if (dbo != null)
                coll.createIndex(dbo);
            else
                coll.createIndex(caster.toString(args[0]));
            return null;
        }
        if (len == 2) {
            DBObject p1 = toDBObject(args[0]);
            DBObject p2 = toDBObject(args[1], null);
            if (p2 != null)
                coll.createIndex(p1, p2);
            else
                coll.createIndex(p1, caster.toString(args[1]));
            return null;
        } else if (len == 3) {
            coll.createIndex(toDBObject(args[0]), caster.toString(args[1]), caster.toBooleanValue(args[2]));
            return null;
        }
    }
    // getStats
    if (methodName.equals("getStats") || methodName.equals("stats")) {
        checkArgLength("getStats", args, 0, 0);
        return toCFML(coll.getStats());
    }
    // getIndexes
    if (methodName.equals("getIndexes") || methodName.equals("getIndexInfo")) {
        checkArgLength(methodName.getString(), args, 0, 0);
        return toCFML(coll.getIndexInfo());
    }
    // getWriteConcern
    if (methodName.equals("getWriteConcern")) {
        checkArgLength("getWriteConcern", args, 0, 0);
        return toCFML(coll.getWriteConcern());
    }
    // find
    if (methodName.equals("find")) {
        int len = checkArgLength("find", args, 0, 3);
        DBCursor cursor = null;
        if (len == 0) {
            cursor = coll.find();
        } else if (len == 1) {
            cursor = coll.find(toDBObject(args[0]));
        } else if (len == 2) {
            cursor = coll.find(toDBObject(args[0]), toDBObject(args[1]));
        } else if (len == 3) {
            cursor = coll.find(toDBObject(args[0]), toDBObject(args[1])).skip(caster.toIntValue(args[2]));
        }
        return toCFML(cursor);
    }
    // findOne
    else if (methodName.equals("findOne")) {
        int len = checkArgLength("findOne", args, 0, 3);
        DBObject obj = null;
        if (len == 0) {
            obj = coll.findOne();
        } else if (len == 1) {
            DBObject arg1 = toDBObject(args[0], null);
            if (arg1 != null)
                obj = coll.findOne(arg1);
            else
                obj = coll.findOne(args[0]);
        } else if (len == 2) {
            DBObject arg1 = toDBObject(args[0], null);
            if (arg1 != null)
                obj = coll.findOne(arg1, toDBObject(args[1]));
            else
                obj = coll.findOne(args[0], toDBObject(args[1]));
        } else if (len == 3) {
            obj = coll.findOne(toDBObject(args[0]), toDBObject(args[1]), toDBObject(args[2]));
        }
        return toCFML(obj);
    }
    // findAndRemove
    if (methodName.equals("findAndRemove")) {
        checkArgLength("findAndRemove", args, 1, 1);
        DBObject obj = coll.findAndRemove(toDBObject(args[0]));
        return toCFML(obj);
    }
    // findAndModify
    if (methodName.equals("findAndModify")) {
        int len = args == null ? 0 : args.length;
        if (len != 2 && len != 3 && len != 7) {
            throw exp.createApplicationException(
                    "the function findAndModify needs 2, 3 or 7 arguments, but you have defined only " + len);
        }
        DBObject obj = null;
        if (len == 2) {
            obj = coll.findAndModify(toDBObject(args[0]), toDBObject(args[1]));
        } else if (len == 3) {
            obj = coll.findAndModify(toDBObject(args[0]), toDBObject(args[1]), toDBObject(args[2]));
        } else if (len == 7) {
            obj = coll.findAndModify(toDBObject(args[0]), toDBObject(args[1]), toDBObject(args[2]),
                    caster.toBooleanValue(args[3]), toDBObject(args[4]), caster.toBooleanValue(args[5]),
                    caster.toBooleanValue(args[6]));
        }
        return toCFML(obj);
    }
    // group
    /* TODO: needs GroupCommand
    if (methodName.equals("group")) {
        int len = checkArgLength("group", args, 1, 1);
        if (len == 1) {
            return toCFML(coll.group(toDBObject(args[0])));
        }
    }
    */
    // insert
    if (methodName.equals("insert")) {
        checkArgLength("insert", args, 1, 1);
        return toCFML(coll.insert(toDBObjectArray(args[0])));
    }
    // insertMany(required array documents, struct options)
    // valid options keys are string "writeconcern", boolean "ordered"
    if (methodName.equals("insertMany")) {
        int len = checkArgLength("insertMany", args, 1, 2);
        BulkWriteOperation bulk = coll.initializeOrderedBulkOperation();
        WriteConcern wc = coll.getWriteConcern();
        if (len == 2) {
            DBObject dboOpts = toDBObject(args[1]);
            if (dboOpts.containsField("ordered")) {
                if (!decision.isBoolean(dboOpts.get("ordered")))
                    throw exp.createApplicationException("ordered in options must be boolean value");
                if (!caster.toBooleanValue(dboOpts.get("ordered"))) {
                    bulk = coll.initializeUnorderedBulkOperation();
                }
            }
            if (dboOpts.containsField("writeconcern")) {
                WriteConcern newWc = WriteConcern.valueOf(caster.toString(dboOpts.get("writeconcern")));
                if (newWc != null) {
                    wc = newWc;
                }
            }
        }
        Map<String, Object> result = new LinkedHashMap<String, Object>();
        BulkWriteResult bulkResult;
        List<Map> writeErrors = new ArrayList<Map>();
        Array arr = caster.toArray(args[0]);
        if (arr.size() == 0) {
            result.put("nInserted", 0);
            result.put("writeErrors", writeErrors);
            result.put("acknowledged", true);
            return toCFML(result);
        }
        Iterator<Object> it = arr.valueIterator();
        while (it.hasNext()) {
            bulk.insert(toDBObject(it.next()));
        }
        try {
            bulkResult = bulk.execute(wc);
        } catch (BulkWriteException e) {
            Map<String, Object> bulkErrorItem;
            BulkWriteError bulkError;
            bulkResult = e.getWriteResult();
            List<BulkWriteError> errors = e.getWriteErrors();
            Iterator<BulkWriteError> jj = errors.iterator();
            while (jj.hasNext()) {
                bulkErrorItem = new LinkedHashMap<String, Object>();
                bulkError = jj.next();
                bulkErrorItem.put("index", (bulkError.getIndex() + 1)); // +1 so we get index of item in CFML array
                bulkErrorItem.put("code", bulkError.getCode());
                bulkErrorItem.put("errmsg", bulkError.getMessage());
                bulkErrorItem.put("op", bulkError.getDetails());
                writeErrors.add(bulkErrorItem);
            }
        }
        result.put("acknowledged", bulkResult.isAcknowledged());
        if (bulkResult.isAcknowledged()) {
            result.put("nInserted", bulkResult.getInsertedCount());
            result.put("writeErrors", writeErrors);
        }
        return toCFML(result);
    }
    // bulkWrite(required array operations, struct options)
    // valid options keys are string "writeconcern", boolean "ordered", boolean "bypassDocumentValidation"
    // an operation is a struct with the following keys:
    // { "operation":[insert|update|updateOne|remove|removeOne],
    //   "document":[(required if operation is insert) - a doc to insert],
    //   "query":[(optional) - the query to find for remove/update operations],
    //   "update":[(required for update/updateOne) - the update document] }
    // e.g. dbCollection.bulkWrite([
    //     {"operation":"insert", "document":{"test":"insert"}}
    //     ,{"operation":"updateOne", "query":{"_id":"foo"}, "update":{"$set":{"updated":true}}}
    //     ,{"operation":"removeOne", "query":{"_id":"goaway"}}
    // ], {"ordered":false})
    if (methodName.equals("bulkWrite")) {
        int len = checkArgLength("bulkWrite", args, 1, 2);
        BulkWriteOperation bulk = coll.initializeOrderedBulkOperation();
        WriteConcern wc = coll.getWriteConcern();
        if (len == 2) {
            DBObject dboOpts = toDBObject(args[1]);
            if (dboOpts.containsField("ordered")) {
                if (!decision.isBoolean(dboOpts.get("ordered")))
                    throw exp.createApplicationException("ordered in options must be boolean value");
                if (!caster.toBooleanValue(dboOpts.get("ordered"))) {
                    bulk = coll.initializeUnorderedBulkOperation();
                }
            }
            if (dboOpts.containsField("bypassDocumentValidation")) {
                if (!decision.isBoolean(dboOpts.get("bypassDocumentValidation")))
                    throw exp.createApplicationException(
                            "bypassDocumentValidation in options must be boolean value");
                bulk.setBypassDocumentValidation(
                        caster.toBooleanValue(dboOpts.get("bypassDocumentValidation")));
            }
            if (dboOpts.containsField("writeconcern")) {
                WriteConcern newWc = WriteConcern.valueOf(caster.toString(dboOpts.get("writeconcern")));
                if (newWc != null) {
                    wc = newWc;
                }
            }
        }
        Map<String, Object> result = new LinkedHashMap<String, Object>();
        BulkWriteResult bulkResult;
        List<Map> writeErrors = new ArrayList<Map>();
        Array arr = caster.toArray(args[0]);
        if (arr.size() == 0) {
            result.put("nInserted", 0);
            result.put("nMatched", 0);
            result.put("nModified", 0);
            result.put("nRemoved", 0);
            result.put("writeErrors", writeErrors);
            result.put("acknowledged", true);
            return toCFML(result);
        }
        Iterator<Object> it = arr.valueIterator();
        while (it.hasNext()) {
            DBObject operation = toDBObject(it.next());
            // match the operation name by value; == on strings would compare references and usually fail
            if ("update".equals(operation.get("operation"))) {
                // add update operation
                bulk.find(toDBObject(operation.get("query"))).update(toDBObject(operation.get("update")));
            } else if ("updateOne".equals(operation.get("operation"))) {
                // add updateOne operation
                bulk.find(toDBObject(operation.get("query"))).updateOne(toDBObject(operation.get("update")));
            } else if ("remove".equals(operation.get("operation"))) {
                // add remove operation
                bulk.find(toDBObject(operation.get("query"))).remove();
            } else if ("removeOne".equals(operation.get("operation"))) {
                // add removeOne operation
                bulk.find(toDBObject(operation.get("query"))).removeOne();
            } else if ("insert".equals(operation.get("operation"))) {
                bulk.insert(toDBObject(operation.get("document")));
            }
        }
        try {
            bulkResult = bulk.execute(wc);
        } catch (BulkWriteException e) {
            Map<String, Object> bulkErrorItem;
            BulkWriteError bulkError;
            bulkResult = e.getWriteResult();
            List<BulkWriteError> errors = e.getWriteErrors();
            Iterator<BulkWriteError> jj = errors.iterator();
            while (jj.hasNext()) {
                bulkErrorItem = new LinkedHashMap<String, Object>();
                bulkError = jj.next();
                bulkErrorItem.put("index", (bulkError.getIndex() + 1)); // +1 so we get index of item in CFML array
                bulkErrorItem.put("code", bulkError.getCode());
                bulkErrorItem.put("errmsg", bulkError.getMessage());
                bulkErrorItem.put("op", bulkError.getDetails());
                writeErrors.add(bulkErrorItem);
            }
        }
        result.put("acknowledged", bulkResult.isAcknowledged());
        if (bulkResult.isAcknowledged()) {
            result.put("nInserted", bulkResult.getInsertedCount());
            result.put("nMatched", bulkResult.getMatchedCount());
            result.put("nModified", bulkResult.getModifiedCount());
            result.put("nRemoved", bulkResult.getRemovedCount());
            result.put("writeErrors", writeErrors);
        }
        return toCFML(result);
    }
    // mapReduce
    if (methodName.equals("mapReduce")) {
        int len = checkArgLength("mapReduce", args, 4, 4);
        if (len == 4) {
            return toCFML(coll.mapReduce(caster.toString(args[0]), caster.toString(args[1]),
                    caster.toString(args[2]), toDBObject(args[3])));
        }
    }
    // remove
    if (methodName.equals("remove")) {
        checkArgLength("remove", args, 1, 1);
        return toCFML(coll.remove(toDBObject(args[0])));
    }
    // rename
    if (methodName.equals("rename") || methodName.equals("renameCollection")) {
        int len = checkArgLength(methodName.getString(), args, 1, 2);
        if (len == 1) {
            return toCFML(coll.rename(caster.toString(args[0])));
        } else if (len == 2) {
            return toCFML(coll.rename(caster.toString(args[0]), caster.toBooleanValue(args[1])));
        }
    }
    // save
    if (methodName.equals("save")) {
        checkArgLength("save", args, 1, 1);
        return toCFML(coll.save(toDBObject(args[0])));
    }
    // setWriteConcern
    if (methodName.equals("setWriteConcern")) {
        checkArgLength("setWriteConcern", args, 1, 1);
        WriteConcern wc = WriteConcern.valueOf(caster.toString(args[0]));
        if (wc != null) {
            coll.setWriteConcern(wc);
        }
        return null;
    }
    // storageSize
    if (methodName.equals("storageSize")) {
        checkArgLength("storageSize", args, 0, 0);
        return toCFML(coll.getStats().get("storageSize"));
    }
    // totalIndexSize
    if (methodName.equals("totalIndexSize")) {
        checkArgLength("totalIndexSize", args, 0, 0);
        return toCFML(coll.getStats().get("totalIndexSize"));
    }
    // update
    if (methodName.equals("update")) {
        int len = checkArgLength("update", args, 2, 4);
        if (len == 2) {
            return toCFML(coll.update(toDBObject(args[0]), toDBObject(args[1])));
        } else if (len == 3) {
            return toCFML(coll.update(toDBObject(args[0]), toDBObject(args[1]), caster.toBooleanValue(args[2]),
                    false));
        } else if (len == 4) {
            return toCFML(coll.update(toDBObject(args[0]), toDBObject(args[1]), caster.toBooleanValue(args[2]),
                    caster.toBooleanValue(args[3])));
        }
    }
    String functionNames = "aggregate,count,dataSize,distinct,drop,dropIndex,dropIndexes,createIndex,stats,getIndexes,getWriteConcern,find,findOne,findAndRemove,findAndModify,"
            + "group,insert,insertMany,bulkWrite,mapReduce,remove,rename,save,setWriteConcern,storageSize,totalIndexSize,update";
    throw exp.createApplicationException(
            "function " + methodName + " does not exist; existing functions are [" + functionNames + "]");
}
From source file:org.mongodb.morphia.mapping.Mapper.java
License:Open Source License
public <T> T fromDb(final DBObject dbObject, final T entity, final EntityCache cache) {
    // hack to bypass things and just read the value.
    if (entity instanceof MappedField) {
        readMappedField(dbObject, (MappedField) entity, entity, cache);
        return entity;
    }
    // check the history key (a key is the namespace + id)
    if (dbObject.containsField(ID_KEY) && getMappedClass(entity).getIdField() != null
            && getMappedClass(entity).getEntityAnnotation() != null) {
        final Key<T> key = new Key(entity.getClass(), dbObject.get(ID_KEY));
        final T cachedInstance = cache.getEntity(key);
        if (cachedInstance != null) {
            return cachedInstance;
        } else {
            cache.putEntity(key, entity); // to avoid stackOverflow in recursive refs
        }
    }
    final MappedClass mc = getMappedClass(entity);
    final DBObject updated = mc.callLifecycleMethods(PreLoad.class, entity, dbObject, this);
    try {
        for (final MappedField mf : mc.getPersistenceFields()) {
            readMappedField(updated, mf, entity, cache);
        }
    } catch (final MappingException e) {
        Object id = dbObject.get(ID_KEY);
        String entityName = entity.getClass().getName();
        throw new MappingException(format("Could not map %s with ID: %s", entityName, id), e);
    }
    if (updated.containsField(ID_KEY) && getMappedClass(entity).getIdField() != null) {
        final Key key = new Key(entity.getClass(), updated.get(ID_KEY));
        cache.putEntity(key, entity);
    }
    mc.callLifecycleMethods(PostLoad.class, entity, updated, this);
    return entity;
}
From source file:org.mule.module.mongo.automation.MongoTestParent.java
License:Open Source License
protected int findFiles() {
    int size = 0;
    Iterable<DBObject> iterable = null;
    try {
        // MuleEvent event = getTestEvent(new BasicDBObject());
        iterable = runFlowAndGetPayload("find-files");
        for (DBObject dbObj : iterable) {
            if (dbObj.containsField("filename")) {
                size++;
            }
        }
    } catch (Exception e) {
        fail(ConnectorTestUtils.getStackTrace(e));
    }
    return size;
}
From source file:org.mule.module.mongo.automation.testcases.MongoTestParent.java
License:Open Source License
@SuppressWarnings("unchecked")
// Returns the number of files in the database, as counted by the find-files operation
protected int findFiles() {
    Iterable<DBObject> iterable = null;
    MuleEvent response = null;
    try {
        MessageProcessor findFilesFlow = lookupMessageProcessorConstruct("find-files");
        MuleEvent event = getTestEvent(new BasicDBObject());
        response = findFilesFlow.process(event);
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
    iterable = (Iterable<DBObject>) response.getMessage().getPayload();
    int size = 0;
    for (DBObject dbObj : iterable) {
        if (dbObj.containsField("filename")) {
            size++;
        }
    }
    return size;
}
From source file:org.nuxeo.ecm.core.storage.mongodb.MongoDBQueryBuilder.java
License:Apache License
protected DBObject walkAnd(List<Operand> values) {
    List<Object> list = walkOperandList(values);
    // check wildcards in the operands, extract common prefixes to use $elemMatch
    Map<String, List<FieldInfoDBObject>> propBaseKeyToDBOs = new LinkedHashMap<>();
    Map<String, String> propBaseKeyToFieldBase = new HashMap<>();
    for (Iterator<Object> it = list.iterator(); it.hasNext();) {
        Object ob = it.next();
        if (ob instanceof FieldInfoDBObject) {
            FieldInfoDBObject fidbo = (FieldInfoDBObject) ob;
            FieldInfo fieldInfo = fidbo.fieldInfo;
            if (fieldInfo.hasWildcard) {
                if (fieldInfo.fieldSuffix != null && fieldInfo.fieldSuffix.contains("*")) {
                    // a double wildcard of the form foo/*/bar/* is not a problem if bar is an array
                    // TODO prevent deep complex multiple wildcards
                    // throw new QueryParseException("Cannot use two wildcards: " + fieldInfo.prop);
                }
                // generate a key unique per correlation for this element match
                String wildcardNumber = fieldInfo.fieldWildcard;
                if (wildcardNumber.isEmpty()) {
                    // negative to not collide with regular correlated wildcards
                    wildcardNumber = String.valueOf(-counter.incrementAndGet());
                }
                String propBaseKey = fieldInfo.fieldPrefix + "/*" + wildcardNumber;
                // store object for this key
                List<FieldInfoDBObject> dbos = propBaseKeyToDBOs.get(propBaseKey);
                if (dbos == null) {
                    propBaseKeyToDBOs.put(propBaseKey, dbos = new LinkedList<>());
                }
                dbos.add(fidbo);
                // remember for which field base this is
                String fieldBase = fieldInfo.fieldPrefix.replace("/", ".");
                propBaseKeyToFieldBase.put(propBaseKey, fieldBase);
                // remove from list, will be re-added later through propBaseKeyToDBOs
                it.remove();
            }
        }
    }
    // generate $elemMatch items for correlated queries
    for (Entry<String, List<FieldInfoDBObject>> es : propBaseKeyToDBOs.entrySet()) {
        String propBaseKey = es.getKey();
        List<FieldInfoDBObject> fidbos = es.getValue();
        if (fidbos.size() == 1) {
            // regular uncorrelated match
            list.addAll(fidbos);
        } else {
            DBObject elemMatch = new BasicDBObject();
            for (FieldInfoDBObject fidbo : fidbos) {
                // truncate field name to just the suffix
                FieldInfo fieldInfo = fidbo.fieldInfo;
                Object value = fidbo.get(fieldInfo.queryField);
                String fieldSuffix = fieldInfo.fieldSuffix.replace("/", ".");
                if (elemMatch.containsField(fieldSuffix)) {
                    // ecm:acl/*1/principal = 'bob' AND ecm:acl/*1/principal = 'steve'
                    // cannot match
                    // TODO do better
                    value = "__NOSUCHVALUE__";
                }
                elemMatch.put(fieldSuffix, value);
            }
            String fieldBase = propBaseKeyToFieldBase.get(propBaseKey);
            BasicDBObject dbo = new BasicDBObject(fieldBase,
                    new BasicDBObject(QueryOperators.ELEM_MATCH, elemMatch));
            list.add(dbo);
        }
    }
    if (list.size() == 1) {
        return (DBObject) list.get(0);
    } else {
        return new BasicDBObject(QueryOperators.AND, list);
    }
}
From source file:org.nuxeo.ecm.core.storage.mongodb.MongoDBRepository.java
License:Apache License
@Override
public void updateState(String id, StateDiff diff, ChangeTokenUpdater changeTokenUpdater) {
    List<DBObject> updates = converter.diffToBson(diff);
    for (DBObject update : updates) {
        DBObject query = new BasicDBObject(idKey, id);
        if (changeTokenUpdater == null) {
            if (log.isTraceEnabled()) {
                log.trace("MongoDB: UPDATE " + id + ": " + update);
            }
        } else {
            // assume bson is identical to dbs internals
            // condition works even if value is null
            Map<String, Serializable> conditions = changeTokenUpdater.getConditions();
            Map<String, Serializable> tokenUpdates = changeTokenUpdater.getUpdates();
            if (update.containsField(MONGODB_SET)) {
                ((DBObject) update.get(MONGODB_SET)).putAll(tokenUpdates);
            } else {
                DBObject set = new BasicDBObject();
                set.putAll(tokenUpdates);
                update.put(MONGODB_SET, set);
            }
            if (log.isTraceEnabled()) {
                log.trace("MongoDB: UPDATE " + id + ": IF " + conditions + " THEN " + update);
            }
            query.putAll(conditions);
        }
        WriteResult w = coll.update(query, update);
        if (w.getN() != 1) {
            log.trace("MongoDB: -> CONCURRENT UPDATE: " + id);
            throw new ConcurrentUpdateException(id);
        }
        // TODO dupe exception
        // throw new DocumentException("Missing: " + id);
    }
}
From source file:org.oasis.datacore.sdk.data.spring.DatacoreMappingMongoConverter.java
License:Apache License
protected void writeInternal(Object obj, final DBObject dbo, MongoPersistentEntity<?> entity) {
    if (obj == null) {
        return;
    }
    if (null == entity) {
        throw new MappingException("No mapping metadata found for entity of type " + obj.getClass().getName());
    }
    final BeanWrapper<MongoPersistentEntity<Object>, Object> wrapper = BeanWrapper.create(obj,
            conversionService);
    final MongoPersistentProperty idProperty = entity.getIdProperty();
    if (!dbo.containsField("_id") && null != idProperty) {
        boolean fieldAccessOnly = idProperty.usePropertyAccess() ? false : useFieldAccessOnly;
        try {
            Object id = wrapper.getProperty(idProperty, Object.class, fieldAccessOnly);
            dbo.put("_id", idMapper.convertId(id));
        } catch (ConversionException ignored) {
        }
    }
    // Write the properties
    entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
        public void doWithPersistentProperty(MongoPersistentProperty prop) {
            if (prop.equals(idProperty)) {
                return;
            }
            boolean fieldAccessOnly = prop.usePropertyAccess() ? false : useFieldAccessOnly;
            Object propertyObj = wrapper.getProperty(prop, prop.getType(), fieldAccessOnly);
            // if (null != propertyObj) { // [Ozwillo] HACK to allow unset / set to null at save
            if (/* [Ozwillo] HACK */ null != propertyObj
                    && /* [Ozwillo] END */ !conversions.isSimpleType(propertyObj.getClass())) {
                writePropertyInternal(propertyObj, dbo, prop);
            } else {
                writeSimpleInternal(propertyObj, dbo, prop.getFieldName());
            }
            // } // [Ozwillo] HACK
        }
    });
    entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
        public void doWithAssociation(Association<MongoPersistentProperty> association) {
            MongoPersistentProperty inverseProp = association.getInverse();
            Class<?> type = inverseProp.getType();
            Object propertyObj = wrapper.getProperty(inverseProp, type, useFieldAccessOnly);
            // if (null != propertyObj) { // [Ozwillo] HACK to allow unset / set to null at save
            writePropertyInternal(propertyObj, dbo, inverseProp);
            // } // [Ozwillo] HACK
        }
    });
    // [Ozwillo] HACK to persist Datacore model fields at root level NOT FOR NOW
    /*
    if ("DCEntity".equals(entity.getName())) {
        DCEntity dcEntity = (DCEntity) object;
        DCModel dcModel = dcModelService.getModel(dcEntity.getType());
        for (DCField dcField : dcModel.getAllFields()) {
        }
    }
    */
}