List of usage examples for com.mongodb DBObject keySet
Set<String> keySet();
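Before the project examples below, a minimal standalone sketch may help show what keySet() exposes. The document fields ("name", "age", "active") are made up for illustration: keySet() returns the top-level field names of a DBObject, which can then be iterated together with get() to read each value.

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class KeySetExample {
    public static void main(String[] args) {
        // Build a document with a few top-level fields.
        DBObject doc = new BasicDBObject("name", "alice")
                .append("age", 30)
                .append("active", true);

        // keySet() exposes the top-level field names; iterate them to read each value.
        for (String key : doc.keySet()) {
            System.out.println(key + " = " + doc.get(key));
        }
    }
}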
From source file: org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore.java
License: Apache License

@Override
public <T extends Document> boolean create(Collection<T> collection, List<UpdateOp> updateOps) {
    log("create", updateOps);
    List<T> docs = new ArrayList<T>();
    DBObject[] inserts = new DBObject[updateOps.size()];
    List<String> ids = Lists.newArrayListWithCapacity(updateOps.size());
    for (int i = 0; i < updateOps.size(); i++) {
        inserts[i] = new BasicDBObject();
        UpdateOp update = updateOps.get(i);
        UpdateUtils.assertUnconditional(update);
        T target = collection.newDocument(this);
        UpdateUtils.applyChanges(target, update);
        docs.add(target);
        ids.add(updateOps.get(i).getId());
        for (Entry<Key, Operation> entry : update.getChanges().entrySet()) {
            Key k = entry.getKey();
            Operation op = entry.getValue();
            switch (op.type) {
            case SET:
            case MAX:
            case INCREMENT: {
                inserts[i].put(k.toString(), op.value);
                break;
            }
            case SET_MAP_ENTRY: {
                Revision r = k.getRevision();
                if (r == null) {
                    throw new IllegalStateException("SET_MAP_ENTRY must not have null revision");
                }
                DBObject value = (DBObject) inserts[i].get(k.getName());
                if (value == null) {
                    value = new RevisionEntry(r, op.value);
                    inserts[i].put(k.getName(), value);
                } else if (value.keySet().size() == 1) {
                    String key = value.keySet().iterator().next();
                    Object val = value.get(key);
                    value = new BasicDBObject(key, val);
                    value.put(r.toString(), op.value);
                    inserts[i].put(k.getName(), value);
                } else {
                    value.put(r.toString(), op.value);
                }
                break;
            }
            case REMOVE_MAP_ENTRY:
                // nothing to do for new entries
                break;
            }
        }
        if (!inserts[i].containsField(Document.MOD_COUNT)) {
            inserts[i].put(Document.MOD_COUNT, 1L);
            target.put(Document.MOD_COUNT, 1L);
        }
    }
    DBCollection dbCollection = getDBCollection(collection);
    final Stopwatch watch = startWatch();
    boolean insertSuccess = false;
    try {
        try {
            dbCollection.insert(inserts);
            if (collection == Collection.NODES) {
                for (T doc : docs) {
                    nodesCache.putIfAbsent((NodeDocument) doc);
                    updateLocalChanges((NodeDocument) doc);
                }
            }
            insertSuccess = true;
            return true;
        } catch (MongoException e) {
            return false;
        }
    } finally {
        stats.doneCreate(watch.elapsed(TimeUnit.NANOSECONDS), collection, ids, insertSuccess);
    }
}
From source file: org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore.java
License: Apache License

@CheckForNull
protected <T extends Document> T convertFromDBObject(@Nonnull Collection<T> collection, @Nullable DBObject n) {
    T copy = null;
    if (n != null) {
        copy = collection.newDocument(this);
        for (String key : n.keySet()) {
            Object o = n.get(key);
            if (o instanceof String) {
                copy.put(key, o);
            } else if (o instanceof Number
                    && (NodeDocument.MODIFIED_IN_SECS.equals(key) || Document.MOD_COUNT.equals(key))) {
                copy.put(key, Utils.asLong((Number) o));
            } else if (o instanceof Long) {
                copy.put(key, o);
            } else if (o instanceof Integer) {
                copy.put(key, o);
            } else if (o instanceof Boolean) {
                copy.put(key, o);
            } else if (o instanceof BasicDBObject) {
                copy.put(key, convertMongoMap((BasicDBObject) o));
            }
        }
    }
    return copy;
}
From source file: org.apache.jackrabbit.oak.plugins.document.mongo.MongoUtils.java
License: Apache License

/**
 * Returns {@code true} if there is an index on the given fields,
 * {@code false} otherwise. If multiple fields are passed, this method
 * checks if there is a compound index on those fields. This method does not
 * check the sequence of fields for a compound index. That is, this method
 * will return {@code true} as soon as it finds an index that covers the
 * given fields, no matter their sequence in the compound index.
 *
 * @param collection the collection.
 * @param fields the fields of an index.
 * @return {@code true} if the index exists, {@code false} otherwise.
 * @throws MongoException if the operation fails.
 */
static boolean hasIndex(DBCollection collection, String... fields) throws MongoException {
    Set<String> uniqueFields = Sets.newHashSet(fields);
    for (DBObject info : collection.getIndexInfo()) {
        DBObject key = (DBObject) info.get("key");
        Set<String> indexFields = Sets.newHashSet(key.keySet());
        if (uniqueFields.equals(indexFields)) {
            return true;
        }
    }
    return false;
}
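A usage sketch for the helper above, assuming it is called from code in the same package and given an existing DB handle named db; the "nodes" collection and the field names are placeholders, not taken from the source. Because the comparison is done on the keySet() of each index's key document, the order in which fields are passed does not matter.

// Hedged sketch: assumes a DB handle named "db"; "nodes" and the field names are placeholders.
DBCollection nodes = db.getCollection("nodes");
boolean indexed = MongoUtils.hasIndex(nodes, "_modified", "_id");
// Field order is irrelevant to the check, so this call is equivalent:
boolean indexedAgain = MongoUtils.hasIndex(nodes, "_id", "_modified");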
From source file: org.apache.jackrabbit.oak.plugins.document.mongo.replica.GetRootRevisionsCallable.java
License: Apache License

@Override
public Timestamped<RevisionVector> call() throws Exception {
    List<Revision> revisions = new ArrayList<Revision>();
    DBCollection collection = nodeCollections.get(hostName);

    long start = clock.getTime();
    DBObject root = collection.findOne(new BasicDBObject(Document.ID, "0:/"));
    long end = clock.getTime();
    long mid = (start + end) / 2;

    if (root == null) {
        LOG.warn("Can't get the root document on {}", hostName);
        return null;
    }

    DBObject lastRev = (DBObject) root.get("_lastRev");
    for (String clusterId : lastRev.keySet()) {
        String rev = (String) lastRev.get(clusterId);
        revisions.add(Revision.fromString(rev));
    }
    LOG.debug("Got /_lastRev from {}: {}", hostName, lastRev);
    return new Timestamped<RevisionVector>(new RevisionVector(revisions), mid);
}
From source file: org.apache.manifoldcf.crawler.connectors.gridfs.GridFSRepositoryConnector.java
License: Apache License

/**
 * Apply metadata to a repository document.
 *
 * @param rd is the repository document to apply the metadata to.
 * @param metadataMap is the resultset row to use to get the metadata. All
 * non-special columns from this row will be considered to be metadata.
 */
protected void applyMetadata(RepositoryDocument rd, DBObject metadataMap) throws ManifoldCFException {
    // Cycle through the document's fields
    Iterator iter = metadataMap.keySet().iterator();
    while (iter.hasNext()) {
        String fieldName = (String) iter.next();
        if (documentKnownColumns.get(fieldName) == null) {
            // Consider this field to contain metadata.
            // We can only accept non-binary metadata at this time.
            Object metadata = metadataMap.get(fieldName);
            if (!(metadata instanceof String)) {
                throw new ManifoldCFException(
                        "Metadata field '" + fieldName + "' must be convertible to a string.");
            }
            rd.addField(fieldName, metadata.toString());
        }
    }
}
From source file: org.apache.metamodel.mongodb.mongo2.MongoDbDataContext.java
License: Apache License

/**
 * Performs an analysis of an available collection in a Mongo {@link DB}
 * instance and tries to detect the table structure based on the first 1000
 * documents in the collection.
 *
 * @param db
 *            the mongo DB
 * @param collectionName
 *            the name of the collection
 * @return a table definition for mongo db.
 */
public static SimpleTableDef detectTable(DB db, String collectionName) {
    final DBCollection collection = db.getCollection(collectionName);
    final DBCursor cursor = collection.find().limit(1000);
    final SortedMap<String, Set<Class<?>>> columnsAndTypes = new TreeMap<String, Set<Class<?>>>();
    while (cursor.hasNext()) {
        DBObject object = cursor.next();
        Set<String> keysInObject = object.keySet();
        for (String key : keysInObject) {
            Set<Class<?>> types = columnsAndTypes.get(key);
            if (types == null) {
                types = new HashSet<Class<?>>();
                columnsAndTypes.put(key, types);
            }
            Object value = object.get(key);
            if (value != null) {
                types.add(value.getClass());
            }
        }
    }
    cursor.close();

    final String[] columnNames = new String[columnsAndTypes.size()];
    final ColumnType[] columnTypes = new ColumnType[columnsAndTypes.size()];
    int i = 0;
    for (Entry<String, Set<Class<?>>> columnAndTypes : columnsAndTypes.entrySet()) {
        final String columnName = columnAndTypes.getKey();
        final Set<Class<?>> columnTypeSet = columnAndTypes.getValue();
        final Class<?> columnType;
        if (columnTypeSet.size() == 1) {
            columnType = columnTypeSet.iterator().next();
        } else {
            columnType = Object.class;
        }
        columnNames[i] = columnName;
        if (columnType == ObjectId.class) {
            columnTypes[i] = ColumnType.ROWID;
        } else {
            columnTypes[i] = ColumnTypeImpl.convertColumnType(columnType);
        }
        i++;
    }

    return new SimpleTableDef(collectionName, columnNames, columnTypes);
}
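A short usage sketch for detectTable, assuming a locally running mongod, placeholder database and collection names ("shop", "orders"), and that SimpleTableDef exposes getColumnNames(); none of these names come from the source above.

// Hedged usage sketch: host, port, "shop" and "orders" are placeholders.
MongoClient client = new MongoClient("localhost", 27017);
DB db = client.getDB("shop");
SimpleTableDef tableDef = MongoDbDataContext.detectTable(db, "orders");
for (String columnName : tableDef.getColumnNames()) {
    System.out.println(columnName);
}
client.close();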
From source file: org.apache.rya.mongodb.aggregation.AggregationPipelineQueryNode.java
License: Apache License

/**
 * Given a StatementPattern, generate an object representing the arguments
 * to a "$match" command that will find matching triples.
 * @param sp The StatementPattern to search for
 * @param path If given, specify the field that should be matched against
 * the statement pattern, using an ordered list of field names for a nested
 * field. E.g. to match records { "x": { "y": <statement pattern> } }, pass
 * "x" followed by "y".
 * @return The argument of a "$match" query
 */
private static BasicDBObject getMatchExpression(final StatementPattern sp, final String... path) {
    final Var subjVar = sp.getSubjectVar();
    final Var predVar = sp.getPredicateVar();
    final Var objVar = sp.getObjectVar();
    final Var contextVar = sp.getContextVar();
    RyaIRI s = null;
    RyaIRI p = null;
    RyaType o = null;
    RyaIRI c = null;
    if (subjVar != null && subjVar.getValue() instanceof Resource) {
        s = RdfToRyaConversions.convertResource((Resource) subjVar.getValue());
    }
    if (predVar != null && predVar.getValue() instanceof IRI) {
        p = RdfToRyaConversions.convertIRI((IRI) predVar.getValue());
    }
    if (objVar != null && objVar.getValue() != null) {
        o = RdfToRyaConversions.convertValue(objVar.getValue());
    }
    if (contextVar != null && contextVar.getValue() instanceof IRI) {
        c = RdfToRyaConversions.convertIRI((IRI) contextVar.getValue());
    }
    final RyaStatement rs = new RyaStatement(s, p, o, c);
    final DBObject obj = strategy.getQuery(rs);
    // Add path prefix, if given
    if (path.length > 0) {
        final StringBuilder sb = new StringBuilder();
        for (final String str : path) {
            sb.append(str).append(".");
        }
        final String prefix = sb.toString();
        final Set<String> originalKeys = new HashSet<>(obj.keySet());
        originalKeys.forEach(key -> {
            final Object value = obj.removeField(key);
            obj.put(prefix + key, value);
        });
    }
    return (BasicDBObject) obj;
}
From source file: org.apache.sling.mongodb.impl.ReadableValueMap.java
License: Apache License

protected void createValueMap(final DBObject dbObject) {
    if (dbObject == null) {
        this.valueMap = Collections.<String, Object>emptyMap();
    } else {
        final Map<String, Object> map = new HashMap<String, Object>();
        for (final String key : dbObject.keySet()) {
            final String name = MongoDBResourceProvider.keyToPropName(key);
            if (name != null) {
                // store the value under the converted property name,
                // reading it from the document with the original key
                map.put(name, dbObject.get(key));
            }
        }
        this.valueMap = Collections.unmodifiableMap(map);
    }
}
From source file: org.araqne.logdb.mongo.query.MongoFindCommand.java
License: Apache License

private Map<String, Object> convert(DBObject doc) {
    Map<String, Object> m = new HashMap<String, Object>();
    for (String key : doc.keySet()) {
        m.put(key, convert(doc.get(key)));
    }
    return m;
}
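The method above delegates each value to an Object-accepting convert overload that is not shown in this listing. A plausible sketch of such a helper, offered as an assumption rather than the actual Araqne source, recurses into nested DBObjects via keySet() and into lists, and returns all other values unchanged:

// Hypothetical companion helper, not taken from the source file: recursively
// unwraps nested DBObjects (via keySet()) and lists, and passes scalars through.
private Object convert(Object value) {
    if (value instanceof DBObject) {
        Map<String, Object> m = new HashMap<String, Object>();
        DBObject nested = (DBObject) value;
        for (String key : nested.keySet()) {
            m.put(key, convert(nested.get(key)));
        }
        return m;
    }
    if (value instanceof List) {
        List<Object> converted = new ArrayList<Object>();
        for (Object item : (List<?>) value) {
            converted.add(convert(item));
        }
        return converted;
    }
    return value;
}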
From source file: org.aw20.mongoworkbench.eclipse.view.table.QueryData.java
License: Open Source License

public QueryData(FindMongoCommand fmcmd) {
    if (!fmcmd.isSuccess())
        return;

    findCommand = fmcmd;
    count = findCommand.getCount();

    rightJustified = new HashSet<String>();
    Set<String> columnSet = new HashSet<String>();
    data = new ArrayList<Map>();

    DBCursor cursor = fmcmd.getCursor();
    while (cursor.hasNext()) {
        DBObject dbo = cursor.next();
        columnSet.addAll(dbo.keySet());

        if (dbo instanceof GridFSDBFile) {
            Map map = new HashMap();
            map.put("_id", ((GridFSDBFile) dbo).getId());
            map.put("chunkSize", ((GridFSDBFile) dbo).getChunkSize());
            map.put("md5", ((GridFSDBFile) dbo).getMD5());
            map.put("length", ((GridFSDBFile) dbo).getLength());
            map.put("filename", ((GridFSDBFile) dbo).getFilename());
            map.put("contentType", ((GridFSDBFile) dbo).getContentType());
            if (((GridFSDBFile) dbo).getAliases() != null)
                map.put("aliases", ((GridFSDBFile) dbo).getAliases());
            if (((GridFSDBFile) dbo).getMetaData() != null)
                map.put("metadata", ((GridFSDBFile) dbo).getMetaData());
            map.put("uploadDate", ((GridFSDBFile) dbo).getUploadDate());
            data.add(map);
        } else
            data.add(dbo.toMap());
    }

    findCommand.close();
    setColumns(columnSet, "_id");
}