List of usage examples for com.mongodb.BasicDBList
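Before the project examples below, a minimal orientation sketch (not taken from any of the listed sources): BasicDBList implements both java.util.List and DBObject, so it can be filled like an ordinary list and then embedded directly in a query or pipeline document. The field name "status" and the values are purely illustrative.

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class BasicDBListIntro {
    public static void main(String[] args) {
        // BasicDBList is a List<Object> that is also a DBObject keyed by index ("0", "1", ...)
        BasicDBList statuses = new BasicDBList();
        statuses.add("NEW");
        statuses.add("OPEN");

        // embed it directly in a query document: { status: { $in: ["NEW", "OPEN"] } }
        DBObject query = new BasicDBObject("status", new BasicDBObject("$in", statuses));
        System.out.println(query);
    }
}

Most of the examples that follow use exactly this pattern: build the array with add(), then wrap it under an operator such as $and, $or or $in.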
From source file:org.knowrob.knowrob_sim_games.MongoSimGames.java
License:Open Source License
/**
 * Get the positions of the model links at the given timestamp,
 * save the result in MongoDB
 */
public void WriteLinksPositionsAt(String ts_str, String model_name, String traj_db_name) {
    // transform the knowrob time to double with 3 decimal precision
    double timestamp = (double) Math.round((parseTime_d(ts_str) - TIME_OFFSET) * 1000) / 1000;

    // set default coll name
    String traj_coll_name = this.coll.getName(); // + "_" + model_name + "_links_at_" + timestamp;

    // remove the knowrob namespace (http://knowrob.org/kb/knowrob.owl#) from the model
    // String model_name = kr_model_name.split("#")[1];
    //System.out.println("Java - timestamp: " + timestamp + " model name: " + model_name);

    // $and list for querying the $match in the aggregation
    BasicDBList time_and_name = new BasicDBList();
    // add the timestamp and the model name
    time_and_name.add(new BasicDBObject("timestamp", new BasicDBObject("$lte", timestamp)));
    time_and_name.add(new BasicDBObject("models.name", model_name));

    // create the pipeline operations, first the $match
    DBObject match_time_and_name = new BasicDBObject("$match", new BasicDBObject("$and", time_and_name));

    // sort the results in descending order on the timestamp (keep most recent result first)
    DBObject sort_desc = new BasicDBObject("$sort", new BasicDBObject("timestamp", -1));

    // $limit the result to 1, we only need one pose
    DBObject limit_result = new BasicDBObject("$limit", 1);

    // $unwind models in order to output only the queried model
    DBObject unwind_models = new BasicDBObject("$unwind", "$models");

    // $match for the given model name from the unwound models
    DBObject match_model = new BasicDBObject("$match", new BasicDBObject("models.name", model_name));

    // build the $projection operation
    DBObject proj_fields = new BasicDBObject("_id", 0);
    proj_fields.put("timestamp", 1);
    proj_fields.put("links_pos", "$models.links.pos");
    DBObject project = new BasicDBObject("$project", proj_fields);

    // run aggregation
    List<DBObject> pipeline = Arrays.asList(match_time_and_name, sort_desc, limit_result, unwind_models,
            match_model, project);

    AggregationOptions aggregationOptions = AggregationOptions.builder().batchSize(100)
            .outputMode(AggregationOptions.OutputMode.CURSOR).allowDiskUse(true).build();

    Cursor cursor = this.coll.aggregate(pipeline, aggregationOptions);

    try {
        MongoClient mongoClient = new MongoClient(this.dbHost, 27017);
        DB traj_db = mongoClient.getDB(traj_db_name);

        // check if the collection already exists
        if (traj_db.collectionExists(traj_coll_name)) {
            System.out.println("!!! Collection: \'" + traj_db_name + "." + traj_coll_name + "\' already exists!");
        }
        // create the collection
        else {
            // create collection
            DBCollection traj_coll = traj_db.getCollection(traj_coll_name);

            System.out.println("Java - Writing to \'" + traj_db_name + "." + traj_coll_name + "\'");

            // if cursor not empty, append metadata to the first doc
            if (cursor.hasNext()) {
                // get pancake roundness again in order to append it to the metadata
                double roundness = this.GetPancakeRoundness(ts_str, model_name);
                // create metadata doc
                BasicDBObject meta_data = new BasicDBObject("name", traj_coll_name).append("type", "links_pos")
                        .append("timestamp", timestamp).append("roundness", roundness)
                        .append("description", "Pancake links positions..");

                // get the first document as the next cursor and append the metadata to it
                BasicDBObject first_doc = (BasicDBObject) cursor.next();
                first_doc.append("metadata", meta_data);
                // insert document with metadata
                traj_coll.insert(first_doc);
            }
            // if query returned no values for these timestamps, get the pose at the nearest timestamp
            else {
                System.out.println("Java - WriteLinksPositionsAt Query returned no results!");
            }

            // insert rest of trajectory
            while (cursor.hasNext()) {
                traj_coll.insert(cursor.next());
            }
        }
    } catch (UnknownHostException e) {
        e.printStackTrace();
    }
}
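The three MongoSimGames methods in this listing all build the same first pipeline stage, so here is that stage on its own as a hedged sketch; the field names (timestamp, models.name) come from the example above, while the helper class and method names are made up for illustration.

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class MatchStageSketch {
    // Builds { $match: { $and: [ { timestamp: { $lte: <ts> } }, { "models.name": <name> } ] } }
    public static DBObject matchTimeAndName(double timestamp, String modelName) {
        BasicDBList andClauses = new BasicDBList();
        andClauses.add(new BasicDBObject("timestamp", new BasicDBObject("$lte", timestamp)));
        andClauses.add(new BasicDBObject("models.name", modelName));
        return new BasicDBObject("$match", new BasicDBObject("$and", andClauses));
    }
}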
From source file:org.knowrob.knowrob_sim_games.MongoSimGames.java
License:Open Source License
/**
 * Get the positions of the model links at the given timestamp,
 * view the results as rviz markers
 */
public void ViewLinksPositionsAt(String ts_str, String model_name, String markerID, String markerType,
        String color, float scale) {
    // transform the knowrob time to double with 3 decimal precision
    double timestamp = (double) Math.round((parseTime_d(ts_str) - TIME_OFFSET) * 1000) / 1000;

    // $and list for querying the $match in the aggregation
    BasicDBList time_and_name = new BasicDBList();
    // add the timestamp and the model name
    time_and_name.add(new BasicDBObject("timestamp", new BasicDBObject("$lte", timestamp)));
    time_and_name.add(new BasicDBObject("models.name", model_name));

    // create the pipeline operations, first the $match
    DBObject match_time_and_name = new BasicDBObject("$match", new BasicDBObject("$and", time_and_name));

    // sort the results in descending order on the timestamp (keep most recent result first)
    DBObject sort_desc = new BasicDBObject("$sort", new BasicDBObject("timestamp", -1));

    // $limit the result to 1, we only need one pose
    DBObject limit_result = new BasicDBObject("$limit", 1);

    // $unwind models in order to output only the queried model
    DBObject unwind_models = new BasicDBObject("$unwind", "$models");

    // $match for the given model name from the unwound models
    DBObject match_model = new BasicDBObject("$match", new BasicDBObject("models.name", model_name));

    // build the $projection operation
    DBObject proj_fields = new BasicDBObject("_id", 0);
    proj_fields.put("timestamp", 1);
    proj_fields.put("links_pos", "$models.links.pos");
    DBObject project = new BasicDBObject("$project", proj_fields);

    // run aggregation
    List<DBObject> pipeline = Arrays.asList(match_time_and_name, sort_desc, limit_result, unwind_models,
            match_model, project);

    AggregationOptions aggregationOptions = AggregationOptions.builder().batchSize(100)
            .outputMode(AggregationOptions.OutputMode.CURSOR).allowDiskUse(true).build();

    Cursor cursor = this.coll.aggregate(pipeline, aggregationOptions);

    // Traj as dynamic array
    ArrayList<Vector3d> positions = new ArrayList<Vector3d>();

    // if cursor not empty, read the link positions from the first doc
    if (cursor.hasNext()) {
        // get the first document as the next cursor
        BasicDBObject first_doc = (BasicDBObject) cursor.next();
        // get the list of links pos
        BasicDBList pos_list = (BasicDBList) first_doc.get("links_pos");
        // pos_list and rot_list length should always be the same
        for (int i = 0; i < pos_list.size(); ++i) {
            positions.add(new Vector3d(((BasicDBObject) pos_list.get(i)).getDouble("x"),
                    ((BasicDBObject) pos_list.get(i)).getDouble("y"),
                    ((BasicDBObject) pos_list.get(i)).getDouble("z")));
        }
    }
    // if query returned no values for these timestamps, get the pose at the nearest timestamp
    else {
        System.out.println("Java - ViewLinksPositionsAt Query returned no results!");
    }

    // create the markers
    this.CreateMarkers(positions, markerID, markerType, color, scale);
}
From source file:org.knowrob.knowrob_sim_games.MongoSimGames.java
License:Open Source License
/**
 * Get the positions of the model links at the given timestamp
 */
public List<Point3d> GetLinksPositions(String ts_str, String model_name) {
    // list of all the links positions
    List<Point3d> links_positions = new ArrayList<Point3d>();

    // transform the knowrob time to double with 3 decimal precision
    double timestamp = (double) Math.round((parseTime_d(ts_str) - TIME_OFFSET) * 1000) / 1000;

    // remove the knowrob namespace (http://knowrob.org/kb/knowrob.owl#) from the model
    // String model_name = kr_model_name.split("#")[1];
    //System.out.println("Java - timestamp: " + timestamp + " model name: " + model_name);

    // $and list for querying the $match in the aggregation
    BasicDBList time_and_name = new BasicDBList();
    // add the timestamp and the model name
    time_and_name.add(new BasicDBObject("timestamp", new BasicDBObject("$lte", timestamp)));
    time_and_name.add(new BasicDBObject("models.name", model_name));

    // create the pipeline operations, first the $match
    DBObject match_time_and_name = new BasicDBObject("$match", new BasicDBObject("$and", time_and_name));

    // sort the results in descending order on the timestamp (keep most recent result first)
    DBObject sort_desc = new BasicDBObject("$sort", new BasicDBObject("timestamp", -1));

    // $limit the result to 1, we only need one pose
    DBObject limit_result = new BasicDBObject("$limit", 1);

    // $unwind models in order to output only the queried model
    DBObject unwind_models = new BasicDBObject("$unwind", "$models");

    // $match for the given model name from the unwound models
    DBObject match_model = new BasicDBObject("$match", new BasicDBObject("models.name", model_name));

    // build the $projection operation
    DBObject proj_fields = new BasicDBObject("_id", 0);
    proj_fields.put("timestamp", 1);
    proj_fields.put("links_pos", "$models.links.pos");
    DBObject project = new BasicDBObject("$project", proj_fields);

    // run aggregation
    List<DBObject> pipeline = Arrays.asList(match_time_and_name, sort_desc, limit_result, unwind_models,
            match_model, project);

    AggregationOptions aggregationOptions = AggregationOptions.builder().batchSize(100)
            .outputMode(AggregationOptions.OutputMode.CURSOR).allowDiskUse(true).build();

    Cursor cursor = this.coll.aggregate(pipeline, aggregationOptions);

    // if the query has a response, read the positions from the first doc
    if (cursor.hasNext()) {
        // get the first doc
        BasicDBObject first_doc = (BasicDBObject) cursor.next();
        // get the positions array
        BasicDBList links_pos_arr = (BasicDBList) first_doc.get("links_pos");
        // iterate the results
        for (int i = 0; i < links_pos_arr.size(); i++) {
            // current position
            Point3d pos = new Point3d();
            // set the positions
            pos.x = ((BasicDBObject) links_pos_arr.get(i)).getDouble("x");
            pos.y = ((BasicDBObject) links_pos_arr.get(i)).getDouble("y");
            pos.z = ((BasicDBObject) links_pos_arr.get(i)).getDouble("z");
            // add position to the list
            links_positions.add(pos);
        }
    }
    return links_positions;
}
From source file:org.log4mongo.LoggingEventBsonifierImpl.java
License:Apache License
/**
 * BSONifies the given class name.
 *
 * @param className The class name to BSONify <i>(may be null)</i>.
 * @return The BSONified equivalent of the class name <i>(may be null)</i>.
 */
@SuppressWarnings(value = "unchecked")
protected DBObject bsonifyClassName(final String className) {
    DBObject result = null;

    if (className != null && className.trim().length() > 0) {
        result = new BasicDBObject();

        result.put(KEY_FQCN, className);

        List packageComponents = new BasicDBList();
        String[] packageAndClassName = className.split("\\.");

        packageComponents.addAll(Arrays.asList(packageAndClassName));
        // Requires Java 6
        // packageComponents.addAll(Arrays.asList(Arrays.copyOf(packageAndClassName,
        //         packageAndClassName.length - 1)));

        if (packageComponents.size() > 0) {
            result.put(KEY_PACKAGE, packageComponents);
        }

        result.put(KEY_CLASS_NAME, packageAndClassName[packageAndClassName.length - 1]);
    }

    return (result);
}
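One reason to store the package components as a BasicDBList is that MongoDB matches array fields element-wise, so a plain equality query finds every log entry whose package path contains a given component. A hedged sketch of such a query; the field path "class.package" is an assumption about how log4mongo nests the object built above, not taken from its source.

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;

public class PackageQuerySketch {
    // Find log entries whose package-component array contains the given element,
    // e.g. component = "log4mongo" matches "org.log4mongo.LoggingEventBsonifierImpl".
    public static DBCursor findByPackageComponent(DBCollection logs, String component) {
        // the field path "class.package" is hypothetical; use the bsonifier's actual KEY_* values
        return logs.find(new BasicDBObject("class.package", component));
    }
}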
From source file:org.mongodb.morphia.mapping.Mapper.java
License:Open Source License
/**
 * <p> Converts a java object to a mongo-compatible object (possibly a DBObject for complex mappings).
 * Very similar to {@link Mapper#toDBObject} </p>
 * <p> Used (mainly) by query/update operations </p>
 */
Object toMongoObject(final Object javaObj, final boolean includeClassName) {
    if (javaObj == null) {
        return null;
    }

    Class origClass = javaObj.getClass();

    if (origClass.isAnonymousClass() && origClass.getSuperclass().isEnum()) {
        origClass = origClass.getSuperclass();
    }

    final Object newObj = getConverters().encode(origClass, javaObj);
    if (newObj == null) {
        LOG.warning("converted " + javaObj + " to null");
        return newObj;
    }

    final Class type = newObj.getClass();
    final boolean bSameType = origClass.equals(type);

    //TODO: think about this logic a bit more.
    //Even if the converter changed it, should it still be processed?
    if (!bSameType && !(Map.class.isAssignableFrom(type) || Iterable.class.isAssignableFrom(type))) {
        return newObj;
    } else {
        //The converter ran, and produced another type, or it is a list/map
        boolean isSingleValue = true;
        boolean isMap = false;
        Class subType = null;

        if (type.isArray() || Map.class.isAssignableFrom(type) || Iterable.class.isAssignableFrom(type)) {
            isSingleValue = false;
            isMap = implementsInterface(type, Map.class);
            // subtype of Long[], List<Long> is Long
            subType = (type.isArray()) ? type.getComponentType() : getParameterizedClass(type, (isMap) ? 1 : 0);
        }

        if (isSingleValue && !isPropertyType(type)) {
            final DBObject dbObj = toDBObject(newObj);
            if (!includeClassName) {
                dbObj.removeField(CLASS_NAME_FIELDNAME);
            }
            return dbObj;
        } else if (newObj instanceof DBObject) {
            return newObj;
        } else if (isMap) {
            if (isPropertyType(subType)) {
                return toDBObject(newObj);
            } else {
                final HashMap m = new HashMap();
                for (final Map.Entry e : (Iterable<Map.Entry>) ((Map) newObj).entrySet()) {
                    m.put(e.getKey(), toMongoObject(e.getValue(), includeClassName));
                }
                return m;
            }
            //Set/List but needs elements converted
        } else if (!isSingleValue && !isPropertyType(subType)) {
            final List<Object> values = new BasicDBList();
            if (type.isArray()) {
                for (final Object obj : (Object[]) newObj) {
                    values.add(toMongoObject(obj, includeClassName));
                }
            } else {
                for (final Object obj : (Iterable) newObj) {
                    values.add(toMongoObject(obj, includeClassName));
                }
            }
            return values;
        } else {
            return newObj;
        }
    }
}
From source file:org.mongolink.domain.criteria.RestrictionOr.java
License:Open Source License
@Override
public void apply(DBObject query) {
    BasicDBList list = new BasicDBList();
    for (Restriction restriction : getRestrictions()) {
        BasicDBObject subquery = new BasicDBObject();
        restriction.apply(subquery);
        list.add(subquery);
    }
    query.put("$or", list);
}
From source file:org.mongolink.domain.mapper.CollectionMapper.java
License:Open Source License
@Override
public void save(Object instance, DBObject into) {
    try {
        Collection collection = value(instance);
        BasicDBList list = new BasicDBList();
        for (Object child : collection) {
            Object childObject = context().converterFor(child.getClass()).toDbValue(child);
            list.add(childObject);
        }
        into.put(name(), list);
    } catch (Exception e) {
        LOGGER.error("Can't saveInto collection {}", name(), e);
    }
}
From source file:org.mule.modules.morphia.MorphiaConnector.java
License:Open Source License
/**
 * Calculates aggregate values without the need for complex map-reduce operations
 * <p/>
 * {@sample.xml ../../../doc/mule-module-morphia.xml.sample morphia:aggregate}
 *
 * @param collection collection name
 * @param pipeline list of pipeline operators
 * @param exception The exception that needs to be thrown if there is an error executing the aggregation query
 * @param username the username to use in case authentication is required
 * @param password the password to use in case authentication is required, null if no authentication is desired
 * @param host The host of the Mongo server. If the host is part of a replica set then you can specify all the
 *             hosts separated by comma.
 * @param port The port of the Mongo server
 * @param database The database name of the Mongo server
 * @return the aggregation result
 * @throws Exception if there is an exception while aggregating
 */
@Processor
public BasicDBList aggregate(String collection, List<Pipeline> pipeline, @Optional String exception,
        @Optional String username, @Optional @Password String password, @Optional String host,
        @Optional Integer port, @Optional String database) throws Exception {

    if (!pipeline.isEmpty()) {
        Datastore datastore = getDatastore(username, password, database, host, port);

        List<DBObject> dbObjects = new ArrayList<DBObject>();

        for (Pipeline pipelineOperator : pipeline) {
            Object dbObject = JSON.parse(pipelineOperator.toJson());

            if (dbObject == null || !(dbObject instanceof DBObject)) {
                throw new IllegalArgumentException("Illegal pipeline operator '" + pipelineOperator + "'");
            }

            dbObjects.add((DBObject) dbObject);
        }

        BasicDBObjectBuilder builder = BasicDBObjectBuilder.start().add("aggregate", collection);
        builder.append("pipeline", dbObjects.toArray());

        CommandResult result = datastore.getDB().command(builder.get());

        if (result.ok()) {
            return (BasicDBList) result.get("result");
        }

        if (exception != null) {
            throw getExceptionFromClassName(exception);
        }
    }

    // Return an empty list
    return new BasicDBList();
}
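On the consumer side, the BasicDBList returned by this processor is simply a list of result documents, so reading it is ordinary list iteration plus a cast per element. A minimal sketch, assuming each result document carries a hypothetical numeric "count" field:

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;

public class AggregateResultSketch {
    // Sum a numeric field across the documents of an aggregation result list.
    public static long totalCount(BasicDBList results) {
        long total = 0;
        for (Object entry : results) {
            // each element of the "result" array is itself a document
            BasicDBObject doc = (BasicDBObject) entry;
            Object value = doc.get("count"); // "count" is an assumed field name
            if (value instanceof Number) {
                total += ((Number) value).longValue();
            }
        }
        return total;
    }
}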
From source file:org.nuxeo.directory.mongodb.MongoDBReference.java
License:Apache License
private void setIdsFor(String field, String value, String fieldToUpdate, List<String> ids,
        MongoDBSession session) {
    Set<String> idsToAdd = new HashSet<>();
    if (ids != null) {
        idsToAdd.addAll(ids);
    }
    List<String> idsToDelete = new ArrayList<>();

    List<String> existingIds = getIdsFor(field, value, fieldToUpdate, session);
    for (String id : existingIds) {
        if (!idsToAdd.remove(id)) {
            idsToDelete.add(id);
        }
    }

    if (!idsToDelete.isEmpty()) {
        BasicDBList list = new BasicDBList();
        if (sourceField.equals(field)) {
            list.addAll(idsToDelete.stream().map(id -> buildDoc(value, id)).collect(Collectors.toList()));
        } else {
            list.addAll(idsToDelete.stream().map(id -> buildDoc(id, value)).collect(Collectors.toList()));
        }
        Bson deleteDoc = new BasicDBObject("$or", list);
        session.getCollection(collection).deleteMany(deleteDoc);
    }

    if (!idsToAdd.isEmpty()) {
        List<Document> list;
        if (sourceField.equals(field)) {
            list = idsToAdd.stream().map(id -> buildDoc(value, id)).collect(Collectors.toList());
        } else {
            list = idsToAdd.stream().map(id -> buildDoc(id, value)).collect(Collectors.toList());
        }
        session.getCollection(collection).insertMany(list);
    }
}
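The method above works because BasicDBObject implements Bson, so the $or document built from a BasicDBList can be handed straight to deleteMany. With the newer driver helpers, the same kind of filter could instead be assembled with Filters.or; a hedged sketch under that assumption, reusing only the idea of the buildDoc-style sub-documents rather than the actual MongoDBReference internals:

import static com.mongodb.client.model.Filters.or;

import java.util.ArrayList;
import java.util.List;

import org.bson.Document;
import org.bson.conversions.Bson;

import com.mongodb.client.MongoCollection;

public class OrDeleteSketch {
    // Delete every document matching one of the given sub-document filters,
    // mirroring the $or filter built with a BasicDBList above.
    public static void deleteMatching(MongoCollection<Document> coll, List<Document> docs) {
        List<Bson> clauses = new ArrayList<>(docs); // Document implements Bson
        coll.deleteMany(or(clauses));
    }
}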
From source file:org.obiba.magma.datasource.mongodb.converter.ValueConverter.java
License:Open Source License
public static Object marshall(Variable variable, Value value) {
    if (value == null || value.isNull())
        return null;
    if (variable.isRepeatable()) {
        Collection<Object> list = new BasicDBList();
        for (Value val : value.asSequence().getValues()) {
            list.add(marshall(val));
        }
        return list;
    }
    return marshall(value);
}