Usage examples for com.mongodb.BasicDBObjectBuilder.start()
Signature: public static BasicDBObjectBuilder start()
From source file:com.conventus.mongodb.converter.ContentConverter.java
public DBObject toDBObject(Content content) { BasicDBObjectBuilder builder = BasicDBObjectBuilder.start().append("name", content.getName()) .append("source", content.getSource()).append("file", content.getFile()); if (content.getId() != null) builder = builder.append("_id", new ObjectId(content.getId())); return builder.get(); }
From source file:com.conventus.mongodb.converter.FilmConverter.java
public DBObject toDBObject(Film film) { BasicDBObjectBuilder builder = BasicDBObjectBuilder.start().append("name", film.getName()) .append("source", film.getSource()).append("file", film.getFile()).append("title", film.getTitle()) .append("genre", film.getGenre()).append("size", film.getSize()) .append("quality", film.getQuality()).append("resolution", film.getResolution()) .append("frameRate", film.getFrameRate()).append("language", film.getLanguage()) .append("duration", film.getDuration()).append("imdbRating", film.getImdbRating()) .append("mpr", film.getMpr()).append("peersSeeds", film.getPeersSeeds()); if (film.getId() != null) builder = builder.append("_id", new ObjectId(film.getId())); return builder.get(); }
From source file:com.conventus.mongodb.dao.MongoDBGenericDAO.java
public void update(T obj) { DBObject query = BasicDBObjectBuilder.start().append("_id", new ObjectId(((IEntity) obj).getId())).get(); this.col.update(query, this.converter.toDBObject(obj)); }
From source file:com.conventus.mongodb.dao.MongoDBGenericDAO.java
public void delete(T obj) { DBObject query = BasicDBObjectBuilder.start().append("_id", new ObjectId(((IEntity) obj).getId())).get(); this.col.remove(query); }
From source file:com.conventus.mongodb.dao.MongoDBGenericDAO.java
public T read(T obj) { DBObject query = BasicDBObjectBuilder.start().append("_id", new ObjectId(((IEntity) obj).getId())).get(); DBObject data = this.col.findOne(query); return this.converter.toObject(data); }
From source file:com.databank.mongodb.converter.UserConverter.java
public static DBObject toDBObject(User u) { BasicDBObjectBuilder builder = BasicDBObjectBuilder.start().append("firstname", u.getFirstname()) .append("lastname", u.getLastname()).append("email", u.getEmail()) .append("username", u.getUsername()).append("location", u.getLocation()) .append("password1", u.getPassword1()); if (u.getID() != null) { builder = builder.append("_id", new ObjectId(u.getID())); }//from w w w.j a v a2 s . c o m return builder.get(); }
From source file:com.edgytech.umongo.CollectionPanel.java
License:Apache License
public void rename(ButtonBase button) { final CollectionNode colNode = getCollectionNode(); final DBCollection col = colNode.getCollection(); // select parent since this is getting renamed UMongo.instance.displayNode(colNode.getDbNode()); final String name = getStringFieldValue(Item.newName); final boolean dropTarget = getBooleanFieldValue(Item.dropTarget); DBObject cmd = BasicDBObjectBuilder.start().add("renameCollection", col.getFullName()) .add("to", col.getDB().getName() + "." + name).add("dropTarget", dropTarget).get(); new DbJobCmd(col.getDB().getSisterDB("admin"), cmd, null, null).addJob(); }
From source file:com.effektif.mongo.MongoJobStore.java
License:Apache License
public Job lockNextJob(DBObject query) { DBObject dbLock = BasicDBObjectBuilder.start().append(TIME, Time.now().toDate()).append(OWNER, lockOwner) .get();//from ww w . ja va 2 s. co m DBObject update = BasicDBObjectBuilder.start().push("$set").append(LOCK, dbLock).pop().get(); BasicDBObject dbJob = jobsCollection.findAndModify("lock-next-job", query, update); if (dbJob != null) { return readJob(dbJob); } return null; }
From source file:com.effektif.mongo.MongoJobStore.java
License:Apache License
protected BasicDBObjectBuilder buildLockNextJobQuery() { Date now = Time.now().toDate(); return BasicDBObjectBuilder.start() .append("$or", new DBObject[] { new BasicDBObject(DUE_DATE, new BasicDBObject("$exists", false)), new BasicDBObject(DUE_DATE, new BasicDBObject("$lte", now)) }) .push(DONE).append("$exists", false).pop(); }
From source file:com.effektif.mongo.MongoWorkflowInstanceStore.java
License:Apache License
@Override public void flush(WorkflowInstanceImpl workflowInstance) { if (log.isDebugEnabled()) log.debug("Flushing workflow instance..."); WorkflowInstanceUpdates updates = workflowInstance.getUpdates(); DBObject query = BasicDBObjectBuilder.start().add(_ID, new ObjectId(workflowInstance.id.getInternal())) // I don't recall what this line was for... if you re-add it, please add a comment to explain // .add(LOCK, writeLock(workflowInstance.lock)) .get();/* w ww . jav a 2 s . c o m*/ BasicDBObject sets = new BasicDBObject(); BasicDBObject unsets = new BasicDBObject(); BasicDBObject update = new BasicDBObject(); if (updates.isEndChanged) { // if (log.isDebugEnabled()) log.debug(" Workflow instance ended"); if (workflowInstance.end != null) { sets.append(END, workflowInstance.end.toDate()); sets.append(DURATION, workflowInstance.duration); } else { unsets.append(END, 1); unsets.append(DURATION, 1); } } if (updates.isEndStateChanged) { sets.append(END_STATE, workflowInstance.getEndState()); } // MongoDB can't combine updates of array elements together with // adding elements to that array. That's why we overwrite the whole // activity instance array when an update happened in there. // We do archive the ended (and joined) activity instances into a separate collection // that doesn't have to be loaded. 
if (updates.isActivityInstancesChanged) { BasicDBList dbActivityInstances = writeActiveActivityInstances(workflowInstance.activityInstances); sets.append(ACTIVITY_INSTANCES, dbActivityInstances); } if (updates.isVariableInstancesChanged) { writeVariableInstances(sets, workflowInstance); } if (updates.isWorkChanged) { List<String> work = writeWork(workflowInstance.work); if (work != null) { sets.put(WORK, work); } else { unsets.put(WORK, 1); } } if (updates.isAsyncWorkChanged) { List<String> workAsync = writeWork(workflowInstance.workAsync); if (workAsync != null) { sets.put(WORK_ASYNC, workAsync); } else { unsets.put(WORK_ASYNC, 1); } } if (updates.isNextActivityInstanceIdChanged) { sets.put(NEXT_ACTIVITY_INSTANCE_ID, workflowInstance.nextActivityInstanceId); } if (updates.isNextVariableInstanceIdChanged) { sets.put(NEXT_VARIABLE_INSTANCE_ID, workflowInstance.nextVariableInstanceId); } if (updates.isLockChanged) { // a lock is only removed unsets.put(LOCK, 1); } if (updates.isJobsChanged) { List<BasicDBObject> dbJobs = writeJobs(workflowInstance.jobs); if (dbJobs != null) { sets.put(JOBS, dbJobs); } else { unsets.put(JOBS, 1); } } if (updates.isPropertiesChanged) { if (workflowInstance.properties != null && workflowInstance.properties.size() > 0) sets.append(PROPERTIES, new BasicDBObject(workflowInstance.getProperties())); else unsets.append(PROPERTIES, 1); } if (!sets.isEmpty()) { update.append("$set", sets); } if (!unsets.isEmpty()) { update.append("$unset", unsets); } if (!update.isEmpty()) { workflowInstancesCollection.update("flush-workflow-instance", query, update, false, false); } // reset the update tracking as all changes have been saved workflowInstance.trackUpdates(false); }