List of usage examples for com.mongodb BasicDBObjectBuilder start
public static BasicDBObjectBuilder start()
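Before the project examples below, a minimal sketch of the basic pattern (the class name and field names here are illustrative, not taken from any of the projects): start() returns an empty builder, add()/append() set fields on the current level, push()/pop() open and close nested documents, and get() materialises the finished DBObject.

// Minimal sketch of the BasicDBObjectBuilder.start() pattern (field names are illustrative)
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DBObject;

public class BuilderSketch {
    public static DBObject buildQuery(String name, int minAge) {
        return BasicDBObjectBuilder.start()
                .add("name", name)      // top-level field
                .push("age")            // descend into a nested document
                .add("$gte", minAge)
                .pop()                  // back to the top level
                .get();                 // { "name": name, "age": { "$gte": minAge } }
    }
}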
From source file:com.gigaspaces.persistency.MongoClientConnector.java
License:Open Source License
private static DBObject normalize(DBObject obj) {
    BasicDBObjectBuilder builder = BasicDBObjectBuilder.start();
    Iterator<String> iterator = obj.keySet().iterator();
    builder.push("$set");
    while (iterator.hasNext()) {
        String key = iterator.next();
        if (Constants.ID_PROPERTY.equals(key))
            continue;
        Object value = obj.get(key);
        if (value == null)
            continue;
        builder.add(key, value);
    }
    return builder.get();
}
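Here push("$set") opens a nested document, so the builder produces an update of the form { "$set": { field: value, ... } }, with the id property and null values filtered out — the incoming object is normalized into a partial-update statement.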
From source file:com.gigaspaces.persistency.MongoSpaceDataSource.java
License:Open Source License
@Override
public Object getById(DataSourceIdQuery idQuery) {
    if (logger.isDebugEnabled())
        logger.debug("MongoSpaceDataSource.getById(" + idQuery + ")");
    SpaceDocumentMapper<DBObject> mapper = new DefaultSpaceDocumentMapper(idQuery.getTypeDescriptor());
    BasicDBObjectBuilder documentBuilder = BasicDBObjectBuilder.start()
            .add(Constants.ID_PROPERTY, mapper.toObject(idQuery.getId()));
    DBCollection mongoCollection = mongoClient.getCollection(idQuery.getTypeDescriptor().getTypeName());
    DBObject result = mongoCollection.findOne(documentBuilder.get());
    return mapper.toDocument(result);
}
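The builder yields a one-field query on the configured id property (Constants.ID_PROPERTY); the matching document returned by findOne is mapped back into a space document.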
From source file:com.gigaspaces.persistency.MongoSpaceDataSource.java
License:Open Source License
@Override
public DataIterator<Object> getDataIteratorByIds(DataSourceIdsQuery idsQuery) {
    if (logger.isDebugEnabled())
        logger.debug("MongoSpaceDataSource.getDataIteratorByIds(" + idsQuery + ")");
    DBObject[] ors = new DBObject[idsQuery.getIds().length];
    for (int i = 0; i < ors.length; i++)
        ors[i] = BasicDBObjectBuilder.start().add(Constants.ID_PROPERTY, idsQuery.getIds()[i]).get();
    DBObject document = QueryBuilder.start().or(ors).get();
    DBCollection mongoCollection = mongoClient.getCollection(idsQuery.getTypeDescriptor().getTypeName());
    DBCursor results = mongoCollection.find(document);
    return new DefaultMongoDataIterator(results, idsQuery.getTypeDescriptor());
}
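Each id is wrapped in its own single-field document and the array is combined with QueryBuilder's or(), producing a { "$or": [ ... ] } query whose cursor backs the returned DataIterator.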
From source file:com.github.mongo.labs.api.GeoService.java
License:Apache License
@GET
@Path("/{longitude}/{latitude}")
@ApiOperation(value = "Retrouve les speakers proche du point [longitude, latitude] (ex: 2.3521, 48.8670)",
        notes = "Un <b>index géolocalisé</b> doit être présent sur la collection des speakers")
public String near(@PathParam("longitude") double longitude, @PathParam("latitude") double latitude) {
    DBObject query = BasicDBObjectBuilder.start().push("geo").push("$near").add("$maxDistance", 1500)
            .push("$geometry").add("type", "Point").add("coordinates", new Double[] { longitude, latitude })
            .get();
    return JSON.serialize(speakers.find(query));
}
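The chained push() calls nest three levels deep, so the query has the GeoJSON $near shape: { geo: { $near: { $maxDistance: 1500, $geometry: { type: "Point", coordinates: [longitude, latitude] } } } }. Note that get() returns the root document even though the nested levels are never popped, and — as the annotation says — a geospatial (2dsphere) index on the geo field must exist for $near to work.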
From source file:com.heisenberg.mongo.MongoJobs.java
License:Apache License
protected BasicDBObjectBuilder buildJobQuery(boolean mustHaveProcessInstance) {
    Date now = Time.now().toDate();
    return BasicDBObjectBuilder.start()
            .append("$or", new DBObject[] {
                    new BasicDBObject(fields.duedate, new BasicDBObject("$exists", false)),
                    new BasicDBObject(fields.duedate, new BasicDBObject("$lte", now)) })
            .push(fields.done).append("$exists", false).pop()
            .push(fields.workflowInstanceId).append("$exists", mustHaveProcessInstance).pop();
}
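The returned builder describes jobs that are due (no due date, or a due date at or before now) and not yet done, and uses the mustHaveProcessInstance flag to require or exclude the workflow-instance reference: { $or: [ { duedate: { $exists: false } }, { duedate: { $lte: now } } ], done: { $exists: false }, workflowInstanceId: { $exists: flag } }. Each push()/pop() pair opens and closes one of the nested $exists documents.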
From source file:com.heisenberg.mongo.MongoJobs.java
License:Apache License
public Job lockJob(boolean mustHaveProcessInstance) {
    DBObject query = buildJobQuery(mustHaveProcessInstance)
            .push(fields.lock).append("$exists", false).pop()
            .get();
    DBObject dbLock = BasicDBObjectBuilder.start()
            .append(fields.time, Time.now().toDate())
            .append(fields.owner, lockOwner)
            .get();
    DBObject update = BasicDBObjectBuilder.start().push("$set").append(fields.lock, dbLock).pop().get();
    BasicDBObject dbJob = findAndModify(query, update);
    if (dbJob != null) {
        return readJob(dbJob);
    }
    return null;
}
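The query from buildJobQuery() is extended with lock: { $exists: false }, and the update is { $set: { lock: { time: now, owner: lockOwner } } }. Applying both through findAndModify makes the check-and-set atomic, so only one owner can claim a given job.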
From source file:com.heisenberg.mongo.MongoWorkflowInstanceStore.java
License:Apache License
@Override
public void flush(WorkflowInstanceImpl workflowInstance) {
    if (log.isDebugEnabled())
        log.debug("Flushing...");
    WorkflowInstanceUpdates updates = workflowInstance.getUpdates();
    DBObject query = BasicDBObjectBuilder.start()
            .add(fields._id, new ObjectId(workflowInstance.id))
            .add(fields.lock, writeLock(workflowInstance.lock))
            .get();
    BasicDBObject sets = new BasicDBObject();
    BasicDBObject unsets = new BasicDBObject();
    BasicDBObject update = new BasicDBObject();
    if (updates.isEndChanged) {
        if (log.isDebugEnabled())
            log.debug(" Workflow instance ended");
        sets.append(fields.end, workflowInstance.end);
        sets.append(fields.duration, workflowInstance.duration);
    }
    // MongoDB can't combine updates of array elements together with
    // adding elements to that array. That's why we overwrite the whole
    // activity instance array when an update happened in there.
    // We do archive the ended (and joined) activity instances into a separate collection
    // that doesn't have to be loaded.
    if (updates.isActivityInstancesChanged) {
        if (log.isDebugEnabled())
            log.debug(" Activity instances changed");
        List<BasicDBObject> activityInstances = new ArrayList<>();
        List<BasicDBObject> archivedActivityInstances = new ArrayList<>();
        collectActivities(workflowInstance, activityInstances, archivedActivityInstances);
        sets.append(fields.activityInstances, activityInstances);
        if (!archivedActivityInstances.isEmpty()) {
            update.append("$push", new BasicDBObject(fields.archivedActivityInstances, archivedActivityInstances));
        }
    } else {
        if (log.isDebugEnabled())
            log.debug(" No activity instances changed");
    }
    if (updates.isVariableInstancesChanged) {
        if (log.isDebugEnabled())
            log.debug(" Variable instances changed");
        writeVariables(sets, workflowInstance);
    } else {
        if (log.isDebugEnabled())
            log.debug(" No variable instances changed");
    }
    if (updates.isWorkChanged) {
        if (log.isDebugEnabled())
            log.debug(" Work changed");
        List<ObjectId> work = writeWork(workflowInstance.work);
        if (work != null) {
            sets.put(fields.work, work);
        } else {
            unsets.put(fields.work, 1);
        }
    } else {
        if (log.isDebugEnabled())
            log.debug(" No work changed");
    }
    if (updates.isAsyncWorkChanged) {
        if (log.isDebugEnabled())
            log.debug(" Async work changed");
        List<ObjectId> workAsync = writeWork(workflowInstance.workAsync);
        if (workAsync != null) {
            sets.put(fields.workAsync, workAsync);
        } else {
            unsets.put(fields.workAsync, 1);
        }
    } else {
        if (log.isDebugEnabled())
            log.debug(" No async work changed");
    }
    if (!sets.isEmpty()) {
        update.append("$set", sets);
    } else {
        if (log.isDebugEnabled())
            log.debug(" No sets");
    }
    if (!unsets.isEmpty()) {
        update.append("$unset", unsets);
    } else {
        if (log.isDebugEnabled())
            log.debug(" No unsets");
    }
    if (!update.isEmpty()) {
        update(query, update, false, false, writeConcernFlushUpdates);
    } else {
        if (log.isDebugEnabled())
            log.debug(" Nothing to flush");
    }
    // reset the update tracking as all changes have been saved
    workflowInstance.trackUpdates(false);
}
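All changes are accumulated into a single update document — $set for overwritten fields, $unset for cleared ones, and an optional $push for archived activity instances — and applied against a query that includes the current lock, so the flush only takes effect while this engine still owns the workflow instance.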
From source file:com.heisenberg.mongo.MongoWorkflowInstanceStore.java
License:Apache License
public WorkflowInstanceImpl lockWorkflowInstance(WorkflowInstanceQueryImpl processInstanceQuery) {
    BasicDBObjectBuilder builder = BasicDBObjectBuilder.start();
    if (processInstanceQuery.workflowInstanceId != null) {
        builder.add(fields._id, new ObjectId(processInstanceQuery.workflowInstanceId));
    }
    if (processInstanceQuery.activityInstanceId != null) {
        builder.add(fields.activityInstances + "." + fields._id,
                new ObjectId(processInstanceQuery.activityInstanceId));
    }
    DBObject query = builder.push(fields.lock).add("$exists", false).pop().get();
    DBObject update = BasicDBObjectBuilder.start().push("$set").push(fields.lock)
            .add(fields.time, Time.now().toDate()).add(fields.owner, processEngine.getId()).pop().pop().get();
    DBObject retrieveFields = new BasicDBObject().append(fields.archivedActivityInstances, false);
    BasicDBObject dbProcessInstance = findAndModify(query, update, retrieveFields);
    if (dbProcessInstance == null) {
        return null;
    }
    WorkflowInstanceImpl workflowInstance = readProcessInstance(dbProcessInstance);
    workflowInstance.trackUpdates(false);
    return workflowInstance;
}
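The query requires lock: { $exists: false } and the update is { $set: { lock: { time: now, owner: engineId } } } — note the matching pop() for every push(). findAndModify makes the claim atomic, and the retrieveFields projection excludes the archived activity instances from the document that is read back.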
From source file:com.ibm.ws.lars.rest.PersistenceBean.java
License:Apache License
/** {@inheritDoc} */
@Override
public void initialize() {
    // Make sure the fields we want to query are indexed
    DBCollection assets = db.getCollection(ASSETS_COLLECTION);
    DBCollection attachments = db.getCollection(ATTACHMENTS_COLLECTION);

    // Add text index
    BasicDBObjectBuilder textIndex = BasicDBObjectBuilder.start();
    for (String indexField : searchIndexFields) {
        textIndex.add(indexField, "text");
    }
    assets.ensureIndex(textIndex.get());

    // Add Attachment(assetId) index
    attachments.ensureIndex(new BasicDBObject("assetId", 1));
}
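The loop turns searchIndexFields into an index specification of the form { field1: "text", field2: "text", ... }, i.e. a compound text index over all searchable fields. In newer versions of the legacy Java driver, ensureIndex() is deprecated in favour of createIndex(), which accepts the same specification.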
From source file:com.jjorgemoura.hangmanz.model.ZDHangmanGame.java
private DBObject createDBObject() {
    List lettersList = new ArrayList();
    BasicDBObjectBuilder docBuilder = BasicDBObjectBuilder.start();
    BasicDBObjectBuilder lettersDocBuilder;
    int theOrder = 1;
    for (Iterator<Integer> it = this.playsRecord.keySet().iterator(); it.hasNext();) {
        Integer i = it.next();
        ZDAlphabet x = this.playsRecord.get(i);
        lettersDocBuilder = BasicDBObjectBuilder.start();
        lettersDocBuilder.append("hm_letter", x.getLetter());
        lettersDocBuilder.append("hm_letter_order", theOrder);
        lettersList.add(lettersDocBuilder.get());
        theOrder++;
    }
    docBuilder.append("hm_uuid", this.uniqueUUID);
    docBuilder.append("hm_start_date", this.startDate.toString());
    docBuilder.append("hm_latest_date", this.latestDate.toString());
    docBuilder.append("hm_the_word", this.theWord.getName());
    docBuilder.append("hm_category", this.category.getName());
    docBuilder.append("hm_letters_played", lettersList);
    return docBuilder.get();
}
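The resulting document carries the game's scalar fields (hm_uuid, hm_start_date, hm_latest_date, hm_the_word, hm_category) plus hm_letters_played, a list of embedded { hm_letter, hm_letter_order } sub-documents, each built with its own BasicDBObjectBuilder per played letter.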