List of usage examples for com.mongodb DBCursor count
public int count()
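Before the project-specific examples below, here is a minimal, self-contained sketch of typical DBCursor.count() usage. The database name, collection name, and field name are hypothetical; only the driver calls themselves are taken from the com.mongodb legacy API.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.MongoClient;

public class CountExample {
    public static void main(String[] args) throws Exception {
        MongoClient mongo = new MongoClient("localhost", 27017); // assumes a local mongod
        try {
            DB db = mongo.getDB("test");                         // hypothetical database
            DBCollection users = db.getCollection("users");      // hypothetical collection
            DBCursor cursor = users.find(new BasicDBObject("age", new BasicDBObject("$gt", 30)));
            try {
                // count() asks the server how many documents match the cursor's query;
                // it ignores limit() and skip() (size() takes those into account).
                System.out.println("matching documents: " + cursor.count());
            } finally {
                cursor.close();
            }
        } finally {
            mongo.close();
        }
    }
}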
From source file:com.linuxbox.enkive.statistics.gathering.past.AttachmentsPastGatherer.java
License:Open Source License
protected Map<String, Object> getConsolidatedData(Date start, Date end, int grain) {
    Map<String, Object> result = new HashMap<String, Object>();
    Map<String, Object> query = new HashMap<String, Object>();
    Map<String, Object> innerQuery = new HashMap<String, Object>();
    innerQuery.put("$gte", start);
    innerQuery.put("$lt", end);
    query.put(MONGO_UPLOAD_DATE, innerQuery);

    long dataByteSz = 0;
    DBCursor dataCursor = attachmentsColl.find(new BasicDBObject(query));
    for (DBObject obj : dataCursor) {
        dataByteSz += (Long) (obj.get(MONGO_LENGTH));
    }

    // count() reports the number of documents matching the query, so it is still
    // valid after the cursor has been iterated; note each call is a server round trip.
    Map<String, Object> innerNumAttach = new HashMap<String, Object>();
    innerNumAttach.put(CONSOLIDATION_AVG, dataCursor.count());

    Map<String, Object> innerAttachSz = new HashMap<String, Object>();
    long avgAttSz = 0;
    if (dataCursor.count() != 0) {
        avgAttSz = dataByteSz / dataCursor.count();
    }
    innerAttachSz.put(CONSOLIDATION_AVG, avgAttSz);

    Map<String, Object> innerAttArchiveSize = new HashMap<String, Object>();
    innerAttArchiveSize.put(CONSOLIDATION_AVG, attachmentsColl.count());

    Map<String, Object> dateMap = new HashMap<String, Object>();
    dateMap.put(CONSOLIDATION_MIN, start);
    dateMap.put(CONSOLIDATION_MAX, end);

    result.put(STAT_ATTACH_SIZE, innerAttachSz);
    result.put(STAT_ATTACH_NUM, innerNumAttach);
    result.put(STAT_ATTACH_ARCHIVE_SIZE, innerAttArchiveSize);
    result.put(STAT_TIMESTAMP, dateMap);
    result.put(CONSOLIDATION_TYPE, grain);
    result.put(STAT_GATHERER_NAME, gathererName);
    return result;
}
From source file:com.miya.twit.mongodb.DBConnectSentiment.java
public void getTweetWithUserId(int userId) {
    DBCollection collection = dbConnection();
    if (collection != null) {
        DBCursor cursor = collection.find();
        try {
            while (cursor.hasNext()) {
                System.out.println(cursor.next());
            }
        } finally {
            cursor.close();
        }

        //------------------------------------
        // get documents by query
        BasicDBObject query = new BasicDBObject("userid", new BasicDBObject("$gt", userId));
        cursor = collection.find(query);
        System.out.println("tweets found: " + cursor.count());

        // /** ** Update *** */
        // // update documents found by query "age > 30" with updateObj "age = 20"
        // BasicDBObject newDocument = new BasicDBObject();
        // newDocument.put("age", 20);
        //
        // BasicDBObject updateObj = new BasicDBObject();
        // updateObj.put("$set", newDocument);
        //
        // collection.update(query, updateObj, false, true);
        //
        // /** ** Find and display *** */
        // cursor = collection.find(query);
        // System.out.println("Person with age > 40 after update --> " + cursor.count());
        //
        // // get all again
        // cursor = collection.find();
        // try {
        //     while (cursor.hasNext()) {
        //         System.out.println(cursor.next());
        //     }
        // } finally {
        //     cursor.close();
        // }
    }
}
From source file:com.mulesoft.quartz.mongo.MongoDBJobStore.java
License:Open Source License
public boolean removeTrigger(TriggerKey triggerKey) throws JobPersistenceException {
    BasicDBObject dbObject = keyAsDBObject(triggerKey);
    DBCursor find = triggerCollection.find(dbObject);
    if (find.count() > 0) {
        triggerCollection.remove(dbObject);
        return true;
    }
    return false;
}
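Note that the find()/count() pair above costs a full query round trip just to decide whether anything will be removed. A minimal sketch of a leaner variant, assuming the same triggerCollection field and keyAsDBObject helper from the example, checks the WriteResult returned by remove() instead:

public boolean removeTrigger(TriggerKey triggerKey) throws JobPersistenceException {
    // remove() reports how many documents it deleted, so no separate count() is needed
    // (sketch only; assumes the surrounding class from the example above)
    return triggerCollection.remove(keyAsDBObject(triggerKey)).getN() > 0;
}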
From source file:com.mysema.query.mongodb.MongodbQuery.java
License:Apache License
protected List<Object> getIds(Class<?> targetType, Predicate condition) {
    DBCollection collection = getCollection(targetType);
    // TODO : fetch only ids
    DBCursor cursor = createCursor(collection, condition, QueryModifiers.EMPTY,
            Collections.<OrderSpecifier<?>>emptyList());
    if (cursor.hasNext()) {
        // count() issues an extra count query here, solely to presize the list
        List<Object> ids = new ArrayList<Object>(cursor.count());
        for (DBObject obj : cursor) {
            ids.add(obj.get("_id"));
        }
        return ids;
    } else {
        return Collections.emptyList();
    }
}
From source file:com.novemberain.quartz.mongodb.MongoDBJobStore.java
License:Open Source License
public boolean replaceTrigger(TriggerKey triggerKey, OperableTrigger newTrigger) throws JobPersistenceException {
    OperableTrigger trigger = retrieveTrigger(triggerKey);
    if (trigger == null) {
        return false;
    }
    if (!trigger.getJobKey().equals(newTrigger.getJobKey())) {
        throw new JobPersistenceException("New trigger is not related to the same job as the old trigger.");
    }

    // Can't call removeTrigger because, if the job is not durable, it will remove the job too
    BasicDBObject dbObject = Keys.keyToDBObject(triggerKey);
    DBCursor triggers = triggerCollection.find(dbObject);
    if (triggers.count() > 0) {
        triggerCollection.remove(dbObject);
    }

    // Copy across the job data map from the old trigger to the new one.
    newTrigger.getJobDataMap().putAll(trigger.getJobDataMap());

    try {
        storeTrigger(newTrigger, false);
    } catch (JobPersistenceException jpe) {
        storeTrigger(trigger, false); // put previous trigger back...
        throw jpe;
    }
    return true;
}
From source file:com.novemberain.quartz.mongodb.MongoDBJobStore.java
License:Open Source License
private void doAcquireNextTriggers(Map<TriggerKey, OperableTrigger> triggers, Date noLaterThanDate, int maxCount)
        throws JobPersistenceException {
    BasicDBObject query = new BasicDBObject();
    query.put(TRIGGER_NEXT_FIRE_TIME, new BasicDBObject("$lte", noLaterThanDate));
    DBCursor cursor = triggerCollection.find(query);

    BasicDBObject sort = new BasicDBObject();
    sort.put(TRIGGER_NEXT_FIRE_TIME, Integer.valueOf(1));
    cursor.sort(sort);

    log.debug("Found {} triggers which are eligible to be run.", cursor.count());

    while (cursor.hasNext() && maxCount > triggers.size()) {
        DBObject dbObj = cursor.next();
        OperableTrigger trigger = toTrigger(dbObj);
        try {
            if (trigger == null) {
                continue;
            }

            if (triggers.containsKey(trigger.getKey())) {
                log.debug("Skipping trigger {} as we have already acquired it.", trigger.getKey());
                continue;
            }

            if (trigger.getNextFireTime() == null) {
                log.debug("Skipping trigger {} as it has no next fire time.", trigger.getKey());
                // No next fire time, so delete it
                removeTrigger(trigger.getKey());
                continue;
            }

            // deal with misfires
            if (applyMisfire(trigger)) {
                log.debug("Misfire trigger {}.", trigger.getKey());
                Date nextFireTime = trigger.getNextFireTime();
                if (nextFireTime == null) {
                    log.debug("Removing trigger {} as it has no next fire time after the misfire was applied.",
                            trigger.getKey());
                    // No next fire time, so delete it
                    removeTrigger(trigger.getKey());
                    continue;
                }

                // The trigger has misfired and was rescheduled; its fire time may now be too far in
                // the future, and we don't want to hang the Quartz scheduler thread up on
                // sigLock.wait(timeUntilTrigger), so check again that the trigger is due to fire
                if (nextFireTime.after(noLaterThanDate)) {
                    log.debug("Skipping trigger {} as it misfired and was scheduled for {}.",
                            trigger.getKey(), trigger.getNextFireTime());
                    continue;
                }
            }

            log.debug("Inserting lock for trigger {}", trigger.getKey());
            BasicDBObject lock = new BasicDBObject();
            lock.put(KEY_NAME, dbObj.get(KEY_NAME));
            lock.put(KEY_GROUP, dbObj.get(KEY_GROUP));
            lock.put(LOCK_INSTANCE_ID, instanceId);
            lock.put(LOCK_TIME, new Date());
            // A lock needs to be written with FSYNCED to be 100% effective across multiple servers
            locksCollection.insert(lock, WriteConcern.FSYNCED);

            log.debug("Acquired trigger {}", trigger.getKey());
            triggers.put(trigger.getKey(), trigger);
        } catch (DuplicateKey e) {
            // someone else acquired this lock; move on
            log.debug("Failed to acquire trigger {} due to a lock", trigger.getKey());

            BasicDBObject lock = new BasicDBObject();
            lock.put(KEY_NAME, dbObj.get(KEY_NAME));
            lock.put(KEY_GROUP, dbObj.get(KEY_GROUP));

            DBObject existingLock;
            DBCursor lockCursor = locksCollection.find(lock);
            if (lockCursor.hasNext()) {
                existingLock = lockCursor.next();
                // support for trigger lock expirations
                if (isTriggerLockExpired(existingLock)) {
                    log.warn("Lock for trigger {} is expired - removing lock and retrying trigger acquisition",
                            trigger.getKey());
                    removeTriggerLock(trigger);
                    doAcquireNextTriggers(triggers, noLaterThanDate, maxCount - triggers.size());
                }
            } else {
                log.warn("Error retrieving expired lock from the database. Maybe it was deleted");
                doAcquireNextTriggers(triggers, noLaterThanDate, maxCount - triggers.size());
            }
        }
    }
}
From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java
License:Apache License
public DBObject mapReduce(int key, String property, Filter filter, List<Integer> bins) {
    LOG.debug("Starting mapReduce for the following property: {}", property);
    long start = System.currentTimeMillis();
    Property prop = getCache().getProperty(property);
    String propType = prop.getType();
    String map = "";
    String map2 = "";
    String reduce = "";
    if (propType.equals(PropertyType.STRING.toString()) || propType.equals(PropertyType.BOOL.toString())) {
        map = "function() {\n"
                + "  property = '" + property + "';\n"
                + "  for (mr in this.metadata){\n"
                + "    metadataRecord=this.metadata[mr];\n"
                + "    if(metadataRecord.property == property){\n"
                + "      if (metadataRecord.status == 'CONFLICT'){\n"
                + "        emit({property: property, value: 'CONFLICT'}, 1);\n"
                + "      } else {\n"
                + "        emit({property: property, value: metadataRecord.sourcedValues[0].value}, 1);\n"
                + "      }\n"
                + "      return;\n"
                + "    }\n"
                + "  }\n"
                + "  emit({property: property, value: 'Unknown'}, 1);\n"
                + "}";
        reduce = "function reduce(key, values) {\n"
                + "  var res = 0;\n"
                + "  values.forEach(function(v) { res += v; });\n"
                + "  return res;\n"
                + "}";
    } else if (propType.equals(PropertyType.INTEGER.toString()) || propType.equals(PropertyType.FLOAT.toString())) {
        map = "function() {\n"
                + "  property = '" + property + "';\n"
                + "  thresholds = " + getBinThresholds(bins) + ";\n"
                + "  for (mr in this.metadata){\n"
                + "    metadataRecord=this.metadata[mr];\n"
                + "    if(metadataRecord.property == property){\n"
                + "      if (metadataRecord.status == 'CONFLICT'){\n"
                + "        emit({property: property, value: 'CONFLICT'}, 1);\n"
                + "      } else {\n"
                + "        var val=metadataRecord.sourcedValues[0].value;\n"
                + "        var skipped=false;\n"
                + "        if (thresholds.length > 0)\n"
                + "          for (t in thresholds){\n"
                + "            threshold = thresholds[t];\n"
                + "            if (val>=threshold[0] && val<=threshold[1]){\n"
                + "              emit({property: property, value: threshold[0]+'-'+threshold[1]}, 1);\n"
                + "              skipped=true;\n"
                + "              break;\n"
                + "            }\n"
                + "          }\n"
                + "      }\n"
                + "      return;\n"
                + "    }\n"
                + "  }\n"
                + "  emit({property: property, value: 'Unknown'}, 1);\n"
                + "}";
        reduce = "function reduce(key, values) {\n"
                + "  var res = 0;\n"
                + "  values.forEach(function(v) { res += v; });\n"
                + "  return res;\n"
                + "}";
    } else if (propType.equals(PropertyType.DATE.toString())) {
        map = "function() {\n"
                + "  property = '" + property + "';\n"
                + "  for (mr in this.metadata){\n"
                + "    metadataRecord=this.metadata[mr];\n"
                + "    if(metadataRecord.property == property){\n"
                + "      if (metadataRecord.status == 'CONFLICT'){\n"
                + "        emit({property: property, value: 'CONFLICT'}, 1);\n"
                + "      } else {\n"
                + "        var date = new Date(metadataRecord.sourcedValues[0].value);\n"
                + "        var val=date.getFullYear();\n"
                + "        emit({property: property, value: val}, 1);\n"
                + "      }\n"
                + "      return;\n"
                + "    }\n"
                + "  }\n"
                + "  emit({property: property, value: 'Unknown'}, 1);\n"
                + "}";
        reduce = "function reduce(key, values) {\n"
                + "  var res = 0;\n"
                + "  values.forEach(function(v) { res += v; });\n"
                + "  return res;\n"
                + "}";
    }
    DBObject query = this.getCachedFilter(filter);
    LOG.debug("Filter query is:\n{}", query);
    String queryString = query.toString();
    DBCollection elmnts = getCollection(Element.class);
    MapReduceCommand cmd = new MapReduceCommand(elmnts, map, reduce, null, INLINE, query);
    MapReduceOutput output = elmnts.mapReduce(cmd);
    // List<BasicDBObject> results = (List<BasicDBObject>) output.getCommandResult().get("results");
    Iterator<DBObject> iterator = output.results().iterator();
    List<BasicDBObject> results = new ArrayList<BasicDBObject>();
    while (iterator.hasNext()) {
        results.add((BasicDBObject) iterator.next());
    }
    LOG.debug("MapReduce produced {} results", results.size());
    DBCollection histCollection = this.db.getCollection(TBL_HISTOGRAMS);
    BasicDBObject old = new BasicDBObject("_id", key);
    BasicDBObject res = new BasicDBObject(old.toMap());
    res.put("results", results);
    histCollection.update(old, res, true, false);
    DBCursor cursor = histCollection.find(new BasicDBObject("_id", key));
    if (cursor.count() == 0) {
        return null;
    }
    long end = System.currentTimeMillis();
    LOG.debug("MapReduce took {} seconds", (end - start) / 1000);
    return (DBObject) cursor.next().get("results");
}
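Incidentally, the find/count()/next() tail of this method can be collapsed into a single findOne call, which returns null when nothing matches. A sketch, assuming the same histCollection and key as above:

// Sketch: findOne returns null when no document matches, replacing the count() == 0 check.
DBObject histogram = histCollection.findOne(new BasicDBObject("_id", key));
return histogram == null ? null : (DBObject) histogram.get("results");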
From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java
License:Apache License
public DBObject mapReduceAllValues(int key, String property, Filter filter, List<Integer> bins) {
    LOG.debug("Starting mapReduce for the following property: {}", property);
    long start = System.currentTimeMillis();
    Property prop = getCache().getProperty(property);
    String propType = prop.getType();
    String map = "";
    String map2 = "";
    String reduce = "";
    if (propType.equals(PropertyType.STRING.toString()) || propType.equals(PropertyType.BOOL.toString())) {
        map = "function() {\n"
                + "  property = '" + property + "';\n"
                + "  for (mr in this.metadata){\n"
                + "    metadataRecord=this.metadata[mr];\n"
                + "    if(metadataRecord.property == property){\n"
                + "      for (i in metadataRecord.sourcedValues){\n"
                + "        sv=metadataRecord.sourcedValues[i];\n"
                + "        emit({property: property, value: sv.value}, 1);\n"
                + "      }\n"
                + "      return;\n"
                + "    }\n"
                + "  }\n"
                + "  emit({property: property, value: 'Unknown'}, 1);\n"
                + "}";
        reduce = "function reduce(key, values) {\n"
                + "  var res = 0;\n"
                + "  values.forEach(function(v) { res += v; });\n"
                + "  return res;\n"
                + "}";
    } else if (propType.equals(PropertyType.INTEGER.toString()) || propType.equals(PropertyType.FLOAT.toString())) {
        map = "function() {\n"
                + "  property = '" + property + "';\n"
                + "  thresholds = " + getBinThresholds(bins) + ";\n"
                + "  for (mr in this.metadata){\n"
                + "    metadataRecord=this.metadata[mr];\n"
                + "    if(metadataRecord.property == property){\n"
                + "      for (i in metadataRecord.sourcedValues){\n"
                + "        sv=metadataRecord.sourcedValues[i];\n"
                + "        var val=sv.value;\n"
                + "        if (thresholds.length > 0)\n"
                + "          for (t in thresholds){\n"
                + "            threshold = thresholds[t];\n"
                + "            if (val>=threshold[0] && val<=threshold[1]){\n"
                + "              emit({property: property, value: threshold[0]+'-'+threshold[1]}, 1);\n"
                + "            }\n"
                + "          }\n"
                + "      }\n"
                + "      return;\n"
                + "    }\n"
                + "  }\n"
                + "  emit({property: property, value: 'Unknown'}, 1);\n"
                + "}";
        reduce = "function reduce(key, values) {\n"
                + "  var res = 0;\n"
                + "  values.forEach(function(v) { res += v; });\n"
                + "  return res;\n"
                + "}";
    } else if (propType.equals(PropertyType.DATE.toString())) {
        map = "function() {\n"
                + "  property = '" + property + "';\n"
                + "  for (mr in this.metadata){\n"
                + "    metadataRecord=this.metadata[mr];\n"
                + "    if(metadataRecord.property == property){\n"
                + "      for (i in metadataRecord.sourcedValues){\n"
                + "        sv=metadataRecord.sourcedValues[i];\n"
                + "        var date = new Date(sv.value);\n"
                + "        var val=date.getFullYear();\n"
                + "        emit({property: property, value: val}, 1);\n"
                + "      }\n"
                + "      return;\n"
                + "    }\n"
                + "  }\n"
                + "  emit({property: property, value: 'Unknown'}, 1);\n"
                + "}";
        reduce = "function reduce(key, values) {\n"
                + "  var res = 0;\n"
                + "  values.forEach(function(v) { res += v; });\n"
                + "  return res;\n"
                + "}";
    }
    DBObject query = this.getCachedFilter(filter);
    LOG.debug("Filter query is:\n{}", query);
    String queryString = query.toString();
    DBCollection elmnts = getCollection(Element.class);
    MapReduceCommand cmd = new MapReduceCommand(elmnts, map, reduce, null, INLINE, query);
    MapReduceOutput output = elmnts.mapReduce(cmd);
    // List<BasicDBObject> results = (List<BasicDBObject>) output.getCommandResult().get("results");
    Iterator<DBObject> iterator = output.results().iterator();
    List<BasicDBObject> results = new ArrayList<BasicDBObject>();
    while (iterator.hasNext()) {
        results.add((BasicDBObject) iterator.next());
    }
    LOG.debug("MapReduce produced {} results", results.size());
    DBCollection histCollection = this.db.getCollection(TBL_HISTOGRAMS);
    BasicDBObject old = new BasicDBObject("_id", key);
    BasicDBObject res = new BasicDBObject(old.toMap());
    res.put("results", results);
    histCollection.update(old, res, true, false);
    DBCursor cursor = histCollection.find(new BasicDBObject("_id", key));
    if (cursor.count() == 0) {
        return null;
    }
    long end = System.currentTimeMillis();
    LOG.debug("MapReduce took {} seconds", (end - start) / 1000);
    return (DBObject) cursor.next().get("results");
}
From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java
License:Apache License
public DBObject mapReduceStats(int key, String property, Filter filter) {
    LOG.debug("Starting mapReduceStats for the following property: {}", property);
    long start = System.currentTimeMillis();
    Property prop = getCache().getProperty(property);
    String propType = prop.getType();
    String map = "";
    String reduce = "";
    String finalize = "";
    if (propType.equals(PropertyType.INTEGER.toString()) || propType.equals(PropertyType.FLOAT.toString())) {
        map = "function() {\n"
                + "  property = '" + property + "';\n"
                + "  for (mr in this.metadata){\n"
                + "    metadataRecord=this.metadata[mr];\n"
                + "    if(metadataRecord.property == property){\n"
                + "      emit({property: property, value: property},\n"
                + "           {sum: metadataRecord.sourcedValues[0].value,\n"
                + "            min: metadataRecord.sourcedValues[0].value,\n"
                + "            max: metadataRecord.sourcedValues[0].value,\n"
                + "            count: 1,\n"
                + "            diff: 0});\n"
                + "      return;\n"
                + "    }\n"
                + "  }\n"
                + "  emit({property: property, value: 'Unknown'}, 1);\n"
                + "}\n";
        reduce = "function reduce(key, values) {\n"
                + "  var a = values[0];\n"
                + "  for (var i = 1; i < values.length; i++) {\n"
                + "    var b = values[i];\n"
                + "    var delta = a.sum / a.count - b.sum / b.count;\n"
                + "    var weight = (a.count * b.count) / (a.count + b.count);\n"
                + "    a.diff += b.diff + delta * delta * weight;\n"
                + "    a.sum = b.sum*1 + a.sum*1;\n"
                + "    a.count += b.count;\n"
                + "    a.min = Math.min(a.min, b.min);\n"
                + "    a.max = Math.max(a.max, b.max);\n"
                + "  }\n"
                + "  return a;\n"
                + "}";
        finalize = "function finalize(key, value) {\n"
                + "  value.avg = value.sum / value.count;\n"
                + "  value.variance = value.diff / value.count;\n"
                + "  value.stddev = Math.sqrt(value.variance);\n"
                + "  return value;\n"
                + "}";
    }
    DBObject query = this.getCachedFilter(filter);
    LOG.debug("filter query is:\n{}", query);
    DBCollection elmnts = getCollection(Element.class);
    MapReduceCommand cmd = new MapReduceCommand(elmnts, map, reduce, null, INLINE, query);
    cmd.setFinalize(finalize);
    MapReduceOutput output = elmnts.mapReduce(cmd);
    // List<BasicDBObject> results = (List<BasicDBObject>) output.getCommandResult().get("results");
    Iterator<DBObject> iterator = output.results().iterator();
    List<BasicDBObject> results = new ArrayList<BasicDBObject>();
    while (iterator.hasNext()) {
        results.add((BasicDBObject) iterator.next());
    }
    LOG.debug("MapReduce produced {} results", results.size());
    DBCollection histCollection = this.db.getCollection(TBL_HISTOGRAMS);
    BasicDBObject old = new BasicDBObject("_id", key);
    BasicDBObject res = new BasicDBObject(old.toMap());
    res.put("results", results);
    histCollection.update(old, res, true, false);
    DBCursor cursor = histCollection.find(new BasicDBObject("_id", key));
    if (cursor.count() == 0) {
        return null;
    }
    long end = System.currentTimeMillis();
    LOG.debug("The map-reduce job took {} seconds", (end - start) / 1000);
    return (DBObject) cursor.next().get("results");
}
From source file:com.querydsl.mongodb.AbstractMongodbQuery.java
License:Apache License
protected List<Object> getIds(Class<?> targetType, Predicate condition) {
    DBCollection collection = getCollection(targetType);
    // TODO : fetch only ids
    DBCursor cursor = createCursor(collection, condition, null, QueryModifiers.EMPTY,
            Collections.<OrderSpecifier<?>>emptyList());
    if (cursor.hasNext()) {
        List<Object> ids = new ArrayList<Object>(cursor.count());
        for (DBObject obj : cursor) {
            ids.add(obj.get("_id"));
        }
        return ids;
    } else {
        return Collections.emptyList();
    }
}