List of usage examples for com.mongodb DBCollection count
public long count()
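A minimal sketch of this overload next to the query variant, assuming a local mongod, the legacy com.mongodb driver, and illustrative "blog"/"users" names:

    MongoClient client = new MongoClient(); // connects to localhost:27017
    DBCollection users = client.getDB("blog").getCollection("users");
    long total = users.count();                                   // every document in the collection
    long active = users.count(new BasicDBObject("active", true)); // only documents matching the query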
From source file: com.ikanow.infinit.e.api.knowledge.federated.SimpleFederatedQueryEngine.java
License: Open Source License

private BasicDBObject getCache(String url, SourceFederatedQueryConfigPojo endpoint) {
    if ((null != endpoint.cacheTime_days) && (endpoint.cacheTime_days <= 0)) { // cache disabled
        return null;
    }
    DBCollection endpointCacheCollection = getCacheCollection();
    if (!_staticInitializer) {
        _staticInitializer = true;
        endpointCacheCollection.ensureIndex(new BasicDBObject(SimpleFederatedCache.expiryDate_, 1));
    }
    BasicDBObject cacheObj = (BasicDBObject) endpointCacheCollection
            .findOne(new BasicDBObject(SimpleFederatedCache._id_, url));
    if (null == cacheObj) {
        return null;
    }
    // else found something, meaning there's stuff in the DB, so check it's not too big:
    Date now = new Date();
    if ((-1 == _lastChecked) || (now.getTime() > (_lastChecked + (600L * 1000L)))) {
        // (only check every 10 minutes)
        if (endpointCacheCollection.count() > SimpleFederatedCache.QUERY_FEDERATION_CACHE_CLEANSE_SIZE) {
            _lastChecked = now.getTime();
            // Remove everything with an expiry date older than now
            endpointCacheCollection.remove(new BasicDBObject(SimpleFederatedCache.expiryDate_,
                    new BasicDBObject(DbManager.lt_, new Date())));
        }
    } //TESTED (4.3)
    Date expiryDate = cacheObj.getDate(SimpleFederatedCache.expiryDate_, now);
    if (now.getTime() < expiryDate.getTime()) {
        return cacheObj;
    } else {
        return null;
    } //TESTED (4.2)
}
From source file: com.ikanow.infinit.e.api.knowledge.federated.SimpleFederatedQueryEngine.java
License: Open Source License

public void test_cacheFill(String testName, boolean fill, boolean shouldBeFull) {
    DBCollection endpointCacheCollection = getCacheCollection();
    if (fill) {
        for (long i = 0; i < (1 + SimpleFederatedCache.QUERY_FEDERATION_CACHE_CLEANSE_SIZE); ++i) {
            SimpleFederatedCache fakeCacheElement = new SimpleFederatedCache();
            fakeCacheElement.expiryDate = new Date(new Date().getTime() - 3600L * 1000L); // (i.e. expired an hour ago)
            fakeCacheElement._id = testName + "_" + i;
            fakeCacheElement.cachedJson = new BasicDBObject();
            endpointCacheCollection.save(fakeCacheElement.toDb());
        }
        _lastChecked = new Date(new Date().getTime() - 602L * 1000L).getTime();
    }
    long count = endpointCacheCollection.count();
    if (shouldBeFull) {
        if (count < SimpleFederatedCache.QUERY_FEDERATION_CACHE_CLEANSE_SIZE) {
            System.out.println("*** " + testName + ": cache should contain many elements, not: " + count);
            System.exit(-1);
        }
    } else {
        if (1 != count) {
            System.out.println("*** " + testName + ": cache should just contain one element, not: " + count);
            System.exit(-1);
        }
    }
}
From source file: com.intellijob.MongoConfiguration.java
License: Apache License

@Autowired
@Bean
public Boolean doImportData(MongoClient mongoClient) throws IOException {
    DBCollection sys_import_collection = mongoClient.getDB(this.properties.getDatabase())
            .getCollection(ApplicationSettings.COLLECTION_NAME);
    if (isProduction && sys_import_collection.count() == 0) {
        LOG.info("IMPORT DATA =============================================>");
        // Import collection skill_categories
        loadCollectionSkillCategories(mongoClient);
        // Import languages
        loadSkillsData(mongoClient, "skill_languages.json", "skill_languages");
        // Import knowledge entries
        loadSkillsData(mongoClient, "skill_knowledge.json", "skill_knowledges");
        // Import personal strengths
        loadSkillsData(mongoClient, "skill_personalstrengths.json", "skill_personalstrengths");
        DBObject row = new BasicDBObject();
        row.put(ApplicationSettings.FIELD_MONGO_DATA_IMPORTED, true);
        row.put(ApplicationSettings.FIELD_MONGO_DATA_IMPORTED_DATE, new Date());
        row.put(ApplicationSettings.FIELD_ELASTIC_DATA_IMPORTED, false);
        row.put(ApplicationSettings.FIELD_ELASTIC_DATA_IMPORTED_DATE, null);
        sys_import_collection.insert(row);
        LOG.info("IMPORT DATA FINISHED!");
        return true;
    }
    return false;
}
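The doImportData bean above uses count() == 0 as a run-once guard: data is imported only while the settings collection is still empty. The same emptiness check in isolation, as a sketch against an existing DBCollection named collection (names illustrative):

    if (collection.count() == 0) {
        // nothing imported yet; safe to seed initial data
        collection.insert(new BasicDBObject("seeded", true));
    }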
From source file: com.jaspersoft.mongodb.importer.MongoDbSimpleImporter.java
License: Open Source License

private void populate(MongoDbConnection connection, String collectionName, Resource scriptResource)
        throws JRException {
    DBCollection collection = null;
    DB mongoDatabase = null;
    try {
        mongoDatabase = connection.getMongoDatabase();
        if (!mongoDatabase.collectionExists(collectionName)) {
            logger.info("Collection \"" + collectionName + "\" doesn't exist");
            DBObject options = new BasicDBObject("capped", false);
            collection = mongoDatabase.createCollection(collectionName, options);
        } else {
            logger.info("Collection \"" + collectionName + "\" exists");
            collection = mongoDatabase.getCollectionFromString(collectionName);
            collection.drop();
            logger.info("Collection \"" + collectionName + "\" was cleaned up");
        }
    } catch (MongoException e) {
        logger.error(e);
    }
    if (mongoDatabase == null) {
        throw new JRException(
                "Failed connection to mongoDB database: " + connection.getMongoURIObject().getDatabase());
    }
    if (collection == null) { // guard against a MongoException swallowed above
        throw new JRException("Failed to create or load collection: " + collectionName);
    }
    InputStreamReader inputStreamReader = null;
    BufferedReader reader = null;
    try {
        inputStreamReader = new InputStreamReader(scriptResource.getInputStream());
        reader = new BufferedReader(inputStreamReader);
        StringBuilder stringBuilder = new StringBuilder();
        String currentLine;
        while ((currentLine = reader.readLine()) != null) {
            stringBuilder.append(currentLine);
        }
        Object parseResult = JSON.parse(stringBuilder.toString());
        if (!(parseResult instanceof BasicDBList)) {
            throw new JRException(
                    "Unsupported type: " + parseResult.getClass().getName() + ". It must be a list");
        }
        BasicDBList list = (BasicDBList) parseResult;
        List<DBObject> objectsList = new ArrayList<DBObject>();
        for (int index = 0; index < list.size(); index++) {
            objectsList.add((DBObject) list.get(index));
        }
        collection.insert(objectsList);
        logger.info("Collection count: " + collection.count() + "\nSuccessfully populated collection: "
                + collectionName);
    } catch (IOException e) { // also covers UnsupportedEncodingException
        logger.error(e);
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                logger.error(e);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (IOException e) {
                logger.error(e);
            }
        }
    }
}
From source file: com.liferay.mongodb.hook.service.impl.MongoExpandoRowLocalServiceImpl.java
License: Open Source License

@Override
public int getRowsCount(long companyId, long classNameId, String tableName) {
    DBCollection dbCollection = MongoDBUtil.getCollection(companyId, classNameId, tableName);

    return (int) dbCollection.count();
}
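Note that count() returns a long while getRowsCount() returns an int, so the cast above silently truncates for collections with more than Integer.MAX_VALUE documents. A defensive variant, as a sketch:

    // Clamp to Integer.MAX_VALUE instead of letting the cast overflow
    return (int) Math.min(dbCollection.count(), Integer.MAX_VALUE);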
From source file: com.mycompany.bean.PlytaService.java

public String insertDocument(String DBName, String CollectionName, Plyta PlytaInstance) throws Exception {
    try {
        DBCollection collection = getConnection(DBName, CollectionName);
        BasicDBObject utwor;
        List<BasicDBObject> utwory = new ArrayList<BasicDBObject>();
        if (PlytaInstance.getUtwory() != null) {
            for (int i = 0; i < PlytaInstance.getUtwory().size(); i++) {
                utwor = new BasicDBObject();
                utwor.put("nazwa", PlytaInstance.getUtwory().get(i).getNazwa());
                utwor.put("dlugosc", PlytaInstance.getUtwory().get(i).getDlugosc());
                utwory.add(utwor);
            }
        }
        BasicDBObject document = new BasicDBObject("tytul", PlytaInstance.getTytul())
                .append("autor", PlytaInstance.getAutor())
                .append("liczbaUtworow", PlytaInstance.getLiczbaUtworow())
                .append("wytwornia", PlytaInstance.getWytwornia())
                .append("rokWydania", PlytaInstance.getRokWydania())
                .append("producent", PlytaInstance.getProducent())
                .append("gatunek", PlytaInstance.getGatunek())
                .append("dlugosc", PlytaInstance.getDlugosc())
                .append("single", PlytaInstance.getSingle())
                .append("nagrody", PlytaInstance.getNagrody())
                .append("rodzajAlbumu", PlytaInstance.getRodzajAlbumu())
                .append("utwory", utwory);
        collection.insert(document);
        log.log(Level.INFO, "Document inserted");
        // Returns "Successfully added document: " plus the new collection size
        return "Pomyslnie dodano dokument: " + collection.count();
    } catch (Exception e) {
        log.log(Level.SEVERE, "Document inserting error! Exception thrown");
        // "Error while adding the document:"
        System.out.println("Blad podczas dodawania dokumentu:" + e.getClass().getName());
        System.out.println("wiadomosc: " + e.getMessage()); // "message: "
        e.printStackTrace();
        return null;
    }
}
From source file: com.original.service.channel.config.Initializer.java
License: Open Source License

/**
 * Initializes a collection from a JSON items file, optionally dropping any existing data.
 *
 * @param db             database
 * @param collectionName collection to initialize
 * @param fileName       classpath resource (leading "/") or file-system path
 * @param force          drop and re-initialize the collection if it already has documents
 * @throws IOException
 */
private void initCollection(DB db, String collectionName, String fileName, boolean force) throws IOException {
    DBCollection collection = db.getCollection(collectionName);
    if (collection.getCount() > 0 && !force) {
        logger.info(collectionName + " already exists!");
        return;
    }
    if (force && collection.getCount() > 0) {
        collection.drop();
        logger.info("forced init, dropping collection: " + collectionName);
    }
    BufferedReader br = null;
    if (fileName.startsWith("/")) {
        InputStream is = Initializer.class.getResourceAsStream(fileName);
        br = new BufferedReader(new InputStreamReader(is));
    } else {
        br = new BufferedReader(new FileReader(fileName));
    }
    StringBuffer fileText = new StringBuffer();
    String line;
    while ((line = br.readLine()) != null) {
        fileText.append(line);
    }
    logger.info(collectionName + " Read:" + fileText);
    br.close();
    // convert each JSON item to a DBObject and insert it
    List<String> list = parseJsonItemsFile(fileText);
    for (String txt : list) {
        logger.info(collectionName + " init item:" + txt);
        DBObject dbObject = (DBObject) JSON.parse(txt);
        collection.insert(dbObject);
    }
    logger.info(collectionName + " init Done:" + collection.count());
}
From source file: com.socialsky.mods.MongoPersistor.java
License: Apache License

private void doCount(Message<JsonObject> message) {
    String collection = getMandatoryString("collection", message);
    if (collection == null) {
        return;
    }
    JsonObject matcher = message.body().getObject("matcher");
    DBCollection coll = db.getCollection(collection);
    long count;
    if (matcher == null) {
        count = coll.count();
    } else {
        count = coll.count(jsonToDBObject(matcher));
    }
    JsonObject reply = new JsonObject();
    reply.putNumber("count", count);
    sendOK(message, reply);
}
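The handler above chooses between the two count() overloads depending on whether a matcher was supplied. The query overload on its own, as a sketch against a DBCollection named coll (field and value illustrative):

    DBObject matcher = new BasicDBObject("status", "active");
    long matching = coll.count(matcher); // counts only documents where status == "active"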
From source file: com.tengen.FindUsers.java
License: Apache License

public static void main(String[] args) throws UnknownHostException {
    MongoClient client = new MongoClient();
    DB db = client.getDB("blog");
    DBCollection collection = db.getCollection("users");

    // find the document with _id "andrzej1" and print the _id of each match
    System.out.println("\nFind all: ");
    DBCursor cursor = collection.find(new BasicDBObject("_id", "andrzej1"));
    try {
        while (cursor.hasNext()) {
            DBObject cur = cursor.next();
            System.out.println(cur.get("_id"));
        }
    } finally {
        cursor.close();
    }

    // count every document in the collection
    System.out.println("\nCount:");
    long count = collection.count();
    System.out.println(count);
}
From source file: com.tomtom.speedtools.mongodb.migratedb.MongoDBMigration.java
License: Apache License

/**
 * Used to modify top-level documents. Documents will be stored in the collection when modified.
 *
 * @param db             Database.
 * @param collectionName Collection to iterate over.
 * @return Iterable to loop over all documents.
 */
@Nonnull
protected Iterable<DBObject> migrateCollection(@Nonnull final MongoDB db,
        @Nonnull final String collectionName) {
    assert db != null;
    assert collectionName != null;
    rootContext.flush();

    final DBCollection collection = db.getCollection(collectionName);
    final long count = collection.count();
    if (count > Integer.MAX_VALUE) {
        addProblem("", "Collection has too many records (" + count + ", where " + Integer.MAX_VALUE + " is max)");
    }

    /*
     * This set is going to contain all records, so size it large enough up front that it never gets
     * re-allocated. Per the HashMap class description
     * [http://docs.oracle.com/javase/6/docs/api/java/util/HashMap.html]: "If the initial capacity is
     * greater than the maximum number of entries divided by the load factor, no rehash operations
     * will ever occur."
     */
    @SuppressWarnings("NumericCastThatLosesPrecision")
    final Set<Object> recordIds = new HashSet<>((int) ((double) count / 0.75) + 1);

    return new IterableDelegate<DBObject, DBObject>(collection.find()) {
        private int index = 1;

        @Nullable
        @Override
        public DBObject next(@Nonnull final DBObject value) {
            final Context context = rootContext.createChild(value, collectionName + ':' + index);
            index++;

            // Each document should have an _id field.
            final Object id = value.get("_id");
            if (id == null) {
                addProblem(context.path, "Document has no _id field: " + value);
                return null;
            }

            // Don't process records we have already processed. This can happen if a record is modified.
            if (recordIds.contains(id)) {
                return null;
            }
            recordIds.add(id);

            // Keep the original value in an immutable string, referenced from 'flush()'.
            final String originalStringValue = value.toString();

            // Save object.
            context.add(new Command() {
                @Override
                public void flush() {
                    // If the new value differs from the old one, store it and print it.
                    final String stringValue = value.toString();
                    if (!originalStringValue.equals(stringValue)) {
                        if (!dryRun) {
                            collection.save(value);
                        }
                        LOG.debug(context.path + " - original document: " + originalStringValue);
                        LOG.debug(context.path + " - migrated document: " + value);
                    }
                }

                @Override
                public int ranking() {
                    return Integer.MAX_VALUE; // Saves should be executed last.
                }
            });
            return value;
        }
    };
}