List of usage examples for com.mongodb DBCollection findOne
@Nullable
public DBObject findOne()
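The examples below call findOne() with no arguments to fetch a single document from a collection. As a quick orientation, here is a minimal sketch of that call shape, assuming a local mongod on the default port and placeholder database/collection names ("mydb", "testCollection"); since findOne() is @Nullable, the result is checked for null before use.

import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

public class FindOneSketch {
    public static void main(String[] args) throws Exception {
        // connect to a local mongod (host, port, and names below are placeholders)
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        try {
            DB db = mongoClient.getDB("mydb");
            DBCollection coll = db.getCollection("testCollection");

            // findOne() returns the first document in the collection,
            // or null if the collection is empty (hence @Nullable)
            DBObject first = coll.findOne();
            if (first != null) {
                System.out.println(first);
            } else {
                System.out.println("Collection is empty");
            }
        } finally {
            mongoClient.close();
        }
    }
}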
From source file:controllers.Onto.java
License:Open Source License
public static Result getDerivedGraph(String database) {
    authorizeCrossRequests();
    try {
        DB db = mongoConnect(database);
        DBCollection graphColl = db.getCollection("derivedGraph");
        String response = graphColl.findOne().toString();
        mongoClose();
        return ok(response);
    } catch (Exception e) {
        return ok(e.getMessage());
    }
}
From source file:controllers.Onto.java
License:Open Source License
public static Result getOntologyTypes(String database) {
    authorizeCrossRequests();
    try {
        DB db = mongoConnect(database);
        DBCollection impactAndFlowTypesTreeColl = db.getCollection("impactAndFlowTypesTree");
        String responseTree = impactAndFlowTypesTreeColl.findOne().toString();
        DBCollection impactAndFlowTypesColl = db.getCollection("impactAndFlowTypes");
        String responsePlain = impactAndFlowTypesColl.findOne().toString();
        DBCollection relationTypesColl = db.getCollection("relationTypes");
        String response = relationTypesColl.findOne().toString();
        mongoClose();
        return ok("{\"tree\": " + responseTree + ", \"plain\": " + responsePlain + ", \"relationTypes\": "
                + response + "}");
    } catch (Exception e) {
        return ok(e.getMessage());
    }
}
From source file:controllers.Onto.java
License:Open Source License
public static Result getOntologyStats(String database) {
    authorizeCrossRequests();
    try {
        DB db = mongoConnect(database);
        DBCollection statsColl = db.getCollection("ontologyStats");
        String response = statsColl.findOne().toString();
        mongoClose();
        return ok(response);
    } catch (Exception e) {
        return ok(e.getMessage());
    }
}
From source file:controllers.Onto.java
License:Open Source License
public static Result getLastReport(String database) {
    authorizeCrossRequests();
    try {
        DB db = mongoConnect(database);
        DBCollection reportColl = db.getCollection("report");
        String response = reportColl.findOne().toString();
        mongoClose();
        return ok(response);
    } catch (Exception e) {
        return ok(e.toString());
    }
}
From source file:edu.csulaerp.db.ReferenceMongo.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes no args
 * @throws UnknownHostException if it cannot connect to a MongoDB instance at localhost:27017
 */
public static void main(final String[] args) throws UnknownHostException {
    // connect to the local database server
    MongoClient mongoClient = new MongoClient();

    /*
    // Authenticate - optional
    MongoCredential credential = MongoCredential.createMongoCRCredential(userName, database, password);
    MongoClient mongoClient = new MongoClient(new ServerAddress(), Arrays.asList(credential));
    */

    // get handle to "mydb"
    DB db = mongoClient.getDB("mydb");

    // get a list of the collections in this database and print them out
    Set<String> collectionNames = db.getCollectionNames();
    for (final String s : collectionNames) {
        System.out.println(s);
    }

    // get a collection object to work with
    DBCollection coll = db.getCollection("testCollection");

    // drop all the data in it
    coll.drop();

    // make a document and insert it
    BasicDBObject doc = new BasicDBObject("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new BasicDBObject("x", 203).append("y", 102));
    coll.insert(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    DBObject myDoc = coll.findOne();
    System.out.println(myDoc);

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    for (int i = 0; i < 100; i++) {
        coll.insert(new BasicDBObject().append("i", i));
    }
    System.out.println("total # of documents after inserting 100 small ones (should be 101) " + coll.getCount());

    // let's get all the documents in the collection and print them out
    DBCursor cursor = coll.find();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // now use a query to get 1 document out
    BasicDBObject query = new BasicDBObject("i", 71);
    cursor = coll.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // $ Operators are represented as strings
    query = new BasicDBObject("j", new BasicDBObject("$ne", 3)).append("k", new BasicDBObject("$gt", 10));
    cursor = coll.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // now use a range query to get a larger subset
    // find all where i > 50
    query = new BasicDBObject("i", new BasicDBObject("$gt", 50));
    cursor = coll.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    query = new BasicDBObject("i", new BasicDBObject("$gt", 20).append("$lte", 30));
    cursor = coll.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // Count all documents in a collection but take a maximum of one second to do so
    coll.find().maxTime(1, SECONDS).count();

    // Bulk operations
    BulkWriteOperation builder = coll.initializeOrderedBulkOperation();
    builder.insert(new BasicDBObject("_id", 1));
    builder.insert(new BasicDBObject("_id", 2));
    builder.insert(new BasicDBObject("_id", 3));
    builder.find(new BasicDBObject("_id", 1)).updateOne(new BasicDBObject("$set", new BasicDBObject("x", 2)));
    builder.find(new BasicDBObject("_id", 2)).removeOne();
    builder.find(new BasicDBObject("_id", 3)).replaceOne(new BasicDBObject("_id", 3).append("x", 4));
    BulkWriteResult result = builder.execute();
    System.out.println("Ordered bulk write result : " + result);

    // Unordered bulk operation - no guarantee of order of operation
    builder = coll.initializeUnorderedBulkOperation();
    builder.find(new BasicDBObject("_id", 1)).removeOne();
    builder.find(new BasicDBObject("_id", 2)).removeOne();
    result = builder.execute();
    System.out.println("Unordered bulk write result : " + result);

    // parallelScan
    ParallelScanOptions parallelScanOptions = ParallelScanOptions.builder().numCursors(3).batchSize(300).build();
    List<Cursor> cursors = coll.parallelScan(parallelScanOptions);
    for (Cursor pCursor : cursors) {
        while (pCursor.hasNext()) {
            System.out.println(pCursor.next());
        }
    }

    // release resources
    db.dropDatabase();
    mongoClient.close();
}
From source file:edu.emory.bmi.datacafe.mongo.MongoConnector.java
License:Open Source License
/**
 * Get only the values for a chosen subset of attributes.
 *
 * @param database            the database
 * @param collection          the collection in the database
 * @param ids                 the list of ids
 * @param idAttribute         the attribute key that is used as the ID
 * @param preferredAttributes the attributes to be added
 * @param removedAttributes   the attributes to be removed
 * @param addHeader           should the headers be added
 * @return the list of cursor values as strings
 */
public List<String> getAttributeValues(String database, String collection, List ids, String idAttribute,
        String[] preferredAttributes, String[] removedAttributes, boolean addHeader) {
    MongoCollection mongoCollection = new MongoCollection(database, collection);
    DBCollection collection1 = mongoCollection.getCollection();
    List<String> dbCursors = new ArrayList<>();

    Set<String> keySet = collection1.findOne().keySet();

    // Remove the mongo _id attribute.
    if ((MongoConstants.IS_ID_ATTRIBUTE_RANDOM_GENERATED) && (keySet.contains(MongoConstants.ID_ATTRIBUTE))) {
        keySet.remove(MongoConstants.ID_ATTRIBUTE);
    }

    HzServer.addValuesToMultiMap(datalakeID + DatacafeConstants.META_INDICES_MULTI_MAP_SUFFIX,
            DatacafeConstants.ATTRIBUTES_MAP_ENTRY_KEY, keySet);

    HzServer.addValueToMultiMap(datalakeID + DatacafeConstants.META_INDICES_MULTI_MAP_SUFFIX,
            DatacafeConstants.DATASOURCES_MAP_ENTRY_KEY,
            QueryWrapper.getDestinationInDataLakeFromDrill(database, collection));

    for (String key : keySet) {
        if ((datalakeID != null) && !datalakeID.trim().equals("")) {
            HzServer.addValueToMultiMap(datalakeID, key,
                    QueryWrapper.getDestinationInDataLakeFromDrill(database, collection));
        } else {
            HzServer.addValueToMultiMap(key,
                    QueryWrapper.getDestinationInDataLakeFromDrill(database, collection));
        }
    }

    for (Object id : ids) {
        DBCursor results = collection1.find(new BasicDBObject(idAttribute, id),
                MongoUtil.getDBObjFromAttributes(preferredAttributes, removedAttributes));

        String cursorValue;
        if (addHeader) {
            cursorValue = getCursorValues(DataSourcesRegistry.constructFullDataSourceName(database, collection),
                    results, true);
            addHeader = false;
        } else {
            cursorValue = getCursorValues(DataSourcesRegistry.constructFullDataSourceName(database, collection),
                    results);
        }
        dbCursors.add(cursorValue.trim());
    }
    return dbCursors;
}
From source file:edu.lander.twitter.TwitStreaming.java
public DBObject selectFirstRecordInCollection(DBCollection collection) {
    DBObject dbObject = collection.findOne();
    return dbObject;
}
From source file:edu.sjsu.carbonated.client.MongoDBDOA.java
License:Apache License
public static void main(String[] args) throws Exception {
    // connect to the local database server
    Mongo m = new Mongo();

    // get handle to "mydb"
    DB db = m.getDB("mydb");

    // Authenticate - optional
    // boolean auth = db.authenticate("foo", "bar");

    // get a list of the collections in this database and print them out
    Set<String> colls = db.getCollectionNames();
    for (String s : colls) {
        System.out.println(s);
    }

    // get a collection object to work with
    DBCollection coll = db.getCollection("testCollection");

    // drop all the data in it
    coll.drop();

    // make a document and insert it
    BasicDBObject doc = new BasicDBObject();
    doc.put("test", new AlbumResource("name", "desc", "user", "asdf"));
    doc.put("name", "MongoDB");
    doc.put("type", "database");
    doc.put("count", 1);

    BasicDBObject info = new BasicDBObject();
    info.put("x", 203);
    info.put("y", 102);
    doc.put("info", info);

    coll.insert(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    DBObject myDoc = coll.findOne();
    System.out.println(myDoc);

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    for (int i = 0; i < 100; i++) {
        coll.insert(new BasicDBObject().append("i", i));
    }
    System.out.println("total # of documents after inserting 100 small ones (should be 101) " + coll.getCount());

    // let's get all the documents in the collection and print them out
    DBCursor cur = coll.find();
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // now use a query to get 1 document out
    BasicDBObject query = new BasicDBObject();
    query.put("i", 71);
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // now use a range query to get a larger subset
    query = new BasicDBObject();
    query.put("i", new BasicDBObject("$gt", 50)); // i.e. find all where i > 50
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // range query with multiple constraints
    query = new BasicDBObject();
    query.put("i", new BasicDBObject("$gt", 20).append("$lte", 30)); // i.e. 20 < i <= 30
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // create an index on the "i" field
    coll.createIndex(new BasicDBObject("i", 1)); // create index on "i", ascending

    // list the indexes on the collection
    List<DBObject> list = coll.getIndexInfo();
    for (DBObject o : list) {
        System.out.println(o);
    }

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    // see if any previous operation had an error
    System.out.println("Previous error : " + db.getPreviousError());

    // force an error
    db.forceError();

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    db.resetError();
}
From source file:es.devcircus.mongodb_examples.hello_world.Main.java
License:Open Source License
/**
 * Method that retrieves the first document from a given collection using findOne().
 */
public static void findingTheFirstDocumentInACollectionUsingFindOne() {
    System.out.println();
    System.out.println("---------------------------------------------------------------");
    System.out.println("   Finding the First Document In A Collection using findOne()   ");
    System.out.println("---------------------------------------------------------------");
    System.out.println();

    /* Finding the First Document In A Collection using findOne()
     * To show that the document we inserted in the previous step is there,
     * we can do a simple findOne() operation to get the first document
     * in the collection. This method returns a single document (rather
     * than the DBCursor that the find() operation returns), and it's
     * useful for things where there only is one document, or you are
     * only interested in the first. You don't have to deal with the
     * cursor. */
    DBCollection coll = db.getCollection(TEST_COLLECTION);
    DBObject myDoc = coll.findOne();

    System.out.print(" Información del elemento..: " + myDoc + "\n");

    /* and you should see */
    /* { "_id" : "49902cde5162504500b45c2c" , "name" : "MongoDB" ,
     *   "type" : "database" , "count" : 1 , "info" : { "x" : 203 , "y" : 102}} */

    /* Note the _id element has been added automatically by MongoDB to your
     * document. Remember, MongoDB reserves element names that start
     * with "_"/"$" for internal use. */
}
From source file:eu.artist.cloud.auditors.AbstractedAvailabilityLogger.java
License:Open Source License
/**
 * @param TemplateIDs the template IDs to log (one DB record per template ID)
 * @param DBuser      the database user
 * @param DBpass      the database password
 * @param databaseIP  the host address of the backend MongoDB
 */
public void logAvailability(ArrayList<String> TemplateIDs, String DBuser, String DBpass, String databaseIP)
        throws UnknownHostException, MongoException {
    // TODO Auto-generated method stub
    // needs to be periodic - but periodicity may be on the previous level -> Availability Auditor
    // better on the Availability Auditor level, since it will have refreshed also the state
    // needs to raise one thread per node, concurrent requests for all nodes?
    // sla is per template id? logically yes, so thread must be per template id
    // mongodb connection
    // we need to pass an object that will be the DB record and will contain all necessary information
    // this object will be transformed to json
    // more efficient to pass an arraylist of these objects (for all template IDs in one sample) and make once
    // the connection to the DB (for needed in the basic operation)
    for (String record : TemplateIDs) {
        Mongo mongoClient = new Mongo(databaseIP);
        DB db = mongoClient.getDB("3alib");
        System.out.println("Host address for Backend DB:" + databaseIP);

        Set<String> colls = db.getCollectionNames();
        for (String s : colls) {
            System.out.println("These are the collections... " + s);
        }

        DBCollection coll = db.getCollection("log_samples");

        // log sample
        /* BasicDBObject doc = new BasicDBObject("name1", "MongoDB2").
                append("type", "database").
                append("count", 1).
                append("info", new BasicDBObject("x", 203).append("y", 102));
        */
        // DBObject obj = new DBObject();
        JSON jsonObj = new JSON();
        DBObject obj = (DBObject) jsonObj.parse(record);
        // BasicDBObject doc = new BasicDBObject(record);
        ObjectId obid = new ObjectId();
        // System.out.println("This is the id:" + obj.get("_id"));
        coll.insert(obj);
        DBObject myDoc = coll.findOne();
        // System.out.println(myDoc);
        // coll.
        mongoClient.close();

        // log file must be per template ID so that it can be appended each time, if we start/stop the auditing action
        // ideally templateID_month_year
        // return 0;
    }
    System.out.println("Records included");
}