List of usage examples for com.mongodb DBCollection insert
public WriteResult insert(final List<? extends DBObject> documents)
From source file:com.ibm.gaiandb.GaianDBUtilityProcedures.java
License:Open Source License
public static void populateMongo(String url, String collection, String csvKeyValueAssignments) throws Exception { apiStart(populateMongo, Arrays.asList(url, collection, csvKeyValueAssignments)); // mongo collection ~ rdbms table ; mongo document ~ rdbms record MongoConnectionParams connDetails = new MongoConnectionParams(url); DB mongoDb = MongoConnectionFactory.getMongoDB(connDetails); if (null == csvKeyValueAssignments) { if (mongoDb.collectionExists(collection)) mongoDb.getCollection(collection).drop(); return;//from w ww.j a va 2 s . co m } DBCollection dbcollection = mongoDb.collectionExists(collection) ? mongoDb.getCollection(collection) : mongoDb.createCollection(collection, null); String[] cellAssignments = Util.splitByCommas(csvKeyValueAssignments); BasicDBObject doc = new BasicDBObject(); for (String cellAssgnmt : cellAssignments) { int idx = cellAssgnmt.indexOf('='); if (1 > idx) continue; doc.put(cellAssgnmt.substring(0, idx), cellAssgnmt.substring(idx + 1)); } dbcollection.insert(doc); }
From source file:com.ibm.ws.lars.rest.PersistenceBean.java
License:Apache License
@Override public Asset createAsset(Asset newAsset) throws InvalidJsonAssetException { DBObject obj = new BasicDBObject(newAsset.getProperties()); convertHexIdToObjectId(obj);// www.ja va 2 s. c o m DBCollection coll = getAssetCollection(); if (logger.isLoggable(Level.FINE)) { logger.fine("createAsset: inserting object into the database: " + obj); } coll.insert(obj); Asset createdAsset = null; try { createdAsset = retrieveAsset((ObjectId) obj.get(ID)); } catch (NonExistentArtefactException e) { // This should not happen. If it does it is a repository bug throw new RepositoryException("Created asset could not be retrieved from the database.", e); } return createdAsset; }
From source file:com.ibm.ws.lars.rest.PersistenceBean.java
License:Apache License
/** * @param attachment/*from w w w . j a va 2 s . co m*/ * @return */ @Override public Attachment createAttachmentMetadata(Attachment attachment) { BasicDBObject state = new BasicDBObject(attachment.getProperties()); convertHexIdToObjectId(state); DBCollection coll = getAttachmentCollection(); if (logger.isLoggable(Level.FINE)) { logger.fine("createAttachmentMetadata: inserting new attachment " + state); } coll.insert(state); Object idObject = state.get(ID); String id; if (idObject instanceof String) { id = (String) idObject; } else if (idObject instanceof ObjectId) { id = ((ObjectId) idObject).toStringMongod(); } else { throw new AssertionError("_id should be either String of ObjectId"); } try { return retrieveAttachmentMetadata(id); } catch (NonExistentArtefactException e) { throw new RepositoryException("Created attachment could not be retrieved from the persistence store", e); } }
From source file:com.ifactory.service.weather.photo.PhotoService.java
License:Apache License
public boolean add(Photo photo) { DB db = mongoClient.getDB(this.dbName); DBCollection coll = db.getCollection(PHOTO_COLLECTION); BasicDBList geo = new BasicDBList(); geo.add(photo.getLatitude());//from w w w.j a va 2 s .c om geo.add(photo.getLongitude()); BasicDBObject query = new BasicDBObject("name", photo.getName()).append("day", photo.getDay()) .append("weather", photo.getWeatherId()).append("timestamp", photo.getTimestamp()) .append("geo", new BasicDBObject("type", "Point").append("coordinates", geo)); try { coll.insert(query); } catch (MongoException e) { return false; } return true; }
From source file:com.ijuru.ijambo.dao.PlayerDAO.java
License:Open Source License
/**
 * Saves a player, replacing any previously stored record for the same player.
 *
 * @param player the player
 */
public void save(Player player) {
    DBCollection players = db.getCollection("players");
    // Delete-then-insert keeps at most one document per player.
    remove(player);
    BasicDBObject document = new BasicDBObject()
            .append("identifier", player.getIdentifier())
            .append("prevAnswer", player.getPrevAnswer());
    players.insert(document);
}
From source file:com.ijuru.ijambo.dao.WordDAO.java
License:Open Source License
/**
 * Saves a word to the "words" collection.
 *
 * @param word the word
 */
public void save(Word word) {
    // Difficulty is persisted as the enum ordinal (matches existing documents).
    BasicDBObject document = new BasicDBObject()
            .append("word", word.getWord())
            .append("meaning", word.getMeaning())
            .append("difficulty", word.getDifficulty().ordinal());
    db.getCollection("words").insert(document);
}
From source file:com.ikanow.infinit.e.api.utils.RESTTools.java
License:Open Source License
/** * Creates a new session for a user, adding * an entry to our cookie table (maps cookieid * to userid) and starts the clock// www. jav a 2 s .c o m * * @param username * @param bMulti if true lets you login from many sources * @param bOverride if false will fail if already logged in * @return */ public static ObjectId createSession(ObjectId userid, boolean bMulti, boolean bOverride) { try { DBCollection cookieColl = DbManager.getSocial().getCookies(); if (!bMulti) { // Otherwise allow multiple cookies for this user //remove any old cookie for this user BasicDBObject dbQuery = new BasicDBObject(); dbQuery.put("profileId", userid); dbQuery.put("apiKey", new BasicDBObject(DbManager.exists_, false)); DBCursor dbc = cookieColl.find(dbQuery); if (bOverride) { while (dbc.hasNext()) { cookieColl.remove(dbc.next()); } } //TESTED else if (dbc.length() > 0) { return null; } //TESTED } //Find user //create a new entry CookiePojo cp = new CookiePojo(); ObjectId randomObjectId = generateRandomId(); cp.set_id(randomObjectId); cp.setCookieId(randomObjectId); cp.setLastActivity(new Date()); cp.setProfileId(userid); cp.setStartDate(new Date()); cookieColl.insert(cp.toDb()); //return cookieid return cp.getCookieId(); } catch (Exception e) { logger.error("Line: [" + e.getStackTrace()[2].getLineNumber() + "] " + e.getMessage()); e.printStackTrace(); } return null; }
From source file:com.ikanow.infinit.e.core.mapreduce.HadoopJobRunner.java
License:Open Source License
/**
 * Moves the output of a job from output_tmp to output and deletes
 * the tmp collection.
 *
 * Atomic plan:
 *  - If not append: move customlookup pointer to tmp collection, drop old collection.
 *  - If append: set sync flag (find/mod), move results from tmp to old, unset sync flag.
 *
 * @param cmr the job whose output should be promoted
 * @throws IOException
 * @throws SAXException
 * @throws ParserConfigurationException
 */
private void moveTempOutput(CustomMapReduceJobPojo cmr)
        throws IOException, SAXException, ParserConfigurationException {
    // If we are an export job then move files:
    bringTempOutputToFront(cmr); // (the rest of this will just do nothing)

    // step1: build out any of the post proc arguments (limit / sort spec)
    DBObject postProcObject = null;
    boolean limitAllData = true;
    boolean hasSort = false;
    int limit = 0;
    BasicDBObject sort = new BasicDBObject();
    try {
        postProcObject = (DBObject) com.mongodb.util.JSON
                .parse(getQueryOrProcessing(cmr.query, QuerySpec.POSTPROC));
        if (postProcObject != null) {
            if (postProcObject.containsField("limitAllData")) {
                limitAllData = (Boolean) postProcObject.get("limitAllData");
            }
            if (postProcObject.containsField("limit")) {
                limit = (Integer) postProcObject.get("limit");
                if (postProcObject.containsField("sortField")) {
                    String sfield = (String) postProcObject.get("sortField");
                    int sortDir = 1;
                    if (postProcObject.containsField("sortDirection")) {
                        sortDir = (Integer) postProcObject.get("sortDirection");
                    }
                    sort.put(sfield, sortDir);
                    hasSort = true;
                } else if (limit > 0) {
                    // set a default sort because the user posted a limit
                    sort.put("_id", -1);
                    hasSort = true;
                }
            }
        }
    } catch (Exception ex) {
        // Post-processing spec was malformed; log and continue with defaults.
        _logger.info(
                "job_error_post_proc_title=" + cmr.jobtitle + " job_error_post_proc_id=" + cmr._id.toString()
                        + " job_error_post_proc_message=" + HarvestExceptionUtils.createExceptionMessage(ex));
    }
    // step 2a: if not appending results then work on temp collection and swap to main
    if ((null == cmr.appendResults) || !cmr.appendResults) {
        // format temp then change lookup pointer to temp collection
        // transform all the results into necessary format:
        // (rewrite {_id: X, ...} as {key: X, ...} with a fresh generated _id)
        DBCursor dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                .find(new BasicDBObject("key", null)).sort(sort).limit(limit);
        while (dbc_tmp.hasNext()) {
            DBObject dbo = dbc_tmp.next();
            Object key = dbo.get("_id");
            dbo.put("key", key);
            dbo.removeField("_id");
            DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp).insert(dbo);
        }
        // Remove the untransformed originals (those still lacking a "key" field).
        DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                .remove(new BasicDBObject("key", null));
        // swap the output collections (pointer swap in the lookup document)
        BasicDBObject notappendupdates = new BasicDBObject(CustomMapReduceJobPojo.outputCollection_,
                cmr.outputCollectionTemp);
        notappendupdates.append(CustomMapReduceJobPojo.outputCollectionTemp_, cmr.outputCollection);
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, notappendupdates));
        // Mirror the swap on the in-memory pojo so step3 drops the right collection.
        String temp = cmr.outputCollectionTemp;
        cmr.outputCollectionTemp = cmr.outputCollection;
        cmr.outputCollection = temp;
    } else {
        // step 2b: if appending results then drop modified results in output collection
        // Raise the sync flag while the output collection is being mutated.
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, new BasicDBObject("isUpdatingOutput", true)));
        // remove any aged out results
        if ((null != cmr.appendAgeOutInDays) && cmr.appendAgeOutInDays > 0) {
            // remove any results that have aged out (ObjectIds encode a timestamp,
            // so an _id less-than comparison is a time cutoff)
            long ageOutMS = (long) (cmr.appendAgeOutInDays * MS_IN_DAY);
            Date lastAgeOut = new Date(((new Date()).getTime() - ageOutMS));
            DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollection).remove(
                    new BasicDBObject("_id", new BasicDBObject(MongoDbManager.lt_, new ObjectId(lastAgeOut))));
        }
        DBCursor dbc_tmp;
        if (!limitAllData) {
            // sort and limit the temp data set because we only want to process it
            dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                    .find(new BasicDBObject("key", null)).sort(sort).limit(limit);
            limit = 0; // reset limit so we get everything in a few steps (we only want to limit the new data)
        } else {
            dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                    .find(new BasicDBObject("key", null));
        }
        DBCollection dbc = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollection);
        // transform temp results and dump into output collection
        while (dbc_tmp.hasNext()) {
            DBObject dbo = dbc_tmp.next();
            // transform the dbo to format {_id:ObjectId, key:(prev_id), value:value}
            Object key = dbo.get("_id");
            dbo.put("key", key);
            dbo.removeField("_id");
            // _id field should be automatically set to objectid when inserting now
            dbc.insert(dbo);
        }
        // if there is a sort, we need to apply it to all the data now
        if (hasSort) {
            ObjectId OID = new ObjectId();
            BasicDBObject query = new BasicDBObject("_id", new BasicDBObject(MongoDbManager.lt_, OID));
            // find everything inserted before now and sort/limit the data
            DBCursor dbc_sort = dbc.find(query).sort(sort).limit(limit);
            while (dbc_sort.hasNext()) {
                // reinsert the data into db (it should be in sorted order naturally now)
                DBObject dbo = dbc_sort.next();
                dbo.removeField("_id");
                dbc.insert(dbo);
            }
            // remove everything inserted before we reorganized everything
            // (should leave only the new results in natural order)
            dbc.remove(query);
        }
        // Lower the sync flag now that the output collection is consistent again.
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, new BasicDBObject("isUpdatingOutput", false)));
    }
    // step3: clean up temp output collection so we can use it again (drop it, removing chunks)
    try {
        DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp).drop();
    } catch (Exception e) {
        // That's fine, it probably just doesn't exist yet...
    }
}
From source file:com.ikanow.infinit.e.data_model.store.MongoDbManager.java
License:Apache License
/**
 * Ad-hoc manual test harness comparing MongoDB write-concern behavior
 * (performance, WriteResult/getLastError contents, and error reporting)
 * against a server given on the command line. Each section is gated by a
 * hard-coded boolean flag; flip the flag and recompile to run a section.
 * Uses deprecated getLastError()-era driver APIs on purpose.
 */
@SuppressWarnings("deprecation")
public static void main(String[] args) throws UnknownHostException {
    MongoClient mc = new MongoClient(args[0]);
    long tnow = 0;
    DB db = mc.getDB("test");
    DBCollection test = db.getCollection("test123");
    BasicDBObject outObj = new BasicDBObject();
    int ITS = 1000;
    test.drop();
    // Section toggles (edit and recompile to enable).
    boolean checkPerformance = false;
    boolean checkFunctionality = false;
    boolean checkErrors = false;
    // 1] Performance
    if (checkPerformance) {
        // ack'd
        db.setWriteConcern(WriteConcern.ACKNOWLEDGED);
        test.drop();
        tnow = new Date().getTime();
        for (int i = 0; i < ITS; ++i) {
            outObj.remove("_id");
            outObj.put("val", i);
            test.save(outObj);
        }
        tnow = new Date().getTime() - tnow;
        System.out.println("1: Ack'd: " + tnow);
        // un ack'd
        db.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
        test.drop();
        tnow = new Date().getTime();
        outObj = new BasicDBObject();
        for (int i = 0; i < ITS; ++i) {
            outObj.remove("_id");
            outObj.put("val", i);
            test.save(outObj);
        }
        tnow = new Date().getTime() - tnow;
        System.out.println("2: unAck'd: " + tnow);
        // un ack'd but call getLastError after every write
        db.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
        test.drop();
        tnow = new Date().getTime();
        outObj = new BasicDBObject();
        for (int i = 0; i < ITS; ++i) {
            outObj.remove("_id");
            outObj.put("val", i);
            test.save(outObj);
            db.getLastError();
        }
        tnow = new Date().getTime() - tnow;
        test.drop();
        System.out.println("3: unAck'd but GLEd: " + tnow);
        // ack'd override (per-call write concern overrides the DB default)
        db.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
        test.drop();
        tnow = new Date().getTime();
        outObj = new BasicDBObject();
        for (int i = 0; i < ITS; ++i) {
            outObj.remove("_id");
            outObj.put("val", i);
            test.save(outObj, WriteConcern.ACKNOWLEDGED);
            db.getLastError();
        }
        tnow = new Date().getTime() - tnow;
        System.out.println("4: unAck'd but ACKd: " + tnow);
        // Performance Results:
        // 2.6) (unack'd 100ms ... ack'd 27000)
        // 2.4) (same)
    }
    // 2] Functionality: what WriteResult / getLastError report per concern
    if (checkFunctionality) {
        // Unack:
        db.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
        WriteResult wr = test.update(new BasicDBObject(),
                new BasicDBObject(DbManager.set_, new BasicDBObject("val2", "x")), false, true);
        CommandResult cr = db.getLastError();
        System.out.println("UNACK: wr: " + wr);
        System.out.println("UNACK: cr: " + cr);
        // bonus, check that we get N==0 when insert dup object
        WriteResult wr2 = test.insert(outObj);
        System.out.println("ACK wr2 = " + wr2.getN() + " all = " + wr2);
        CommandResult cr2 = db.getLastError();
        System.out.println("ACK cr2 = " + cr2);
        // Ack1:
        db.setWriteConcern(WriteConcern.ACKNOWLEDGED);
        wr = test.update(new BasicDBObject(),
                new BasicDBObject(DbManager.set_, new BasicDBObject("val3", "x")), false, true);
        cr = db.getLastError();
        System.out.println("ACK1: wr: " + wr);
        System.out.println("ACK1: cr: " + cr);
        // Ack2: per-call ACK override on an unack'd DB
        db.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
        wr = test.update(new BasicDBObject(),
                new BasicDBObject(DbManager.set_, new BasicDBObject("val4", "x")), false, true,
                WriteConcern.ACKNOWLEDGED);
        cr = db.getLastError();
        System.out.println("ACK2: wr: " + wr);
        System.out.println("ACK2: cr: " + cr);
        // bonus, check that we get N==0 when insert dup object
        wr2 = test.insert(outObj);
        System.out.println("ACK wr2 = " + wr2.getN() + " all = " + wr2);
        // Functionality results:
        // 2.6: unack wr == N/A, otherwise both have "n", "ok"
        // 2.4: unack wr == N/A all other wrs + crs identical
    }
    // 3] Error reporting: force a shard-key modification error under each concern
    if (checkErrors) {
        // set up sharding
        DbManager.getDB("admin").command(new BasicDBObject("enablesharding", "test"));
        // Ack:
        try {
            test.drop();
            test.createIndex(new BasicDBObject("key", 1));
            BasicDBObject command1 = new BasicDBObject("shardcollection", "test.test123");
            command1.append("key", new BasicDBObject("key", 1));
            DbManager.getDB("admin").command(command1);
            db.setWriteConcern(WriteConcern.ACKNOWLEDGED);
            outObj = new BasicDBObject("key", "test");
            test.save(outObj);
            WriteResult wr = test.update(new BasicDBObject(),
                    new BasicDBObject(DbManager.set_, new BasicDBObject("key", "test2")));
            System.out.println("ACK wr = " + wr);
        } catch (Exception e) {
            System.out.println("ACK err = " + e.toString());
        }
        // UnAck (with per-call ACK override):
        try {
            test.drop();
            test.createIndex(new BasicDBObject("key", 1));
            BasicDBObject command1 = new BasicDBObject("shardcollection", "test.test123");
            command1.append("key", new BasicDBObject("key", 1));
            DbManager.getDB("admin").command(command1);
            db.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
            outObj = new BasicDBObject("key", "test");
            test.save(outObj);
            WriteResult wr = test.update(new BasicDBObject(),
                    new BasicDBObject(DbManager.set_, new BasicDBObject("key", "test2")), false, false,
                    WriteConcern.ACKNOWLEDGED);
            System.out.println("ACK override wr = " + wr);
        } catch (Exception e) {
            System.out.println("ACK override err = " + e.toString());
        }
        // UnAck:
        try {
            test.drop();
            test.createIndex(new BasicDBObject("key", 1));
            BasicDBObject command1 = new BasicDBObject("shardcollection", "test.test123");
            command1.append("key", new BasicDBObject("key", 1));
            DbManager.getDB("admin").command(command1);
            db.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
            outObj = new BasicDBObject("key", "test");
            test.save(outObj);
            WriteResult wr = test.update(new BasicDBObject(),
                    new BasicDBObject(DbManager.set_, new BasicDBObject("key", "test2")));
            System.out.println("UNACK wr = " + wr);
        } catch (Exception e) {
            System.out.println("UNACK err = " + e.toString());
        }
        // UnAck + GLE:
        try {
            test.drop();
            test.createIndex(new BasicDBObject("key", 1));
            BasicDBObject command1 = new BasicDBObject("shardcollection", "test.test123");
            command1.append("key", new BasicDBObject("key", 1));
            DbManager.getDB("admin").command(command1);
            db.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
            outObj = new BasicDBObject("key", "test");
            test.save(outObj);
            WriteResult wr = test.update(new BasicDBObject(),
                    new BasicDBObject(DbManager.set_, new BasicDBObject("key", "test2")));
            CommandResult cr = db.getLastError();
            System.out.println("UNACK GLE wr = " + wr);
            System.out.println("UNACK GLE cr = " + cr);
        } catch (Exception e) {
            System.out.println("UNACK GLE err = " + e.toString());
        }
        // Error handling:
        // 2.6:
        // Ack - exception
        // Ack override - exception
        // UnAck - no error given
        // UnAck + GLE - gle error
        // 2.4:
        // Ack - exception
        // Ack override - exception
        // UnAck - no error given
        // UnAck + GLE - gle error
    }
}
From source file:com.images3.data.impl.ImageAccessImplMongoDB.java
License:Apache License
/**
 * Maps the given image record to a BasicDBObject and inserts it into the
 * "Image" collection.
 *
 * @param image the image record to persist
 */
public void insertImage(ImageOS image) {
    getDatabase().getCollection("Image").insert(getObjectMapper().mapToBasicDBObject(image));
}