List of usage examples for com.mongodb DBCollection insert
public WriteResult insert(final List<? extends DBObject> documents, final InsertOptions insertOptions)
Insert documents into a collection.
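Before the source-file examples below, here is a minimal, self-contained sketch of this overload. It is not taken from any of the projects listed; the host/port, database name ("exampledb"), and collection name ("examples") are placeholder assumptions.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.InsertOptions;
import com.mongodb.MongoClient;
import com.mongodb.WriteConcern;
import com.mongodb.WriteResult;

import java.util.Arrays;
import java.util.List;

public class InsertOptionsExample {
    public static void main(String[] args) {
        // assumed connection details; adjust for your deployment
        MongoClient client = new MongoClient("localhost", 27017);
        try {
            DB db = client.getDB("exampledb");
            DBCollection coll = db.getCollection("examples");

            List<DBObject> docs = Arrays.<DBObject>asList(
                    new BasicDBObject("name", "first"),
                    new BasicDBObject("name", "second"));

            // continueOnError keeps inserting the remaining documents
            // even if one of them fails (e.g. a duplicate key)
            InsertOptions options = new InsertOptions()
                    .continueOnError(true)
                    .writeConcern(WriteConcern.ACKNOWLEDGED);

            WriteResult result = coll.insert(docs, options);
            System.out.println("inserted: " + result.getN());
        } finally {
            client.close();
        }
    }
}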
From source file:org.grails.datastore.mapping.mongo.MongoSession.java
License:Apache License
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
protected void flushPendingInserts(final Map<PersistentEntity, Collection<PendingInsert>> inserts) {
    // Optimizes saving multiple entities at once
    for (final PersistentEntity entity : inserts.keySet()) {
        final MongoTemplate template = getMongoTemplate(entity.isRoot() ? entity : entity.getRootEntity());
        final String collectionNameToUse = getCollectionName(entity.isRoot() ? entity : entity.getRootEntity());
        template.execute(new DbCallback<Object>() {
            public Object doInDB(DB db) throws MongoException, DataAccessException {
                WriteConcern writeConcernToUse = writeConcern;
                writeConcernToUse = getDeclaredWriteConcern(writeConcernToUse, entity);
                final DBCollection collection = db.getCollection(collectionNameToUse);
                final Collection<PendingInsert> pendingInserts = inserts.get(entity);
                List<DBObject> dbObjects = new LinkedList<DBObject>();
                List<PendingOperation> postOperations = new LinkedList<PendingOperation>();
                for (PendingInsert pendingInsert : pendingInserts) {
                    final List<PendingOperation> preOperations = pendingInsert.getPreOperations();
                    for (PendingOperation preOperation : preOperations) {
                        preOperation.run();
                    }
                    dbObjects.add((DBObject) pendingInsert.getNativeEntry());
                    postOperations.addAll(pendingInsert.getCascadeOperations());
                    pendingInsert.run();
                }
                WriteResult writeResult = collection.insert(dbObjects.toArray(new DBObject[dbObjects.size()]),
                        writeConcernToUse);
                if (writeResult.getError() != null) {
                    errorOccured = true;
                    throw new DataIntegrityViolationException(writeResult.getError());
                }
                for (PendingOperation pendingOperation : postOperations) {
                    pendingOperation.run();
                }
                return null;
            }
        });
    }
}
From source file:org.graylog2.metrics.MongoDbMetricsReporter.java
License:Open Source License
@Override
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
        SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters,
        SortedMap<String, Timer> timers) {
    final Date timestamp = new Date(clock.getTime());
    List<DBObject> docs = Lists.newArrayListWithExpectedSize(
            gauges.size() + counters.size() + histograms.size() + meters.size() + timers.size());
    collectGaugeReports(docs, gauges, timestamp);
    collectCounterReports(docs, counters, timestamp);
    collectHistogramReports(docs, histograms, timestamp);
    collectMeterReports(docs, meters, timestamp);
    collectTimerReports(docs, timers, timestamp);
    try {
        final DBCollection collection = mongoConnection.getDatabase().getCollection("graylog2_metrics");
        // don't hang on to the data for too long.
        final BasicDBObject indexField = new BasicDBObject("timestamp", 1);
        final BasicDBObject indexOptions = new BasicDBObject("expireAfterSeconds", 5 * 60);
        collection.createIndex(indexField, indexOptions);
        collection.insert(docs, WriteConcern.UNACKNOWLEDGED);
    } catch (Exception e) {
        LOG.warn("Unable to write graylog2 metrics to mongodb. Ignoring this error.", e);
    }
}
From source file:org.hibernate.ogm.datastore.mongodb.MongoDBDialect.java
License:LGPL
private void flushInserts(Map<DBCollection, BatchInsertionTask> inserts) {
    for (Map.Entry<DBCollection, BatchInsertionTask> entry : inserts.entrySet()) {
        DBCollection collection = entry.getKey();
        if (entry.getValue().isEmpty()) {
            // has been emptied due to subsequent removals before flushes
            continue;
        }
        try {
            collection.insert(entry.getValue().getAll(), entry.getValue().getWriteConcern());
        } catch (DuplicateKeyException dke) {
            throw new TupleAlreadyExistsException(entry.getValue().getEntityKeyMetadata(), null, dke);
        }
    }
    inserts.clear();
}
From source file:org.mongodb.demos.replication.RetryDemo.java
License:Apache License
public static void main(String args[]) throws Exception {
    //TODO : See https://gist.github.com/tgrall/954aa021ba420639d614
    MongoClient client = new MongoClient(
            Arrays.asList(new ServerAddress("localhost", 27017), new ServerAddress("localhost", 27018)));
    DB db = client.getDB("jug");
    DBCollection coll = db.getCollection("bar");
    System.out.println("BEFORE");
    boolean loop = true;
    while (loop) {
        int backoff = 0, counter = 0;
        DBObject obj = null;
        do {
            try {
                obj = BasicDBObjectBuilder.start().add("name", "mydoc").add("counter", counter++).get();
                //System.out.print("\t inserting...");
                coll.insert(obj, new WriteConcern(2, 3000, true, false));
                backoff = 0;
                // System.out.println(" OK : Document inserted...");
            } catch (Exception e) {
                System.out.println(e.toString());
                if (backoff == 3) {
                    throw new Exception("Tried 3 times... still failed");
                }
                backoff++;
                System.out.println("Waiting for " + backoff + "s");
                Thread.sleep(1500 * backoff);
            }
        } while (backoff != 0);
    }
    System.out.println("AFTER");
}
From source file:org.s1.mongodb.cluster.MongoDBOperationLog.java
License:Apache License
@Override
public void addToLocalLog(MessageBean m) {
    DBCollection coll = getCollection();
    Map<String, Object> m1 = m.toMap();
    m1.put("done", false);
    coll.insert(MongoDBFormat.fromMap(m1), WriteConcern.FSYNC_SAFE);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Node write log new record: " + m.toString(true));
    } else if (LOG.isDebugEnabled()) {
        LOG.debug("Node write log new record: " + m.toString(false));
    }
}
From source file:org.springframework.datastore.mapping.mongo.MongoSession.java
License:Apache License
@Override
protected void flushPendingInserts(final Map<PersistentEntity, Collection<PendingInsert>> inserts) {
    // Optimizes saving multiple entities at once
    for (final PersistentEntity entity : inserts.keySet()) {
        final MongoTemplate template = getMongoTemplate(entity.isRoot() ? entity : entity.getRootEntity());
        template.execute(new DbCallback<Object>() {
            @Override
            public Object doInDB(DB db) throws MongoException, DataAccessException {
                final DBCollection collection = db.getCollection(template.getDefaultCollectionName());
                final Collection<PendingInsert> pendingInserts = inserts.get(entity);
                List<DBObject> dbObjects = new LinkedList<DBObject>();
                List<PendingOperation> postOperations = new LinkedList<PendingOperation>();
                final MongoEntityPersister persister = (MongoEntityPersister) getPersister(entity);
                for (PendingInsert pendingInsert : pendingInserts) {
                    final EntityAccess entityAccess = pendingInsert.getEntityAccess();
                    if (persister.fireBeforeInsert(entity, entityAccess))
                        continue;
                    final List<PendingOperation> preOperations = pendingInsert.getPreOperations();
                    for (PendingOperation preOperation : preOperations) {
                        preOperation.run();
                    }
                    dbObjects.add((DBObject) pendingInsert.getNativeEntry());
                    postOperations.addAll(pendingInsert.getCascadeOperations());
                }
                collection.insert(dbObjects.toArray(new DBObject[dbObjects.size()]), writeConcern);
                for (PendingOperation pendingOperation : postOperations) {
                    pendingOperation.run();
                }
                return null;
            }
        });
    }
}
From source file:org.teiid.translator.mongodb.MongoDBUpdateExecution.java
License:Open Source License
private void executeInternal() throws TranslatorException {
    DBCollection collection = getCollection(this.visitor.mongoDoc.getTargetTable());
    MongoDocument mongoDoc = this.visitor.mongoDoc;
    AggregationOptions options = this.executionFactory.getOptions(this.executionContext.getBatchSize());
    List<WriteResult> executionResults = new ArrayList<WriteResult>();

    if (this.command instanceof Insert) {
        // get pull key based documents to embed
        LinkedHashMap<String, DBObject> embeddedDocuments = fetchEmbeddedDocuments();
        // check if this document needs to be embedded in any other document
        if (mongoDoc.isMerged()) {
            DBObject match = getInsertMatch(mongoDoc, this.visitor.columnValues);
            BasicDBObject insert = this.visitor.getInsert(embeddedDocuments);
            if (mongoDoc.getMergeKey().getAssociation() == Association.MANY) {
                removeParentKey(mongoDoc, insert);
                BasicDBObject insertDoc = new BasicDBObject(mongoDoc.getQualifiedName(true), insert);
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$push\": {" + insertDoc + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                executionResults.add(collection.update(match, new BasicDBObject("$push", insertDoc), false, //$NON-NLS-1$
                        true, WriteConcern.ACKNOWLEDGED));
            } else {
                insert.remove("_id"); //$NON-NLS-1$
                BasicDBObject insertDoc = new BasicDBObject(mongoDoc.getQualifiedName(true), insert);
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$set\": {" + insertDoc + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                executionResults.add(collection.update(match, new BasicDBObject("$set", insertDoc), false, true, //$NON-NLS-1$
                        WriteConcern.ACKNOWLEDGED));
            }
        } else {
            for (String docName : embeddedDocuments.keySet()) {
                DBObject embeddedDoc = embeddedDocuments.get(docName);
                embeddedDoc.removeField("_id"); //$NON-NLS-1$
            }
            // gets its own collection
            BasicDBObject in = this.visitor.getInsert(embeddedDocuments);
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "{\"insert\": {" + in + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.insert(in, WriteConcern.ACKNOWLEDGED));
        }
    } else if (this.command instanceof Update) {
        // get pull key based documents to embed
        LinkedHashMap<String, DBObject> embeddedDocuments = fetchEmbeddedDocuments();
        DBObject match = new BasicDBObject();
        if (this.visitor.match != null) {
            match = this.visitor.match;
        }
        if (mongoDoc.isMerged()) {
            // multi-item array updates are not available, http://jira.mongodb.org/browse/SERVER-1243
            // this is a work-around for the above issue
            List<String> parentKeyNames = parentKeyNames(mongoDoc);
            DBObject documentMatch = new BasicDBObject("$match", match); //$NON-NLS-1$
            DBObject projection = new BasicDBObject("$project", buildProjectForUpdate(mongoDoc)); //$NON-NLS-1$
            Cursor output = collection.aggregate(Arrays.asList(documentMatch, projection), options);
            while (output.hasNext()) {
                BasicDBObject row = (BasicDBObject) output.next();
                buildUpdate(mongoDoc, collection, row, parentKeyNames, 0, null, executionResults,
                        new UpdateOperationImpl());
            }
        } else {
            for (String docName : embeddedDocuments.keySet()) {
                DBObject embeddedDoc = embeddedDocuments.get(docName);
                embeddedDoc.removeField("_id"); //$NON-NLS-1$
            }
            BasicDBObject u = this.visitor.getUpdate(embeddedDocuments);
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$set\": {" + u + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.update(match, new BasicDBObject("$set", u), false, true, //$NON-NLS-1$
                    WriteConcern.ACKNOWLEDGED));
        }
        // if the update is for the "embeddable" table, then since it is copied to other tables
        // those references need to be updated. I know this is not an atomic operation, but not sure
        // how else to handle it.
        if (mongoDoc.isEmbeddable()) {
            updateReferenceTables(collection, mongoDoc, match, options);
        }
    } else {
        // Delete
        DBObject match = new BasicDBObject();
        if (this.visitor.match != null) {
            match = this.visitor.match;
        }
        if (mongoDoc.isEmbeddable()) {
            DBObject m = new BasicDBObject("$match", match); //$NON-NLS-1$
            Cursor output = collection.aggregate(Arrays.asList(m), options);
            while (output.hasNext()) {
                DBObject row = output.next();
                if (row != null) {
                    for (MergeDetails ref : mongoDoc.getEmbeddedIntoReferences()) {
                        DBCollection parent = getCollection(ref.getParentTable());
                        DBObject parentMatch = buildParentMatch(row, ref);
                        DBObject refMatch = new BasicDBObject("$match", parentMatch); //$NON-NLS-1$
                        Cursor referenceOutput = parent.aggregate(Arrays.asList(refMatch), options);
                        if (referenceOutput.hasNext()) {
                            throw new TranslatorException(MongoDBPlugin.Util.gs(MongoDBPlugin.Event.TEIID18010,
                                    this.visitor.mongoDoc.getTargetTable().getName(), ref.getParentTable()));
                        }
                    }
                }
            }
        }
        if (mongoDoc.isMerged()) {
            List<String> parentKeyNames = parentKeyNames(mongoDoc);
            DBObject documentMatch = new BasicDBObject("$match", match); //$NON-NLS-1$
            DBObject projection = new BasicDBObject("$project", buildProjectForUpdate(mongoDoc)); //$NON-NLS-1$
            Cursor output = collection.aggregate(Arrays.asList(documentMatch, projection), options);
            while (output.hasNext()) {
                BasicDBObject row = (BasicDBObject) output.next();
                buildUpdate(mongoDoc, collection, row, parentKeyNames, 0, null, executionResults,
                        new DeleteOperationImpl(match));
            }
        } else {
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "remove - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.remove(match, WriteConcern.ACKNOWLEDGED));
        }
    }

    if (!executionResults.isEmpty()) {
        if (this.command instanceof Insert) {
            if (this.executionContext.getCommandContext().isReturnAutoGeneratedKeys()) {
                addAutoGeneretedKeys(executionResults.get(0));
            }
        }
        int updated = 0;
        for (WriteResult result : executionResults) {
            updated += result.getN();
        }
        this.results = new int[1];
        this.results[0] = updated;
    }
}
From source file:org.uom.fit.level2.datavis.repository.ChatServicesImpl.java
@Override
public boolean saveChatData(ChatData chatData) {
    try {
        Mongo mongo = new Mongo("localhost", 27017);
        DB db = mongo.getDB("datarepo");
        DBCollection collection = db.getCollection("message");
        Gson gson = new Gson();
        DBObject dbObject = (DBObject) JSON.parse(gson.toJson(chatData));
        WriteResult result = collection.insert(WriteConcern.SAFE, dbObject);
        return true;
    } catch (Exception e) {
        System.out.println("Exception Error saveChatData");
        return false;
    }
}
From source file:pubsub.broker.model.Publisher.java
public void addTopic(String title) {
    DBCollection coll = db.getCollection(DBConstants.TOPIC_COLLECTION);
    BasicDBObject query = new BasicDBObject();
    query.put(DBConstants.TOPIC_TOPIC, title);
    query.put(DBConstants.TOPIC_EMAIL_LIST, new ArrayList<String>());
    query.put(DBConstants.TOPIC_HOST_LIST, new ArrayList<String>());
    coll.insert(query, WriteConcern.FSYNCED);
}
From source file:rtpmt.models.Humidity.java
public void save() {
    DBCollection packageColl = db.getCollection(DBConstants.HUMIDITY_COLLECTION);
    /*
    BasicDBObject query = new BasicDBObject();
    query.put(DBConstants.SENSOR_ID, this.get(DBConstants.SENSOR_ID));
    query.put(DBConstants.TRUCK_ID, this.get(DBConstants.TRUCK_ID));
    query.put(DBConstants.PACKAGE_ID, this.get(DBConstants.PACKAGE_ID));
    query.put(DBConstants.TIMESTAMP, this.get(DBConstants.TIMESTAMP));
    BasicDBObject set = new BasicDBObject();
    set.put("$set", this);
    packageColl.update(query, set, true, false);
    */
    packageColl.insert(this, WriteConcern.ACKNOWLEDGED);
}