Usage examples for com.mongodb.DBCollection#remove
public WriteResult remove(final DBObject query)
From source file:com.ikanow.infinit.e.data_model.utils.MongoTransactionLock.java
License:Apache License
protected synchronized void removeToken() { if (_bHaveControl) { DBCollection cachedCollection = _collections.get(); BasicDBObject queryObj = new BasicDBObject(); queryObj.put(hostname_, getHostname()); // (ie will only remove a lock I hold) cachedCollection.remove(queryObj); _bHaveControl = false;//from w ww . j a v a 2s . c o m } }
From source file:com.ikanow.infinit.e.processing.generic.synchronization.SynchronizationManager.java
License:Open Source License
/**
 * Does the DB sync: pulls all mongo docs that occurred from the
 * cleanseStartTime and source, and makes sure they are in the search db.
 * Docs present in Mongo but missing from the search index (or duplicated
 * there) are deleted from Mongo and from the content store.
 *
 * @param cleanseStartTime 1 hour before this harvester started (epoch millis)
 * @param dbCache cleared on entry; filled with the string ids of every doc visited
 * @return The number of errors fixed (docs deleted)
 */
// DON'T USE THIS UNTIL REWRITTEN - IT SHOULD TRANSFER DOCS ACROSS, NOT LEAVE THEM ALONE
@Deprecated
public int syncDB(long cleanseStartTime, Set<String> dbCache) {
    dbCache.clear();
    int fixcount = 0;
    DBCollection contentDb = DbManager.getDocument().getContent();
    DBCollection documentDb = DbManager.getDocument().getMetadata();
    StoreAndIndexManager storeManager = new StoreAndIndexManager();
    for (SourcePojo sp : sources) {
        // Don't combine the sources (apart from unusual multi-community case), because
        // that prevents you from using the compound sourceKey/_id index
        List<String> sourceKeyList = new ArrayList<String>();
        sourceKeyList.addAll(sp.getDistributedKeys());
        try {
            List<DocumentPojo> docs_to_remove = new ArrayList<DocumentPojo>();
            //FIRST DO ALL NEW FEEDS
            // ObjectId built from the cleanse time: matches docs created after that instant
            BasicDBObject query = new BasicDBObject();
            query.put(DocumentPojo._id_, new BasicDBObject(MongoDbManager.gt_,
                    new ObjectId((int) (cleanseStartTime / 1000), 0, 0))); // time aspect
            query.put(DocumentPojo.sourceKey_,
                    new BasicDBObject(MongoDbManager.in_, sourceKeyList)); //source aspect
            // Projection: only the fields needed to compare against the search index
            BasicDBObject queryFields = new BasicDBObject();
            queryFields.append(DocumentPojo.url_, 1);
            queryFields.append(DocumentPojo.index_, 1);
            queryFields.append(DocumentPojo.sourceKey_, 1);
            DBCursor cur = documentDb.find(query, queryFields).batchSize(100);
            ElasticSearchManager esm = null;
            ElasticSearchManager esm_base = ElasticSearchManager.getIndex("document_index");
            String sIndex = null;
            while (cur.hasNext()) {
                if (bKillMeNow) {
                    return fixcount; // external shutdown flag — bail out early
                }
                DocumentPojo doc = DocumentPojo.fromDb(cur.next(), DocumentPojo.class);
                if (null != doc.getId()) {
                    dbCache.add(doc.getId().toString());
                }
                // Get index of doc to check in: cache the ES manager across docs
                // that share an index (docs arrive roughly grouped by source)
                String sNewIndex = doc.getIndex();
                if (null == sNewIndex) {
                    sIndex = null;
                    esm = esm_base; // no explicit index -> default document index
                } else if ((null == sIndex) || (!sNewIndex.equals(sIndex))) {
                    sIndex = sNewIndex;
                    if (sNewIndex.equals("document_index")) {
                        esm = esm_base;
                    } else {
                        esm = ElasticSearchManager.getIndex(sNewIndex + "/document_index");
                    }
                }
                //Compare mongo doc to search doc
                Map<String, GetField> results = esm.getDocument(doc.getId().toString(), DocumentPojo.url_);
                if (null == results || results.isEmpty()) {
                    //either too many entries (duplicates) or no entry
                    //delete this doc from both
                    logger.info("db sync removing doc: " + doc.getId() + "/" + doc.getSourceKey()
                            + " not found in search (or duplicate)");
                    docs_to_remove.add(doc);
                    documentDb.remove(new BasicDBObject(DocumentPojo._id_, doc.getId()));
                    // Content is keyed by url + sourceKey; the in-list includes null so
                    // legacy content rows with no sourceKey are also matched
                    BasicDBObject contentQ = new BasicDBObject(CompressedFullTextPojo.url_, doc.getUrl());
                    contentQ.put(CompressedFullTextPojo.sourceKey_,
                            new BasicDBObject(MongoDbManager.in_, Arrays.asList(null, doc.getSourceKey())));
                    contentDb.remove(contentQ);
                    fixcount++;
                }
            } //end loop over new docs for this source
            storeManager.removeFromSearch(docs_to_remove);
            //NOW VERIFY ALL OLD FEEDS
            int iteration = 1;
            boolean removedAll = true;
            docs_to_remove.clear();
            // NOTE(review): `iteration` is never incremented, so `rows` is always 10 and
            // this loop can re-scan the same window while every batch keeps matching —
            // one reason this method is deprecated pending a rewrite.
            while (removedAll) {
                int rows = iteration * iteration * 10; //10x^2 exponentially check more docs
                int oldfixes = 0;
                BasicDBObject queryOLD = new BasicDBObject();
                queryOLD.put(DocumentPojo.sourceKey_,
                        new BasicDBObject(MongoDbManager.in_, sourceKeyList)); //source aspect
                BasicDBObject sortOLD = new BasicDBObject(DocumentPojo._id_, 1); // oldest first
                DBCursor curOLD = documentDb.find(queryOLD, queryFields).sort(sortOLD).limit(rows);
                while (curOLD.hasNext()) {
                    DocumentPojo doc = DocumentPojo.fromDb(curOLD.next(), DocumentPojo.class);
                    if (null != doc.getId()) {
                        dbCache.add(doc.getId().toString());
                    }
                    // Get index of doc to check in: (same ES-manager caching as above)
                    String sNewIndex = doc.getIndex();
                    if (null == sNewIndex) {
                        sIndex = null;
                        esm = esm_base;
                    } else if ((null == sIndex) || (!sNewIndex.equals(sIndex))) {
                        sIndex = sNewIndex;
                        if (sNewIndex.equals("document_index")) {
                            esm = esm_base;
                        } else {
                            esm = ElasticSearchManager.getIndex(sNewIndex + "/document_index");
                        }
                    }
                    //Compare mongo doc to search doc
                    Map<String, GetField> results = esm.getDocument(doc.getId().toString(), DocumentPojo.url_);
                    if (null == results || results.isEmpty()) {
                        //either too many entries (duplicates) or no entry
                        //delete this doc from both
                        logger.info("db sync removing doc: " + doc.getId() + "/" + doc.getSourceKey()
                                + " not found in search (or duplicate)");
                        docs_to_remove.add(doc);
                        documentDb.remove(new BasicDBObject(DocumentPojo._id_, doc.getId()));
                        // NOTE(review): unlike the new-feed branch, this removes content by
                        // url only (no sourceKey constraint) — confirm this asymmetry is intended
                        contentDb.remove(new BasicDBObject(DocumentPojo.url_, doc.getUrl()));
                        fixcount++;
                        oldfixes++;
                    }
                }
                // Stop once a batch was not entirely bad docs
                if (oldfixes != rows)
                    removedAll = false;
            } //(end loop over old docs for this source)
            storeManager.removeFromSearch(docs_to_remove);
        } catch (Exception e) {
            // If an exception occurs log the error
            logger.error("Exception Message: " + e.getMessage(), e);
        }
    }
    return fixcount;
}
From source file:com.ikanow.infinit.e.utility.MongoAssociationFeatureTxfer.java
License:Apache License
private void doDelete(BasicDBObject query, int nLimit) { try {/*w ww. j ava2 s. c o m*/ // Initialize the DB: DBCollection eventFeatureDB = DbManager.getFeature().getAssociation(); DBCursor cur = eventFeatureDB.find(query).limit(nLimit); // (this internally works in batches of 1000; just get _id) System.out.println("Found " + cur.count() + " records to delete"); if (nLimit > 0) { System.out.println("(limited to " + nLimit + " records)"); } ArrayList<AssociationFeaturePojo> events = new ArrayList<AssociationFeaturePojo>(); LinkedList<String> eventIds = new LinkedList<String>(); while (cur.hasNext()) { AssociationFeaturePojo event = AssociationFeaturePojo.fromDb(cur.next(), AssociationFeaturePojo.class); events.add(event); eventIds.add( new StringBuffer(event.getIndex()).append(":").append(event.getCommunityId()).toString()); eventFeatureDB.remove(new BasicDBObject("index", event.getIndex())); } ElasticSearchManager elasticManager = ElasticSearchManager.getIndex("association_index"); elasticManager.bulkDeleteDocuments(eventIds); } catch (NumberFormatException e) { e.printStackTrace(); } catch (MongoException e) { e.printStackTrace(); } }
From source file:com.ikanow.infinit.e.utility.MongoEntityFeatureTxfer.java
License:Apache License
static void doDelete(BasicDBObject query, int nLimit, boolean automatedRequest) { try {/* w w w .j a v a2s. c o m*/ // Initialize the DB: DBCollection entityFeatureDB = DbManager.getFeature().getEntity(); ElasticSearchManager elasticManager = ElasticSearchManager.getIndex("entity_index"); BasicDBObject fields = new BasicDBObject(); fields.put(EntityFeaturePojo.index_, 1); fields.put(EntityFeaturePojo.communityId_, 1); DBCursor cur = entityFeatureDB.find(query, fields).limit(nLimit); // (this internally works in batches of 1000) if (automatedRequest) { System.out.println("Found " + cur.count() + " records to delete from _id list"); } else { System.out.println("Found " + cur.count() + " records to delete from " + query.toString()); } if (nLimit > 0) { System.out.println("(limited to " + nLimit + " records)"); } int nArraySize = (cur.count() > 1000) ? 1000 : cur.count(); ArrayList<EntityFeaturePojo> batchList = new ArrayList<EntityFeaturePojo>(nArraySize); while (cur.hasNext()) { EntityFeaturePojo gp = EntityFeaturePojo.fromDb(cur.next(), EntityFeaturePojo.class); batchList.add(gp); if (batchList.size() >= nArraySize) { internalDelete(batchList, elasticManager); batchList.clear(); } } if (!batchList.isEmpty()) { internalDelete(batchList, elasticManager); } entityFeatureDB.remove(query); } catch (NumberFormatException e) { e.printStackTrace(); } catch (MongoException e) { e.printStackTrace(); } finally { } }
From source file:com.images3.data.impl.ImageAccessImplMongoDB.java
License:Apache License
/**
 * Deletes the single "Image" document identified by the given image's
 * plant id + image id, and verifies exactly one document was affected.
 */
public void deleteImage(ImageOS image) {
    DBCollection imageCollection = getDatabase().getCollection("Image");
    BasicDBObject selector = new BasicDBObject();
    selector.append("imagePlantId", image.getId().getImagePlantId());
    selector.append("id", image.getId().getImageId());
    WriteResult outcome = imageCollection.remove(selector);
    checkForAffectedDocuments(outcome, 1);
}
From source file:com.images3.data.impl.ImageAccessImplMongoDB.java
License:Apache License
/** Deletes every "Image" document belonging to the given image plant. */
@Override
public void deleteImages(String imagePlantId) {
    DBCollection imageCollection = getDatabase().getCollection("Image");
    // Bulk delete by plant id; no affected-count check — zero matches is acceptable
    imageCollection.remove(new BasicDBObject().append("imagePlantId", imagePlantId));
}
From source file:com.images3.data.impl.ImagePlantAccessImplMongoDB.java
License:Apache License
/**
 * Deletes the "ImagePlant" document with the given plant's id and verifies
 * exactly one document was affected.
 */
public void deleteImagePlant(ImagePlantOS imagePlant) {
    DBCollection plantCollection = getDatabase().getCollection("ImagePlant");
    BasicDBObject selector = new BasicDBObject();
    selector.append("id", imagePlant.getId());
    WriteResult outcome = plantCollection.remove(selector);
    checkForAffectedDocuments(outcome, 1);
}
From source file:com.images3.data.impl.TemplateAccessImplMongoDB.java
License:Apache License
/**
 * Deletes the "Template" document keyed by plant id + lower-cased template
 * name, and verifies exactly one document was affected.
 */
public void deleteTemplate(TemplateOS template) {
    DBCollection templateCollection = getDatabase().getCollection("Template");
    String plantId = template.getId().getImagePlantId();
    // Templates are stored under a case-insensitive name key
    String nameKey = template.getId().getTemplateName().toLowerCase();
    BasicDBObject selector = new BasicDBObject()
            .append("imagePlantId", plantId)
            .append("nameKey", nameKey);
    checkForAffectedDocuments(templateCollection.remove(selector), 1);
}
From source file:com.images3.data.impl.TemplateAccessImplMongoDB.java
License:Apache License
/** Deletes every "Template" document belonging to the given image plant. */
public void deleteTemplatesByImagePlantId(String imagePlantId) {
    DBCollection templateCollection = getDatabase().getCollection("Template");
    // Bulk delete; no affected-count check — a plant may legitimately have no templates
    templateCollection.remove(new BasicDBObject().append("imagePlantId", imagePlantId));
}
From source file:com.impetus.kundera.client.MongoDBClient.java
License:Apache License
@Override public void delete(String idColumnName, String documentName, String rowId) throws Exception { DBCollection dbCollection = mongoDb.getCollection(documentName); //Find the DBObject to remove first BasicDBObject query = new BasicDBObject(); query.put(idColumnName, rowId);/*from w w w .j ava 2s . c o m*/ DBCursor cursor = dbCollection.find(query); DBObject documentToRemove = null; if (cursor.hasNext()) { documentToRemove = cursor.next(); } else { throw new PersistenceException( "Can't remove Row# " + rowId + " for " + documentName + " because record doesn't exist."); } dbCollection.remove(documentToRemove); }