Example usage for com.mongodb DBCollection update

List of usage examples for com.mongodb DBCollection update

Introduction

On this page you can find example usages of the com.mongodb DBCollection.update method.

Prototype

public WriteResult update(final DBObject query, final DBObject update) 

Document

Modifies an existing document in the collection. This two-argument overload updates a single matching document (no upsert, no multi-update) and returns a WriteResult describing the outcome.
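
For orientation, here is a minimal, self-contained sketch of this overload (host, database, collection, and field names are illustrative, not taken from the examples below). Note the difference between an operator update such as $set, which touches only the listed fields, and a plain replacement document:

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;
import com.mongodb.WriteResult;

public class UpdateSketch {
    public static void main(String[] args) {
        MongoClient mongo = new MongoClient("localhost", 27017);
        try {
            DB db = mongo.getDB("test");                    // illustrative database
            DBCollection users = db.getCollection("users"); // illustrative collection

            BasicDBObject query = new BasicDBObject("userName", "alice");
            // $set modifies only the listed fields; passing a plain DBObject
            // as the second argument would replace the whole matched document.
            BasicDBObject update = new BasicDBObject("$set", new BasicDBObject("city", "Pune"));

            WriteResult result = users.update(query, update);
            System.out.println("Documents affected: " + result.getN());
        } finally {
            mongo.close();
        }
    }
}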

Usage

From source file: com.health.smart.util.MongoC.java

public static void update(String collection, String document) throws Exception {
    DBCollection coll = getClient().getDB(database).getCollection(collection);
    DBObject docObj = (DBObject) JSON.parse(document);
    coll.update((DBObject) JSON.parse((String) docObj.get("criteria")),
            (DBObject) JSON.parse((String) docObj.get("update")));
}
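
Because this helper calls JSON.parse twice (once on the outer document, once on each clause), the criteria and update clauses must themselves be JSON strings nested inside the outer JSON. A plausible call, given that structure (collection and field names are illustrative):

MongoC.update("users",
        "{ \"criteria\": \"{ \\\"userName\\\": \\\"alice\\\" }\","
                + " \"update\": \"{ \\\"$set\\\": { \\\"city\\\": \\\"Pune\\\" } }\" }");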

From source file: com.health.smart.util.MongoC.java

public static void update(String collection, DBObject original, DBObject newObj) throws Exception {
    DBCollection coll = getClient().getDB(database).getCollection(collection);
    coll.update(original, newObj);
}

From source file: com.ibm.bluemix.smartveggie.dao.UserDaoImpl.java

@Override
public BasicDBObject updateUser(UserDTO userDTO) {
    BasicDBObject updates = null;
    try {
        System.out.println("Updating Users...");
        DB db = MongodbConnection.getMongoDB();
        DBCollection col = db.getCollection(ICollectionName.COLLECTION_USERS);

        BasicDBObject query = new BasicDBObject();
        query.append("userName", userDTO.getUserName());
        System.out.println("Updating Record: " + query);

        updates = new BasicDBObject();
        // (compare String contents with isEmpty()/equals(); != "" only compares references)
        if (userDTO.getFirstName() != null && !userDTO.getFirstName().isEmpty())
            updates.append("firstName", userDTO.getFirstName());
        if (userDTO.getLastName() != null && !userDTO.getLastName().isEmpty())
            updates.append("lastName", userDTO.getLastName());
        if (userDTO.getAddressLine1() != null && !userDTO.getAddressLine1().isEmpty())
            updates.append("addressLine1", userDTO.getAddressLine1());
        if (userDTO.getAddressLine2() != null && !userDTO.getAddressLine2().isEmpty())
            updates.append("addressLine2", userDTO.getAddressLine2());
        if ((userDTO.getAge() > 0) && (userDTO.getAge() < 100))
            updates.append("age", userDTO.getAge());
        if (userDTO.getSex() != null && !userDTO.getSex().isEmpty())
            updates.append("sex", userDTO.getSex());
        if (userDTO.getUserName() != null && !userDTO.getUserName().isEmpty())
            updates.append("userName", userDTO.getUserName());
        if (userDTO.getPassword() != null && !userDTO.getPassword().isEmpty())
            updates.append("password", userDTO.getPassword());
        if (userDTO.getCity() != null && !userDTO.getCity().isEmpty())
            updates.append("city", userDTO.getCity());
        if (userDTO.getPinCode() != null && !userDTO.getPinCode().isEmpty())
            updates.append("pin", userDTO.getPinCode());
        if (userDTO.getUserTypeCode() != null && !userDTO.getUserTypeCode().isEmpty()) {
            updates.append("userType", userDTO.getUserTypeCode());
            if (userDTO.getUserTypeCode().equalsIgnoreCase("vendor")) {
                if (userDTO.getLicenseNo() != null && !userDTO.getLicenseNo().isEmpty()) {
                    updates.append("licenseNo", userDTO.getLicenseNo());

                    //Process the date field
                    SimpleDateFormat formatter = new SimpleDateFormat("dd/MM/yyyy"); // MM = month-of-year; mm would be minutes
                    String validFrom = userDTO.getValidFrom();
                    String validTo = userDTO.getValidTo();

                    try {

                        Date validFromDate = formatter.parse(validFrom);
                        Date validToDate = formatter.parse(validTo);
                        System.out.println(validFromDate);
                        updates.append("validFrom", validFromDate);
                        updates.append("validTo", validToDate);
                        //System.out.println(formatter.format(validFromDate));

                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            } else if (userDTO.getUserTypeCode().equalsIgnoreCase("regulator")) {
                if (userDTO.getRegulatingCityCode() != null) {
                    updates.append("regulatingCityCode", userDTO.getRegulatingCityCode());
                }
                if (userDTO.getRegulatingCityName() != null) { // (the original repeated the city-code check here)
                    updates.append("regulatingCityName", userDTO.getRegulatingCityName());
                }
            }
        }
        System.out.println("Querying for update: " + query);
        col.update(query, updates);
        System.out.println("col after update" + col.toString() + col.getCount());

    } catch (Exception e) {
        throw e;
    }
    return updates;
}
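
A caution on this example: because updates is passed to update without an operator, the driver treats it as a replacement document, so any stored field not re-supplied from the DTO is dropped. If a partial update is the intent, wrapping the accumulated fields in $set preserves the rest of the document (a sketch, not part of the original source):

        col.update(query, new BasicDBObject("$set", updates));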

From source file: com.ibm.ws.lars.rest.PersistenceBean.java

License: Apache License

@Override
public Asset updateAsset(String assetId, Asset asset)
        throws InvalidJsonAssetException, NonExistentArtefactException {
    if (!Objects.equals(assetId, asset.get_id())) {
        throw new InvalidJsonAssetException("The specified asset id does not match the specified asset.");
    }

    DBCollection coll = getAssetCollection();

    ObjectId objId = new ObjectId(assetId);
    DBObject query = makeQueryById(objId);

    DBObject obj = new BasicDBObject(asset.getProperties());
    convertHexIdToObjectId(obj);

    if (logger.isLoggable(Level.FINE)) {
        String msg = "updateAsset: query object: " + query + "\nupdated asset:" + obj;
        logger.fine(msg);
    }

    coll.update(query, obj);

    return retrieveAsset(objId);
}

From source file: com.ikanow.infinit.e.harvest.extraction.document.HarvestStatus_Integrated.java

License: Open Source License

/**
 * updateHarvestStatus
 * Currently uses the key to find the old source to update;
 * should switch SourcePojo to use the correct id field and search on that.
 * 
 * @param sourceToUpdate
 * @param harvestDate
 * @param harvestStatus
 * @param harvestMessage
 * @param bTempDisable
 * @param bPermDisable
 */
public void update(SourcePojo sourceToUpdate, Date harvestDate, HarvestEnum harvestStatus,
        String harvestMessage, boolean bTempDisable, boolean bPermDisable) {
    // Handle successful harvests where the max docs were reached, so don't want to respect the searchCycle
    if ((harvestStatus == HarvestEnum.success) && (sourceToUpdate.reachedMaxDocs())) {
        harvestStatus = HarvestEnum.success_iteration;
    }
    if ((null != harvestMessage) && !harvestMessage.isEmpty()) {
        this.logMessage(harvestMessage, false);
        if (HarvestEnum.error == harvestStatus) {
            _numMessages++;
        }
    } //TOTEST

    if (null == sourceToUpdate.getHarvestStatus()) {
        sourceToUpdate.setHarvestStatus(new SourceHarvestStatusPojo());
    }

    BasicDBObject update = new BasicDBObject();
    // (annoyingly need to do this in raw format because otherwise overwrite any existing fields eg synced,doccount)
    if ((null == sourceToUpdate.getDistributionFactor()) || (HarvestEnum.in_progress != harvestStatus)) {
        update.put(SourceHarvestStatusPojo.sourceQuery_harvest_status_, harvestStatus.toString());
    }
    update.put(SourceHarvestStatusPojo.sourceQuery_harvested_, harvestDate);
    update.put(SourceHarvestStatusPojo.sourceQuery_realHarvested_, harvestDate);
    sourceToUpdate.getHarvestStatus().setHarvest_status(harvestStatus);
    sourceToUpdate.getHarvestStatus().setHarvested(harvestDate);
    sourceToUpdate.getHarvestStatus().setRealHarvested(harvestDate);

    // Optional fields:
    // Display message
    if (null == _currMessage) {
        // (then also no harvest message else would have logged already)
        sourceToUpdate.getHarvestStatus().setHarvest_message("");
    } //TESTED
    else { // New messages to display
        String date = new SimpleDateFormat("'['yyyy-MM-dd'T'HH:mm:ss']' ").format(new Date());
        if ((null != _messages) && !_messages.isEmpty()) {
            _currMessage.append('\n');
            _currMessage.append(getLogMessages(true)); // (clears _messages)
        }
        sourceToUpdate.getHarvestStatus().setHarvest_message(date + _currMessage.toString());
    } //TESTED
      //(end display message)

    if (sourceToUpdate.getHarvestStatus().getHarvest_message().length() > 0) {
        // (only overwrite the previous message if there's actually something to say...)
        update.put(SourceHarvestStatusPojo.sourceQuery_harvest_message_,
                sourceToUpdate.getHarvestStatus().getHarvest_message());

        if ((null != sourceToUpdate.getDistributionTokens())
                && !sourceToUpdate.getDistributionTokens().isEmpty()) {
            for (Integer token : sourceToUpdate.getDistributionTokens()) {
                update.put(SourceHarvestStatusPojo.sourceQuery_distributedStatus_ + "." + token.toString(),
                        sourceToUpdate.getHarvestStatus().getHarvest_message());
            }
        } //TESTED
    }
    if (bTempDisable) {
        sourceToUpdate.setHarvestBadSource(true);
        update.put(SourcePojo.harvestBadSource_, true);
    }
    if (bPermDisable) {
        if ((null == sourceToUpdate.getSearchCycle_secs()) || (0 == sourceToUpdate.getSearchCycle_secs())) {
            sourceToUpdate.setSearchCycle_secs(-1);
        } else if (sourceToUpdate.getSearchCycle_secs() > 0) { //(else it's already negative, ie run manually)
            sourceToUpdate.setSearchCycle_secs(-sourceToUpdate.getSearchCycle_secs());
        }
        update.put(SourcePojo.searchCycle_secs_, sourceToUpdate.getSearchCycle_secs());
    }
    DBCollection sourceDb = DbManager.getIngest().getSource();
    BasicDBObject query = new BasicDBObject(SourcePojo._id_, sourceToUpdate.getId());
    sourceDb.update(query, new BasicDBObject(MongoDbManager.set_, update));
}

From source file: com.ikanow.infinit.e.processing.generic.store_and_index.StoreAndIndexManager.java

License: Open Source License

/**
 * Remove a doc from the data store
 * @param col
 * @param doc - assumes _id set
 * @param fields - fields to retrieve (set in outside the doc loop for performance, url, index, sourceKey)
 * 
 * CALLED FROM:   removeFromDataStore_byId(col, List<doc>, bDeleteContent) 
 *                removeFromDataStore_byId(List<doc>, bDeleteContent) 
 *                   resizeDB() <- _ID, SOURCEKEY, INDEX, SOURCEURL
 */
private void removeFromDatastore_byId(DBCollection col, DocumentPojo doc) {

    boolean bDeleteContent = docHasExternalContent(doc.getUrl(), doc.getSourceUrl());

    if (bDeleteContent) {
        // Remove its content also:
        if (!_diagnosticMode) {
            BasicDBObject contentQuery = new BasicDBObject(DocumentPojo.url_, doc.getUrl());
            contentQuery.put(DocumentPojo.sourceKey_, doc.getSourceKey());
            DbManager.getDocument().getContent().remove(contentQuery);
        } else {
            System.out.println("StoreAndIndexManager.removeFromDatastore_byId, delete content: "
                    + doc.getSourceKey() + "/" + doc.getUrl());
        }
    }

    // Update Mongodb with the data
    BasicDBObject query = new BasicDBObject();
    query.put(DocumentPojo.sourceKey_, SourcePojo.getDistributedKeyQueryTerm(doc.getSourceKey())); // (needed because on newer machines this is the shard key)
    query.put(DocumentPojo._id_, doc.getId());

    if (!_diagnosticMode) {
        BasicDBObject softDelete = getSoftDeleteUpdate();
        col.update(query, softDelete);
        // (can do this on sharded collections because it uses sourceKey+_id, the shard key)
    } else { // (diagnostic mode)
        if (null != col.findOne(query)) {
            System.out
                    .println("StoreAndIndexManager.removeFromDatastore_byId, delete: " + doc.toDb().toString());
        } else {
            System.out.println("StoreAndIndexManager.removeFromDatastore_byId, delete: DOC NOT FOUND");
        }
    }
}
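
getSoftDeleteUpdate() is not shown in this listing, but since its result is passed straight to update it must produce an operator document. A hypothetical shape, for illustration only (the real marker is project-specific; the MongoDocumentTxfer example below suggests soft-deleted documents are recognizable by a url beginning with "?"):

    private BasicDBObject getSoftDeleteUpdate() {
        // Flag the document as deleted instead of physically removing it.
        return new BasicDBObject("$set", new BasicDBObject("deleted", true));
    }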

From source file: com.ikanow.infinit.e.utility.MongoDocumentTxfer.java

License: Apache License

private void doTransfer(BasicDBObject query, int nSkip, int nLimit, boolean bAggregate, BasicDBObject chunk)
        throws IOException {
    PropertiesManager pm = new PropertiesManager();
    int nMaxContentSize_bytes = pm.getMaxContentSize();

    // Initialize the DB:

    DBCollection docsDB = DbManager.getDocument().getMetadata();
    DBCollection contentDB = DbManager.getDocument().getContent();
    DBCollection sourcesDB = DbManager.getIngest().getSource();

    ElasticSearchManager.setDefaultClusterName("infinite-aws");

    // 1. Get the documents from the DB (combining data + metadata and refreshing source meta)

    // (Ignore soft-deleted records:)
    if (null == query) {
        query = new BasicDBObject();
    }
    Object sourceKeyQueryTerm = query.remove(DocumentPojo.sourceKey_);
    if (null != sourceKeyQueryTerm) {
        // (use append('"'): new StringBuffer('"') would call the int-capacity constructor and drop the quote)
        if (query.toString()
                .contains(new StringBuffer().append('"').append(DocumentPojo.sourceKey_).append('"').toString())) {
            throw new RuntimeException(
                    "Can't specify sourceKey as part of complex query term: " + query.toString());
        } //TESTED (by hand, "{ \"sourceKey\": \"x\", \"$or\": [ { \"sourceKey\": \"x\" } ] }")

        if (sourceKeyQueryTerm instanceof String) {
            query.put(DocumentPojo.sourceKey_,
                    SourcePojo.getDistributedKeyQueryTerm((String) sourceKeyQueryTerm));
        } //TESTED (by hand, "{\"sourceKey\": \"feeds.arstechnica.com.arstechnica.index.11.2.\" }")
        else if (sourceKeyQueryTerm instanceof DBObject) { // find all the _sources_ matching this term, and convert to a big list including distribution
            BasicDBObject fields = new BasicDBObject(SourcePojo.key_, 1);
            fields.put(SourcePojo.highestDistributionFactorStored_, 1);
            DBCursor dbc = sourcesDB.find(new BasicDBObject(SourcePojo.key_, sourceKeyQueryTerm), fields);
            LinkedList<String> sourceKeys = new LinkedList<String>();
            for (DBObject dbo : dbc) {
                String key = (String) dbo.get(SourcePojo.key_);
                Integer distributionFactor = (Integer) dbo.get(SourcePojo.highestDistributionFactorStored_);
                Collection<String> sourceKeysForSource = SourcePojo.getDistributedKeys(key, distributionFactor);
                sourceKeys.addAll(sourceKeysForSource);
            }
            query.put(DocumentPojo.sourceKey_, new BasicDBObject(DbManager.in_, sourceKeys));
        } //TESTED (by hand, "{\"sourceKey\": { \"$gt\": \"dev.ikanow\" } }")
        else {
            throw new RuntimeException("Can't specify sourceKey as part of complex query term");
        } //(actually not possible, just included here for mathematical completeness...)         
    } else {
        if (query.toString()
                .contains(new StringBuffer().append('"').append(DocumentPojo.sourceKey_).append('"').toString())) {
            throw new RuntimeException("Can't specify sourceKey as part of complex query term");
        } //TESTED (by hand, "{ \"$or\": [ { \"sourceKey\": \"x\" } ] }")

        // Optimize communityId into sourceKeys...
        if (null != query.get(DocumentPojo.communityId_)) {
            try {
                ObjectId commId = query.getObjectId(DocumentPojo.communityId_);
                BasicDBObject fields = new BasicDBObject(SourcePojo.key_, 1);
                fields.put(SourcePojo.highestDistributionFactorStored_, 1);
                DBCursor dbc = sourcesDB.find(new BasicDBObject(SourcePojo.communityIds_, commId), fields);
                LinkedList<String> sourceKeys = new LinkedList<String>();
                int added = 0;
                for (DBObject dbo : dbc) {
                    String key = (String) dbo.get(SourcePojo.key_);
                    Integer distributionFactor = (Integer) dbo.get(SourcePojo.highestDistributionFactorStored_);
                    Collection<String> sourceKeysForSource = SourcePojo.getDistributedKeys(key,
                            distributionFactor);
                    sourceKeys.addAll(sourceKeysForSource);
                    added += sourceKeysForSource.size();
                }
                query.put(DocumentPojo.sourceKey_, new BasicDBObject(DbManager.in_, sourceKeys));

                System.out.println("(Optimized simple community query to " + added + " source key(s))");
            } catch (Exception e) {
                //DEBUG
                //e.printStackTrace();

                System.out.println("(Can't optimize complex community query: " + e.getMessage());
            }
        } //TESTED (by hand - including distributed source version)
    }
    // Ignored delete objects
    Object urlQuery = query.get(DocumentPojo.url_);
    if (null == urlQuery) {
        query.put(DocumentPojo.url_, Pattern.compile("^[^?]")); // (ie nothing starting with ?)
    } //TESTED
    else if (urlQuery instanceof BasicDBObject) {
        ((BasicDBObject) urlQuery).append("$regex", "^[^?]");
    } //TESTED
      //DEBUG
      //System.out.println("COMBINED QUERY= " + query.toString());

    // If aggregating, kick off the background aggregation thread
    if (bAggregate) {
        EntityBackgroundAggregationManager.startThread();
        AssociationBackgroundAggregationManager.startThread();
    }

    //Debug:
    DBCursor dbc = null;
    dbc = docsDB.find(query);
    if (null != chunk) {
        if (chunk.containsField(DbManager.min_)) {
            dbc = dbc.addSpecial(DbManager.min_, chunk.get(DbManager.min_));
        }
        if (chunk.containsField(DbManager.max_)) {
            dbc = dbc.addSpecial(DbManager.max_, chunk.get(DbManager.max_));
        }
    }
    dbc = dbc.skip(nSkip).limit(nLimit).batchSize(1000);
    if (null == chunk) {
        int nCount = dbc.count() - nSkip;
        if (nCount < 0)
            nCount = 0;
        System.out.println(
                "Found " + nCount + " records to sync, process first " + (0 == nLimit ? nCount : nLimit));
        if (0 == nCount) { // Nothing to do...
            return;
        }
    }

    byte[] storageArray = new byte[200000];

    int nSynced = 0;
    LinkedList<DocumentPojo> docsToTransfer = new LinkedList<DocumentPojo>();
    Map<ObjectId, LinkedList<DocumentPojo>> communityList = null;
    ObjectId currCommunityId = null;
    while (dbc.hasNext()) {
        BasicDBObject dbo = (BasicDBObject) dbc.next();
        DocumentPojo doc = DocumentPojo.fromDb(dbo, DocumentPojo.class);
        String sDocIndex = doc.getIndex();
        if (null == sDocIndex) {
            sDocIndex = "document_index";
        }
        if ((null != _deletedIndex) && !_deletedIndex.contains(sDocIndex)) {
            _deletedIndex.add(sDocIndex);
            rebuildIndex(sDocIndex);
            try { // (Just in case the index requires some time to sort itself out)
                Thread.sleep(1000);
            } catch (InterruptedException e) {
            }
        }

        //Debug:
        //System.out.println("Getting content..." + feed.getTitle() + " / " + feed.getUrl());

        // Get the content:
        if ((0 != nMaxContentSize_bytes)
                && StoreAndIndexManager.docHasExternalContent(doc.getUrl(), doc.getSourceUrl())) {
            BasicDBObject contentQ = new BasicDBObject(CompressedFullTextPojo.url_, doc.getUrl());
            contentQ.put(CompressedFullTextPojo.sourceKey_,
                    new BasicDBObject(MongoDbManager.in_, Arrays.asList(null, doc.getSourceKey())));
            BasicDBObject fields = new BasicDBObject(CompressedFullTextPojo.gzip_content_, 1);
            fields.put(CompressedFullTextPojo.sourceKey_, 1);

            DBCursor dbcGzip = contentDB.find(contentQ, fields);
            while (dbcGzip.hasNext()) {
                BasicDBObject dboContent = (BasicDBObject) dbcGzip.next();
                if (!dboContent.containsField(CompressedFullTextPojo.sourceKey_)) {
                    // If this has another version then ignore this one...
                    if (dbcGzip.hasNext()) { // (check the content cursor; dbc is the outer document cursor)
                        continue;
                    } //TESTED (by hand)               
                }

                byte[] compressedData = ((byte[]) dboContent.get(CompressedFullTextPojo.gzip_content_));
                ByteArrayInputStream in = new ByteArrayInputStream(compressedData);
                GZIPInputStream gzip = new GZIPInputStream(in);
                int nRead = 0;
                StringBuffer output = new StringBuffer();
                while (nRead >= 0) {
                    nRead = gzip.read(storageArray, 0, 200000);
                    if (nRead > 0) {
                        String s = new String(storageArray, 0, nRead, "UTF-8");
                        output.append(s);
                    }
                }
                doc.setFullText(output.toString());
            }
        }
        // (else document has full text already)

        // Get tags, if necessary:
        // Always overwrite tags - one of the reasons we might choose to migrate
        // Also may need source in order to support source index filtering
        SourcePojo src = _sourceCache.get(doc.getSourceKey());
        if (null == src) {
            //TODO (INF-2265): handle search index settings in pipeline mode... (also didn't seem to work?)
            BasicDBObject srcDbo = (BasicDBObject) sourcesDB
                    .findOne(new BasicDBObject(SourcePojo.key_, doc.getSourceKey()));
            if (null != srcDbo) {
                src = SourcePojo.fromDb(srcDbo, SourcePojo.class);

                if (null != src.getProcessingPipeline()) {
                    try {
                        // Set the index settings
                        HarvestController hc = new HarvestController();
                        HarvestControllerPipeline hcPipe = new HarvestControllerPipeline();
                        hcPipe.extractSource_preProcessingPipeline(src, hc);
                    } catch (Exception e) {
                        //DEBUG
                        e.printStackTrace();
                    }
                } //TESTED (by hand)

                _sourceCache.put(doc.getSourceKey(), src);
            }
        }
        doc.setTempSource(src); // (needed for source index filtering)
        if (null != src) {
            if (null != src.getTags()) {
                Set<String> tagsTidied = new TreeSet<String>();
                for (String s : src.getTags()) {
                    String ss = s.trim().toLowerCase();
                    tagsTidied.add(ss);
                }

                // May also want to write this back to the DB:
                //TODO (INF-2223): Handle append tags or not in the pipeline...
                if ((null == src.getAppendTagsToDocs()) || src.getAppendTagsToDocs()) {
                    if ((null == doc.getTags()) || (doc.getTags().size() < tagsTidied.size())) {
                        BasicDBObject updateQuery = new BasicDBObject(DocumentPojo.sourceKey_,
                                doc.getRawSourceKey()); // (ie including the # if there is one)
                        updateQuery.put(DocumentPojo._id_, doc.getId());
                        docsDB.update(updateQuery,
                                new BasicDBObject(DbManager.addToSet_, new BasicDBObject(DocumentPojo.tags_,
                                        new BasicDBObject(DbManager.each_, tagsTidied))));
                    }
                    doc.setTags(tagsTidied); // (just copy ptr across)
                }
            }
        }

        // 2. Update the index with the new document            

        // (Optionally also update entity and assoc features)

        if (bAggregate) {
            if (null == currCommunityId) {
                currCommunityId = doc.getCommunityId();
            } else if (!currCommunityId.equals(doc.getCommunityId())) {
                LinkedList<DocumentPojo> perCommunityDocList = null;
                if (null == communityList) { // (very first time we see > 1 community)
                    communityList = new TreeMap<ObjectId, LinkedList<DocumentPojo>>();
                    perCommunityDocList = new LinkedList<DocumentPojo>();
                    perCommunityDocList.addAll(docsToTransfer); //(NOT including doc, this hasn't been added to docsToTransfer yet)
                    communityList.put(currCommunityId, perCommunityDocList);
                }
                currCommunityId = doc.getCommunityId();
                perCommunityDocList = communityList.get(currCommunityId);
                if (null == perCommunityDocList) {
                    perCommunityDocList = new LinkedList<DocumentPojo>();
                    communityList.put(currCommunityId, perCommunityDocList);
                }
                perCommunityDocList.add(doc);
            }
        } //TESTED

        nSynced++;
        docsToTransfer.add(doc);
        if (0 == (nSynced % 10000)) {
            StoreAndIndexManager manager = new StoreAndIndexManager();

            if (bAggregate) {
                // Loop over communities and aggregate each one then store the modified entities/assocs               
                doAggregation(communityList, docsToTransfer);
                communityList = null; // (in case the next 10,000 docs are all in the same community!)
                currCommunityId = null;

            } //TOTEST            

            manager.addToSearch(docsToTransfer);
            docsToTransfer.clear();
            System.out.println("(Synced " + nSynced + " records)");
        }

    } // (End loop over docs)

    // Sync remaining docs

    if (!docsToTransfer.isEmpty()) {
        if (bAggregate) {
            // Loop over communities and aggregate each one then store the modified entities/assocs               
            doAggregation(communityList, docsToTransfer);
        }

        StoreAndIndexManager manager = new StoreAndIndexManager();
        manager.addToSearch(docsToTransfer);
    }

    if (null != chunk) {
        System.out.println("Found " + nSynced + " records to sync in chunk");
    }

    if (bAggregate) {
        System.out.println("Completed. You can hit CTRL+C at any time.");
        System.out.println(
                "By default it will keep running for 5 minutes while the background aggregation runs to update the documents' entities.");
        try {
            Thread.sleep(300000);
        } catch (InterruptedException e) {
        }

        // Turn off so we can exit
        EntityBackgroundAggregationManager.stopThreadAndWait();
        AssociationBackgroundAggregationManager.stopThreadAndWait();
    }
}

From source file: com.images3.data.impl.ImagePlantAccessImplMongoDB.java

License: Apache License

public void updateImagePlant(ImagePlantOS imagePlant) {
    DBCollection coll = getDatabase().getCollection("ImagePlant");
    BasicDBObject criteria = new BasicDBObject().append("id", imagePlant.getId());
    WriteResult result = coll.update(criteria, getObjectMapper().mapToBasicDBObject(imagePlant));
    checkForAffectedDocuments(result, 1);
}
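
Both this example and the next pass the mapped object directly as the update, i.e. a full-document replacement, and then verify the write via checkForAffectedDocuments. That helper is not shown in the listing; a minimal sketch of such a guard built on WriteResult.getN() (the exception type is illustrative):

    private void checkForAffectedDocuments(WriteResult result, int expected) {
        // getN() reports how many documents the write affected.
        if (result.getN() != expected) {
            throw new IllegalStateException(
                    "expected " + expected + " affected document(s), got " + result.getN());
        }
    }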

From source file: com.images3.data.impl.TemplateAccessImplMongoDB.java

License: Apache License

public void updateTemplate(TemplateOS template) {
    DBCollection coll = getDatabase().getCollection("Template");
    BasicDBObject criteria = new BasicDBObject().append("imagePlantId", template.getId().getImagePlantId())
            .append("nameKey", template.getId().getTemplateName().toLowerCase());
    WriteResult result = coll.update(criteria, getObjectMapper().mapToBasicDBObject(template));
    checkForAffectedDocuments(result, 1);
}

From source file: com.impetus.client.mongodb.MongoDBClient.java

License: Apache License

/**
 * Handle update functions.
 * 
 * @param query
 *            the query
 * @param update
 *            the update
 * @param collName
 *            the coll name
 * @return the int
 */
public int handleUpdateFunctions(BasicDBObject query, BasicDBObject update, String collName) {
    DBCollection collection = mongoDb.getCollection(collName);
    KunderaCoreUtils.printQuery("Update collection:" + query, showQuery);
    WriteResult result = null;
    try {
        result = collection.update(query, update);
    } catch (MongoException ex) {
        return -1;
    }
    if (result.getN() <= 0)
        return -1;
    return result.getN();
}
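
Note that this method returns -1 both when the update throws a MongoException and when nothing matched, so callers cannot distinguish the two cases. A hypothetical call (collection and field names are illustrative):

    int affected = client.handleUpdateFunctions(
            new BasicDBObject("age", new BasicDBObject("$gt", 30)),
            new BasicDBObject("$set", new BasicDBObject("senior", true)),
            "users");
    if (affected < 0) {
        // no documents matched, or the update failed
    }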