Example usage for com.mongodb DBCollection update

List of usage examples for com.mongodb DBCollection update

Introduction

On this page you can find example usages for com.mongodb DBCollection update.

Prototype

public WriteResult update(final DBObject query, final DBObject update, final boolean upsert,
        final boolean multi) 

Source Link

Document

Modifies an existing document or documents in the collection.

Usage

From source file:com.softinstigate.restheart.db.DBDAO.java

License:Open Source License

/**
 *
 * @param dbName/*from   w w w.  j  a v a2 s.  co m*/
 * @param content
 * @param etag
 * @param patching
 * @return
 */
public static int upsertDB(String dbName, DBObject content, ObjectId etag, boolean patching) {
    DB db = client.getDB(dbName);

    boolean existing = db.getCollectionNames().size() > 0;

    if (patching && !existing) {
        return HttpStatus.SC_NOT_FOUND;
    }

    DBCollection coll = db.getCollection("_properties");

    // check the etag
    if (db.collectionExists("_properties")) {
        if (etag == null) {
            return HttpStatus.SC_CONFLICT;
        }

        BasicDBObject idAndEtagQuery = new BasicDBObject("_id", "_properties");
        idAndEtagQuery.append("_etag", etag);

        if (coll.count(idAndEtagQuery) < 1) {
            return HttpStatus.SC_PRECONDITION_FAILED;
        }
    }

    // apply new values
    ObjectId timestamp = new ObjectId();
    Instant now = Instant.ofEpochSecond(timestamp.getTimestamp());

    if (content == null) {
        content = new BasicDBObject();
    }

    content.put("_etag", timestamp);
    content.removeField("_created_on"); // make sure we don't change this field
    content.removeField("_id"); // make sure we don't change this field

    if (patching) {
        coll.update(METADATA_QUERY, new BasicDBObject("$set", content), true, false);

        return HttpStatus.SC_OK;
    } else {
        // we use findAndModify to get the @created_on field value from the existing document
        // we need to put this field back using a second update 
        // it is not possible in a single update even using $setOnInsert update operator
        // in this case we need to provide the other data using $set operator and this makes it a partial update (patch semantic) 
        DBObject old = coll.findAndModify(METADATA_QUERY, fieldsToReturn, null, false, content, false, true);

        if (old != null) {
            Object oldTimestamp = old.get("_created_on");

            if (oldTimestamp == null) {
                oldTimestamp = now.toString();
                logger.warn("properties of collection {} had no @created_on field. set to now",
                        coll.getFullName());
            }

            // need to readd the @created_on field 
            BasicDBObject createdContet = new BasicDBObject("_created_on", "" + oldTimestamp);
            createdContet.markAsPartialObject();
            coll.update(METADATA_QUERY, new BasicDBObject("$set", createdContet), true, false);

            return HttpStatus.SC_OK;
        } else {
            // need to readd the @created_on field 
            BasicDBObject createdContet = new BasicDBObject("_created_on", now.toString());
            createdContet.markAsPartialObject();
            coll.update(METADATA_QUERY, new BasicDBObject("$set", createdContet), true, false);

            return HttpStatus.SC_CREATED;
        }
    }
}

From source file:com.softinstigate.restheart.db.DocumentDAO.java

License:Open Source License

/**
 * Creates or updates a document in the given collection.
 *
 * @param dbName      database name
 * @param collName    collection name
 * @param documentId  id of the document to upsert
 * @param content     new document content (may be null, treated as empty)
 * @param requestEtag etag sent by the client, checked against the stored one
 * @param patching    true for PATCH semantics ($set of the given fields only)
 * @return the HttpStatus code to return
 */
public static int upsertDocument(String dbName, String collName, String documentId, DBObject content,
        ObjectId requestEtag, boolean patching) {
    DB db = DBDAO.getDB(dbName);
    DBCollection coll = db.getCollection(collName);

    // derive "now" from the new etag's embedded timestamp
    ObjectId timestamp = new ObjectId();
    Instant now = Instant.ofEpochSecond(timestamp.getTimestamp());

    DBObject data = (content == null) ? new BasicDBObject() : content;
    data.put("_etag", timestamp);

    BasicDBObject idQuery = new BasicDBObject("_id", getId(documentId));

    if (patching) {
        data.removeField("_created_on"); // the creation timestamp must never change

        DBObject previous = coll.findAndModify(idQuery, null, null, false,
                new BasicDBObject("$set", data), false, false);

        // on a patch the document must already exist; otherwise verify the etag
        // (optimisticCheckEtag restores the old version on mismatch)
        return (previous == null)
                ? HttpStatus.SC_NOT_FOUND
                : optimisticCheckEtag(coll, previous, requestEtag, HttpStatus.SC_OK);
    }

    // Full replace: assume insert and stamp _created_on with "now". If it
    // turns out to be an update we restore the previous value with a second
    // update — a single update cannot do it, since $setOnInsert would force
    // $set for the remaining fields, i.e. patch semantics.
    data.put("_created_on", now.toString());

    DBObject previous = coll.findAndModify(idQuery, null, null, false, data, false, true);

    if (previous == null) { // it was an insert
        return HttpStatus.SC_CREATED;
    }

    // it was an update: put the original creation timestamp back
    Object createdOn = previous.get("_created_on");

    if (createdOn == null) {
        createdOn = now.toString();
        logger.warn("properties of document /{}/{}/{} had no @created_on field. set to now", dbName,
                collName, documentId);
    }

    BasicDBObject restore = new BasicDBObject("_created_on", "" + createdOn);
    restore.markAsPartialObject();
    coll.update(idQuery, new BasicDBObject("$set", restore), true, false);

    // check the old etag (optimisticCheckEtag restores the old version on mismatch)
    return optimisticCheckEtag(coll, previous, requestEtag, HttpStatus.SC_OK);
}

From source file:com.softinstigate.restheart.db.DocumentDAO.java

License:Open Source License

/**
 * Handles a POST: inserts a new document (generating its id and advertising it
 * via the Location header) or, when the body carries an _id, upserts by that id
 * while preserving the existing document's _created_on value.
 *
 * @param exchange    the HTTP exchange, used to set the Location header on create
 * @param dbName      database name
 * @param collName    collection name
 * @param content     the document content (may be null, treated as empty)
 * @param requestEtag etag sent by the client, checked against the stored one
 * @return the HttpStatus code to return
 */
public static int upsertDocumentPost(HttpServerExchange exchange, String dbName, String collName,
        DBObject content, ObjectId requestEtag) {
    DB db = DBDAO.getDB(dbName);

    DBCollection coll = db.getCollection(collName);

    // derive "now" from the new etag's embedded timestamp
    ObjectId timestamp = new ObjectId();
    Instant now = Instant.ofEpochSecond(timestamp.getTimestamp());

    if (content == null) {
        content = new BasicDBObject();
    }

    content.put("_etag", timestamp);
    content.put("_created_on", now.toString()); // assume insert; restored from the old document below if this is an update

    // pull a client-supplied id out of the body, if any
    Object _id = content.get("_id");
    content.removeField("_id");

    if (_id == null) {
        // no client-supplied id: plain insert with a fresh ObjectId
        ObjectId id = new ObjectId();
        content.put("_id", id);

        coll.insert(content);

        // advertise where the newly created document can be found
        exchange.getResponseHeaders().add(HttpString.tryFromString("Location"),
                getReferenceLink(exchange.getRequestURL(), id.toString()).toString());

        return HttpStatus.SC_CREATED;
    }

    BasicDBObject idQuery = new BasicDBObject("_id", getId("" + _id));

    // we use findAndModify to get the _created_on field value from the existing document;
    // we need to put this field back using a second update.
    // it is not possible in a single update even using the $setOnInsert update operator:
    // we would need to provide the other data using $set and this makes it a partial update (patch semantics)
    DBObject oldDocument = coll.findAndModify(idQuery, null, null, false, content, false, true);

    if (oldDocument != null) { // upsert
        Object oldTimestamp = oldDocument.get("_created_on");

        if (oldTimestamp == null) {
            oldTimestamp = now.toString();
            logger.warn("properties of document /{}/{}/{} had no @created_on field. set to now", dbName,
                    collName, _id.toString());
        }

        // need to re-add the _created_on field with a second, partial update
        BasicDBObject createdContet = new BasicDBObject("_created_on", "" + oldTimestamp);
        createdContet.markAsPartialObject();
        coll.update(idQuery, new BasicDBObject("$set", createdContet), true, false);

        // check the old etag (optimisticCheckEtag restores the old version on mismatch)
        return optimisticCheckEtag(coll, oldDocument, requestEtag, HttpStatus.SC_OK);
    } else { // insert
        return HttpStatus.SC_CREATED;
    }
}

From source file:com.stratio.connector.mongodb.core.engine.MongoStorageEngine.java

License:Apache License

/**
 * Applies the SET assignments to every document matching the where clauses
 * (multi-update, no upsert).
 */
@Override
protected void update(TableName tableName, Collection<Relation> assignments, Collection<Filter> whereClauses,
        Connection<MongoClient> connection) throws ExecutionException, UnsupportedException {

    // resolve the target collection from the qualified table name
    DB database = connection.getNativeConnection().getDB(tableName.getCatalogName().getName());
    DBCollection collection = database.getCollection(tableName.getName());

    // translate each assignment into the corresponding mongo update operator
    UpdateDBObjectBuilder builder = new UpdateDBObjectBuilder();
    for (Relation assignment : assignments) {
        builder.addUpdateRelation(assignment.getLeftTerm(), assignment.getOperator(),
                assignment.getRightTerm());
    }

    try {
        // upsert=false, multi=true: touch only existing matching documents
        collection.update(buildFilter(whereClauses), builder.build(), false, true);
    } catch (MongoException e) {
        logger.error("Error updating the data: " + e.getMessage());
        throw new ExecutionException(e.getMessage(), e);
    }
}

From source file:com.wincere.lamda.storm.bolt.CreateTable.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes no args/*from ww w  .j a  v  a  2s . co  m*/
 * @throws UnknownHostException if it cannot connect to a MongoDB instance at localhost:27017
 */
public void update(BasicDBObject doc, OutputCollector collector, Tuple input) throws UnknownHostException {
    // connect to the local database server
    MongoCredential credential = MongoCredential.createMongoCRCredential("superuser", "admin",
            "12345678".toCharArray());
    try {
        MongoClient mongoClient = new MongoClient(new ServerAddress("172.16.1.171", 27017),
                Arrays.asList(credential));

        // MongoClient mongoClient = new MongoClient("172.16.1.171",27017);

        /*
        // Authenticate - optional
        MongoCredential credential = MongoCredential.createMongoCRCredential(userName, database, password);
        MongoClient mongoClient = new MongoClient(new ServerAddress(), Arrays.asList(credential));
        */

        // get handle to "mydb"
        DB db = mongoClient.getDB("UCAPBatchTest");

        // get a collection object to work with
        DBCollection coll = db.getCollection("Queries1");
        //  DBCollection status = db.getCollection("statustest1");
        //DBCollection coll1 = db.getCollection("queryaudittest1");
        // drop all the data in it
        //coll.drop();
        //status.drop();
        //coll1.drop();

        /*  status.insert(new BasicDBObject().append("queryStatus", "Open").append("QueryStatusID","1"));
          status.insert(new BasicDBObject().append("queryStatus", "Answered").append("QueryStatusID","2"));
          status.insert(new BasicDBObject().append("queryStatus", "Closed").append("QueryStatusID","3"));
          status.insert(new BasicDBObject().append("queryStatus", "Cancelled").append("QueryStatusID","4")); */
        // make a document and insert it

        int count = 0;
        DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss");
        try {

            //.equals("Open")?"1":(splitValue[5].equals("Answered")?"2":"3")

            BasicDBObject searchQuery = new BasicDBObject().append("queryRepeatKey",
                    (String) doc.get("queryRepeatKey"));
            BasicDBObject newDocument = new BasicDBObject();

            DBCursor cursor = coll.find(searchQuery);
            //DBObject result = cursor.next();

            if (cursor.hasNext()) {
                DBObject result = cursor.next();

                String queryValue = (String) result.get("queryValue");
                String queryStatusID = (String) result.get("queryStatusID");
                String queryResponse = (String) result.get("queryResponse");
                String queryResolvedTimeStamp = (String) result.get("queryResolvedTimeStamp");
                String queryAnsweredTimeStamp = (String) result.get("queryAnsweredTimeStamp");
                String queryCreatedTimeStamp = (String) result.get("queryCreatedTimeStamp");

                if (doc.get("queryValue").equals("\\N")) {
                    doc.append("queryValue", queryValue);
                }
                if (doc.get("queryStatusID").equals("\\N")) {
                    doc.append("queryStatusID", queryStatusID);
                }
                if (doc.get("queryResponse").equals("\\N")) {
                    doc.append("queryResponse", queryResponse);
                }
                if (doc.get("queryResolvedTimeStamp").equals("\\N")) {
                    doc.append("queryResolvedTimeStamp", queryResolvedTimeStamp);
                }
                if (doc.get("queryAnsweredTimeStamp").equals("\\N")) {
                    doc.append("queryAnsweredTimeStamp", queryAnsweredTimeStamp);
                }
                doc.append("queryCreatedTimeStamp", queryCreatedTimeStamp);
            }
            if (doc.get("queryStatusID").equals("Open"))
                doc.append("queryCreatedTimeStamp", doc.get("queryCreatedTimeStamp"));

            //System.out.println(count);
            newDocument.append("$set", doc);
            try {
                coll.update(searchQuery, newDocument, true, true);
            } catch (MongoException me) {
                collector.fail(input);
            }
            // collector.ack(input);

            //coll.insert(doc);

        } catch (Exception e) {
            System.err.println("CSV file cannot be read : " + e);
        }

        //System.out.println(count);
        // lets get all the documents in the collection and print them out
        /*DBCursor cursor = coll1.find();
        try {
            while (cursor.hasNext()) {
        System.out.println(cursor.next());
            }
        } finally {
            cursor.close();
        }*/

        /* // now use a query to get 1 document out
         BasicDBObject query = new BasicDBObject("i", 71);
         cursor = coll.find(query);
                
         try {
             while (cursor.hasNext()) {
        System.out.println(cursor.next());
             }
         } finally {
             cursor.close();
         }*/

        // release resources
        //db.dropDatabase();
        mongoClient.close();
    } catch (UnknownHostException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:de.flapdoodle.mongoom.datastore.Datastore.java

License:Apache License

/**
 * Persists the given entity with the requested operation (Insert, Update, Save
 * or Delete), using the entity's registered transformation to convert it to a
 * DBObject and to read/write its id and version fields.
 *
 * @param operation the persistence operation to perform
 * @param entity    the entity instance to store or delete
 */
private <T> void store(Operation operation, T entity) {
    IEntityTransformation<T> converter = _transformations.transformation((Class<T>) entity.getClass());
    DBCollection dbCollection = _db.getCollection(converter.collection().name());
    Object idValue = converter.getId(entity);
    Object versionValue = converter.getVersion(entity);

    //      if (idValue == null)
    //         throw new MappingException(entity.getClass(), "Key is NULL");
    //      DBObject convertedEntity = converter.convertTo(entity);

    // key selects by id and — when the entity is versioned — by version too
    // (optimistic locking for update/delete)
    BasicDBObject key = new BasicDBObject();
    key.put(Const.ID_FIELDNAME, idValue);
    if (versionValue != null)
        key.put(Const.VERSION_FIELDNAME, versionValue);

    // per-operation flags: whether to copy the (possibly generated) id back
    // into the entity and whether a custom (non-ObjectId) id is forbidden
    boolean reReadId = true;
    boolean mustHaveObjectId = false;
    boolean update = false;

    switch (operation) {
    case Delete:
        mustHaveObjectId = true;
        reReadId = false;
        break;
    case Save:
        mustHaveObjectId = true;
        break;
    case Update:
        reReadId = false;
        update = true;
        if (idValue == null)
            throw new MappingException(entity.getClass(), "Can not update Entities with Id not set");
        break;
    }

    try {
        // scope all operations to one connection so getLastError sees them
        _db.requestStart();
        if (mustHaveObjectId) {
            if ((idValue != null) && (!(idValue instanceof ObjectId))) {
                throw new MappingException(entity.getClass(), "Can not save Entities with custom Id");
            }
        }

        // bump the version before converting so the new version is persisted
        converter.newVersion(entity);
        DBObject convertedEntity = converter.asObject(entity);

        switch (operation) {
        case Insert:
            _logger.fine("Insert: " + convertedEntity);
            if (idValue != null) {
                _logger.log(Level.WARNING, "Insert with Id set: " + idValue, new Exception());
            }
            dbCollection.insert(convertedEntity);
            break;
        case Update:
            _logger.fine("Update: " + convertedEntity + " (Id: " + idValue + ")");
            //               BasicDBObject updateQuery=new BasicDBObject();
            //               updateQuery.put(Const.ID_FIELDNAME, idValue);
            dbCollection.update(key, convertedEntity, false, false);
            break;
        case Save:
            _logger.fine("Save: " + convertedEntity);
            dbCollection.save(convertedEntity);
            break;
        case Delete:
            _logger.fine("Delete: " + key);
            dbCollection.remove(key);
            break;
        default:
            throw new ObjectMapperException("Operation not supported: " + operation);
        }

        if (reReadId) {
            // insert/save may have generated the id on the driver side
            Object savedIdValue = convertedEntity.get(Const.ID_FIELDNAME);
            converter.setId(entity, savedIdValue);
        }

        Errors.checkError(_db, operation);

        if (operation == Operation.Delete) {
            converter.setId(entity, null);
        }
    } finally {
        _db.requestDone();
    }

}

From source file:edu.stanford.epad.common.util.MongoDBOperations.java

License:Open Source License

/**
 * Converts and saves an annotation to mongoDB
 * @param annotationID/* ww  w  .j av  a  2  s  .  c  om*/
 * @param aimXML
 * @param collection
 * @throws Exception
 */
public static void saveAnnotationToMongo(String annotationID, String aimXML, String collection)
        throws Exception {
    try {
        DB db = MongoDBOperations.getMongoDB();
        if (db == null) {
            log.warning("No connection to Mongo DB");
            return;
        }
        if (aimXML == null || aimXML.trim().length() == 0)
            return;
        String jsonString = XML.toJSONObject(aimXML).toString(0);
        ;
        if (jsonString == null)
            throw new Exception("Error converting to json");
        DBCollection dbColl = db.getCollection(collection);
        BasicDBObject dbObj = new BasicDBObject("ImageAnnotationCollection.uniqueIdentifier.root", 1);
        dbColl.createIndex(dbObj, "uid_idx", true); // Does not create index, if it already exists
        BasicDBObject query = new BasicDBObject();
        query.put("ImageAnnotationCollection.uniqueIdentifier.root", annotationID);
        DBObject dbObject = (DBObject) JSON.parse(jsonString);
        DBCursor cursor = dbColl.find(query);
        if (cursor.count() > 0) {
            log.info("Updating existing annotation in mongoDB:" + annotationID + " in " + collection);
            dbColl.update(query, dbObject, true, false);
        } else {
            log.info("Creating new annotation in mongoDB:" + annotationID + " in " + collection);
            dbColl.insert(dbObject);
        }
    } catch (Exception e) {
        log.warning("Error saving AIM to mongodb:", e);
        throw e;
    }
}

From source file:ezbake.services.centralPurge.thrift.EzCentralPurgeServiceHandler.java

License:Apache License

/**
 * Records a service's progress on a purge or age-off event: validates the
 * caller, stores the service's PurgeState in the matching mongo document,
 * recomputes the event's overall status across all services, and notifies the
 * provenance service of newly completed document ids.
 */
@Override
public void updatePurge(EzSecurityToken token, PurgeState inputPurgeState, String applicationName,
        String serviceName) throws EzSecurityTokenException, TException {
    DBObject dbObject = null;
    Map<String, ApplicationPurgeState> appStatesMap = null;
    CentralPurgeState centralPurgeState = null;
    CentralAgeOffEventState centralAgeOffEventState = null;
    Set<Long> centralCompletelyPurgedSet = null;
    Set<Long> centralToBePurgedSet = null;
    DBCollection purgeColl = null;
    DBCollection ageOffColl = null;
    Mongo mongoClient = null;

    AuditEvent evt = event(AuditEventType.FileObjectModify.getName(), token).arg("event", "update purge")
            .arg("purgeId", inputPurgeState.getPurgeId()).arg("service name", serviceName)
            .arg("application name", applicationName);

    ThriftClientPool pool = null;
    try {
        pool = new ThriftClientPool(configuration);
        securityClient.validateReceivedToken(token);
        // Validates that the application that is calling update purge is allowed to update for the passed appName
        String securityId = "";
        EzSecurityTokenWrapper wrapper = new EzSecurityTokenWrapper(token);
        // note: if the  centralPurgeService is calling then any appName can be updated
        if (wrapper.getSecurityId().equals(purgeAppSecurityId)) {
            securityId = purgeAppSecurityId;
        } else {
            securityId = pool.getSecurityId(getSecurityName(applicationName, serviceName));
        }

        if (!securityId.equals(wrapper.getSecurityId())) {
            throw new EzSecurityTokenException(
                    "The security id for the token does match the applicationName passed");
        }

        // Get access to the ageoff collection within Mongo
        MongoConfigurationHelper mongoConfigurationHelper = new MongoConfigurationHelper(configuration);
        MongoHelper mongoHelper = new MongoHelper(configuration);
        mongoClient = mongoHelper.getMongo();
        DB mongoDB = mongoClient.getDB(mongoConfigurationHelper.getMongoDBDatabaseName());
        purgeColl = mongoDB.getCollection(PURGE_COLLECTION);
        ageOffColl = mongoDB.getCollection(AGEOFF_COLLECTION);

        Long purgeId = inputPurgeState.getPurgeId();

        // Attempt to get the CentralPurgeState
        BasicDBObject query = new BasicDBObject(EzCentralPurgeServiceHelpers.PurgeId, purgeId);
        DBCursor cursor = purgeColl.find(query);

        boolean ageOff = false;
        CentralPurgeStatus centralPurgeStatus;

        // Check to see if the id passed corresponds to a purge event
        if (cursor.hasNext()) {
            //Set the map of application states and the set of ids to purge
            dbObject = cursor.next();
            centralPurgeState = decodeCentralPurgeState((DBObject) dbObject.get(CentralPurgeStateString));
            appStatesMap = centralPurgeState.getApplicationStates();

            PurgeInfo purgeInfo = centralPurgeState.getPurgeInfo();
            centralCompletelyPurgedSet = purgeInfo.getPurgeDocumentIds();
            centralToBePurgedSet = purgeInfo.getPurgeDocumentIds();
            centralPurgeStatus = centralPurgeState.getCentralStatus();
        } else {
            query = new BasicDBObject(EzCentralPurgeServiceHelpers.AgeOffEventId, purgeId);
            // If it doesn't exist as a purge, check to see if it is an ageOffEvent
            cursor = ageOffColl.find(query);

            if (cursor.hasNext()) {
                //Set the map of application states and the set of ids to purge
                dbObject = cursor.next();
                centralAgeOffEventState = decodeCentralAgeOffEventState(
                        (DBObject) dbObject.get(CentralAgeOffStateString));
                appStatesMap = centralAgeOffEventState.getApplicationStates();
                AgeOffEventInfo ageOffEventInfo = centralAgeOffEventState.getAgeOffEventInfo();
                centralToBePurgedSet = ageOffEventInfo.getPurgeSet();
                centralCompletelyPurgedSet = ageOffEventInfo.getPurgeSet();
                centralPurgeStatus = centralAgeOffEventState.getCentralStatus();
                ageOff = true;
            } else {
                throw new CentralPurgeServiceException("No purge with purgeId:" + purgeId);
            }
        }

        ServicePurgeState servicePurgeState = null;
        Map<String, ServicePurgeState> servicePurgeStatesMap = null;
        ApplicationPurgeState applicationPurgeState = null;
        // Gets the mongoDB entry for the service that is updating it's purge status.
        try {
            applicationPurgeState = appStatesMap.get(applicationName);
            servicePurgeStatesMap = applicationPurgeState.getServicePurgestates();
            servicePurgeState = servicePurgeStatesMap.get(serviceName);
            if (servicePurgeState == null) {
                throw new NullPointerException("Failed to find [" + applicationName + "_" + serviceName
                        + "] for purgeId" + inputPurgeState.getPurgeId() + " to update");
            }
        } catch (NullPointerException e) {
            throw e;
        }
        // Update the ServicePurgeState and put it back
        servicePurgeState.setTimeLastPoll(getCurrentDateTime());
        servicePurgeState.setPurgeState(inputPurgeState);
        servicePurgeStatesMap.put(serviceName, servicePurgeState);
        appStatesMap.put(applicationName, applicationPurgeState);
        boolean interventionNeeded = false;
        boolean stopped = true;
        Set<Long> servicePurged;

        /* These nested loops check each service to get an update of the CompletelyPurgedSet, see if any purge
         * service is still running and if manual intervention is/will be needed.
         */
        // Loop through all apps
        for (String appNameIter : appStatesMap.keySet()) {
            ApplicationPurgeState applicationPurgeStateInner = appStatesMap.get(appNameIter);
            Map<String, ServicePurgeState> servicePurgeStates = applicationPurgeStateInner
                    .getServicePurgestates();

            //Loop through all services
            for (String serviceNameIter : servicePurgeStates.keySet()) {
                PurgeState applicationServicePurgeState = servicePurgeStates.get(serviceNameIter)
                        .getPurgeState();
                servicePurged = applicationServicePurgeState.getPurged();
                applicationServicePurgeState.getPurged().removeAll(applicationServicePurgeState.getNotPurged());

                //update based on current service
                centralCompletelyPurgedSet = Sets.intersection(centralCompletelyPurgedSet, servicePurged);
                if (serviceStillRunning(applicationServicePurgeState.getPurgeStatus())) {
                    stopped = false;
                }
                if (!(applicationServicePurgeState.getNotPurged().isEmpty())) {
                    interventionNeeded = true;
                }
            }
        }

        // If all of the ids that needed to be purged have been purged then it resolved automatically
        boolean resolved = false;
        if (centralCompletelyPurgedSet.containsAll(centralToBePurgedSet)) {
            resolved = true;
            centralPurgeStatus = CentralPurgeStatus.RESOLVED_AUTOMATICALLY;
        }
        // If one of the services has a document that couldn't be
        // automatically resolved, manual intervention is needed
        if (centralPurgeStatus != CentralPurgeStatus.RESOLVED_MANUALLY
                && centralPurgeStatus != CentralPurgeStatus.RESOLVED_AUTOMATICALLY) {
            if (interventionNeeded) {
                if (stopped) {
                    centralPurgeStatus = CentralPurgeStatus.STOPPED_MANUAL_INTERVENTION_NEEDED;
                } else {
                    centralPurgeStatus = CentralPurgeStatus.ACTIVE_MANUAL_INTERVENTION_WILL_BE_NEEDED;
                }
            }
        } else {
            resolved = true;
        }

        if (ageOff == false) {
            // If it is a purge event, update the CentralPurgeState in MongoDB
            centralPurgeState.setApplicationStates(appStatesMap);
            centralPurgeState.setCentralStatus(centralPurgeStatus);

            dbObject.put(CentralPurgeStateString, encodeCentralPurgeState(centralPurgeState));
            purgeColl.update(query, dbObject, true, false);

            // Also need to update the purge in the ProvenanceService
            ProvenanceService.Client provenanceClient = null;
            try {
                provenanceClient = getProvenanceThriftClient(pool);
                EzSecurityToken centralTokenForProvenance = securityClient.fetchDerivedTokenForApp(token,
                        getProvenanceSecurityId(pool));
                provenanceClient.updatePurge(centralTokenForProvenance, purgeId, centralCompletelyPurgedSet,
                        null, resolved);
                PurgeInfo purgeInfo = provenanceClient.getPurgeInfo(centralTokenForProvenance, purgeId);
                centralPurgeState.setPurgeInfo(purgeInfo);
                updateCentralPurgeState(centralPurgeState, purgeId);
            } finally {
                if (provenanceClient != null)
                    returnClientToPool(provenanceClient, pool);
            }
        } else {
            // If it is an ageOffEvent, update the CentralAgeOffState in MongoDB
            centralAgeOffEventState.setApplicationStates(appStatesMap);
            centralAgeOffEventState.setCentralStatus(centralPurgeStatus);

            AgeOffEventInfo ageOffEventInfo = centralAgeOffEventState.getAgeOffEventInfo();
            ageOffEventInfo.setCompletelyPurgedSet(centralCompletelyPurgedSet);
            ageOffEventInfo.setResolved(resolved);
            centralAgeOffEventState.setAgeOffEventInfo(ageOffEventInfo);
            dbObject.put(CentralAgeOffStateString, encodeCentralAgeOffEventState(centralAgeOffEventState));
            ageOffColl.update(query, dbObject, true, false);

            // If there are ids aged by all services then tell the provenance service
            if (!centralCompletelyPurgedSet.isEmpty()) {
                ProvenanceService.Client provenanceClient = null;
                try {
                    provenanceClient = getProvenanceThriftClient(pool);
                    EzSecurityToken centralTokenForProvenance = securityClient.fetchDerivedTokenForApp(token,
                            getProvenanceSecurityId(pool));
                    provenanceClient.markDocumentAsAged(centralTokenForProvenance, centralCompletelyPurgedSet);
                } finally {
                    if (provenanceClient != null)
                        returnClientToPool(provenanceClient, pool);
                }
            }
        }
        evt.arg("status", servicePurgeState.getPurgeState().getPurgeStatus().name());
        logger.info("[" + applicationName + "_" + serviceName + "] purgeId:" + inputPurgeState.getPurgeId()
                + " purgedIds:" + inputPurgeState.getPurged() + " status:" + inputPurgeState.getPurgeStatus());
    } catch (CentralPurgeServiceException e) {
        logError(e, evt, e.getMessage());
        throw e;
    } catch (NullPointerException e) {
        logError(e, evt, "CentralPurgeService encountered an exception in updatePurge:["
                + e.getClass().getName() + ":" + e.getMessage() + "]");
        throw new CentralPurgeServiceException("CentralPurgeService encountered an exception in updatePurge:["
                + e.getClass().getName() + ":" + e.getMessage() + "]");
    } catch (EzSecurityTokenException e) {
        logError(e, evt, "CentralPurgeService failed when trying to validate token:[" + e.getClass().getName()
                + ":" + e.getMessage() + "]");
        throw e;
    } catch (UnknownHostException e) {
        logError(e, evt, "CentralPurgeService unable to reach MongoDB in updatePurge:[" + e.getClass().getName()
                + ":" + e.getMessage() + "]");
        throw new CentralPurgeServiceException("CentralPurgeService unable to reach MongoDB in updatePurge:["
                + e.getClass().getName() + ":" + e.getMessage() + "]");
    } catch (Exception e) {
        logError(e, evt, "CentralPurgeService encountered an exception in updatePurge:["
                + e.getClass().getName() + ":" + e.getMessage() + "]");
        throw new CentralPurgeServiceException("CentralPurgeService encountered an exception in updatePurge:["
                + e.getClass().getName() + ":" + e.getMessage() + "]");
    } finally {
        // always close the pool and client and emit the audit record
        if (pool != null)
            pool.close();
        auditLogger.logEvent(evt);
        logEventToPlainLogs(logger, evt);
        if (mongoClient != null)
            mongoClient.close();
    }
}

From source file:ezbake.services.centralPurge.thrift.EzCentralPurgeServiceHandler.java

License:Apache License

/**
 * Upserts the given CentralAgeOffEventState into the age-off collection,
 * keyed by the age-off event id.
 *
 * @param centralAgeOffEventState the state to persist
 * @param ageOffId                id of the age-off event
 * @throws UnknownHostException if the MongoDB instance cannot be reached
 */
private void updateCentralAgeOffEventState(CentralAgeOffEventState centralAgeOffEventState, Long ageOffId)
        throws UnknownHostException {

    Mongo mongoClient = null;
    try {
        MongoConfigurationHelper mongoConfigurationHelper = new MongoConfigurationHelper(configuration);
        MongoHelper mongoHelper = new MongoHelper(configuration);
        mongoClient = mongoHelper.getMongo();
        DB mongoDB = mongoClient.getDB(mongoConfigurationHelper.getMongoDBDatabaseName());
        DBCollection ageOffColl = mongoDB.getCollection(AGEOFF_COLLECTION);

        // upsert: update the state document if it exists, insert it otherwise
        BasicDBObject query = new BasicDBObject(EzCentralPurgeServiceHelpers.AgeOffEventId, ageOffId);
        BasicDBObject ageOffEvent = new BasicDBObject().append(AgeOffEventId, ageOffId)
                .append(CentralAgeOffStateString, encodeCentralAgeOffEventState(centralAgeOffEventState));
        ageOffColl.update(query, ageOffEvent, true /* upsert */, false /* multi */);
    } finally {
        if (mongoClient != null)
            mongoClient.close();
    }
}

From source file:ezbake.services.centralPurge.thrift.EzCentralPurgeServiceHandler.java

License:Apache License

/**
 * Upserts the given CentralPurgeState into the purge collection, keyed by the
 * purge id.
 *
 * @param centralPurgeState the state to persist
 * @param purgeId           id of the purge event (used as the query key)
 * @throws UnknownHostException if the MongoDB instance cannot be reached
 */
private void updateCentralPurgeState(CentralPurgeState centralPurgeState, Long purgeId)
        throws UnknownHostException {
    Mongo mongoClient = null;
    try {
        MongoConfigurationHelper mongoConfigurationHelper = new MongoConfigurationHelper(configuration);
        MongoHelper mongoHelper = new MongoHelper(configuration);
        mongoClient = mongoHelper.getMongo();
        DB mongoDB = mongoClient.getDB(mongoConfigurationHelper.getMongoDBDatabaseName());
        DBCollection purgeColl = mongoDB.getCollection(PURGE_COLLECTION);

        // upsert: update the state document if it exists, insert it otherwise
        BasicDBObject query = new BasicDBObject(PurgeId, purgeId);
        BasicDBObject purgeStatus = new BasicDBObject()
                .append(PurgeId, centralPurgeState.getPurgeInfo().getId())
                .append(CentralPurgeStateString, encodeCentralPurgeState(centralPurgeState));
        purgeColl.update(query, purgeStatus, true /* upsert */, false /* multi */);
    } finally {
        if (mongoClient != null)
            mongoClient.close();
    }
}