Example usage for com.mongodb MongoException MongoException

List of usage examples for com.mongodb MongoException MongoException

Introduction

On this page you can find example usage for the com.mongodb MongoException(String msg) constructor.

Prototype

public MongoException(final String msg) 
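
Before the project examples below, here is a minimal, self-contained sketch of the constructor in use. The requirePng helper and its content-type check are purely illustrative and not taken from any of the source files listed on this page; only MongoException(String msg) and getMessage() come from the MongoDB Java driver.

import com.mongodb.MongoException;

public class MongoExceptionMessageDemo {

    // Hypothetical validation helper: rejects anything that is not a PNG upload.
    static void requirePng(String contentType) {
        if (!"image/png".equals(contentType)) {
            // MongoException(final String msg) wraps a plain error message in an unchecked exception.
            throw new MongoException("Only PNG uploads are supported, got: " + contentType);
        }
    }

    public static void main(String[] args) {
        try {
            requirePng("image/gif");
        } catch (MongoException e) {
            // getMessage() returns the text that was passed to the constructor above.
            System.err.println("Rejected upload: " + e.getMessage());
        }
    }
}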

Usage

From source file:de.steilerdev.myVerein.server.model.GridFSRepository.java

License:Open Source License

public GridFSFile storeClubLogo(MultipartFile clubLogoFile) throws MongoException {
    if (!clubLogoFile.getContentType().startsWith("image")) {
        logger.warn("Trying to store a club logo, which is not an image");
        throw new MongoException("The file needs to be an image");
    } else if (!(clubLogoFile.getContentType().equals("image/jpeg")
            || clubLogoFile.getContentType().equals("image/png"))) {
        logger.warn("Trying to store an incompatible image " + clubLogoFile.getContentType());
        throw new MongoException("The used image is not compatible, please use only PNG or JPG files");
    } else {
        File clubLogoTempFile = null;
        try {
            clubLogoTempFile = File.createTempFile("tempClubLogo", "png");
            clubLogoTempFile.deleteOnExit();
            if (clubLogoFile.getContentType().equals("image/png")) {
                logger.debug("No need to convert club logo");
                clubLogoFile.transferTo(clubLogoTempFile);
            } else {
                logger.info("Converting club logo file to png");
                //Reading, converting and writing club logo
                ImageIO.write(ImageIO.read(clubLogoFile.getInputStream()), clubLogoFileFormat,
                        clubLogoTempFile);
            }

            //Deleting current file
            deleteCurrentClubLogo();

            try (FileInputStream clubLogoStream = new FileInputStream(clubLogoTempFile)) {
                logger.debug("Saving club logo");
                //Saving file
                return gridFS.store(clubLogoStream, clubLogoFileName, clubLogoFileFormatMIME);
            }
        } catch (IOException e) {
            e.printStackTrace();
            throw new MongoException("Unable to store file");
        } finally {
            if (clubLogoTempFile != null) {
                clubLogoTempFile.delete();
            }
        }
    }
}

From source file:dk.au.cs.karibu.testdoubles.FakeObjectStorage.java

License:Apache License

@Override
public void process(String collectionName, BasicDBObject dbo) {
    if (storeMethodInvocationCount == countOfStoresBeforeThrow) {
        storeMethodInvocationCount++;
        throw new MongoException(toThrow);
    }
    storeMethodInvocationCount++;

    // System.out.println(" FakeObjectStorage: storing "+dbo);
    List<BasicDBObject> collection = database.get(collectionName);
    if (collection == null) {
        collection = new ArrayList<BasicDBObject>(2);
        database.put(collectionName, collection);
    }
    collection.add(dbo);
}

From source file:ezbake.data.mongo.HandlerForDriverFindCalls.java

License:Apache License

private void checkNonSupportedQueries(DBObject query, String collection) {
    if (query.get("$where") != null) {
        throw new MongoException(
                "$where operator not supported at this time since redact could not be enforced");
    }
}

From source file:ezbake.data.mongo.HandlerForDriverFindCalls.java

License:Apache License

public ResultsWrapper find_driver(String collection, EzFindRequest ezFindRequest, EzSecurityToken token)
        throws TException, EzMongoDriverException {
    if (!collection.equals("$cmd") && !collection.equals("system.indexes") &&
    // !collection.equals("fs.chunks") && !collection.equals("fs.files") &&
            !collection.equals("system.namespaces")) {
        collection = appName + "_" + collection;
    }

    appLog.info("find_driver() from collection: {}", collection);

    TokenUtils.validateSecurityToken(token, parent_handler.handler.getConfigurationProperties());
    ResultsWrapper rw = new ResultsWrapper();
    try {
        DBObject ref = (DBObject) new ObjectInputStream(new ByteArrayInputStream(ezFindRequest.getRef()))
                .readObject();

        checkNonSupportedQueries(ref, collection);

        DBObject fields = (DBObject) new ObjectInputStream(new ByteArrayInputStream(ezFindRequest.getFields()))
                .readObject();

        appLog.info("find_driver() ref: {}", ref);
        appLog.info("find_driver() fields: {}", fields);

        String originalDatabase = null;
        boolean isSysCommand = false;
        if (collection.equals("system.namespaces")) {
            isSysCommand = true;
        }
        if (collection.equals("system.indexes")) {
            isSysCommand = true;
            String ns = (String) ref.get("ns");
            if (ns != null) {
                String[] parts = StringUtils.split(ns, ".", 2);
                originalDatabase = parts[0];
                String newNS = dbName + "." + appName + "_" + parts[1];
                appLog.info("system.indexes newNS: {}", newNS);
                ref.put("ns", newNS);
            }
        }

        if (collection.equals("$cmd")) {
            isSysCommand = true;
            String eval = (String) ref.get("$eval");
            if (eval != null) {
                throw new MongoException("Eval() not supported in ezmongo");
            }

            String geoNear = (String) ref.get("geoNear");
            if (geoNear != null) {
                geoNear = appName + "_" + geoNear;
                appLog.info("newgeoNear: {}", geoNear);
                ref.put("geoNear", geoNear);
            }

            String collstats = (String) ref.get("collstats");
            if (collstats != null) {
                collstats = appName + "_" + collstats;
                appLog.info("newcollstats: {}", collstats);
                ref.put("collstats", collstats);
            }

            String create = (String) ref.get("create");
            if (create != null) {
                create = appName + "_" + create;
                appLog.info("newcreate: {}", create);
                ref.put("create", create);
            }

            String deleteIndexes = (String) ref.get("deleteIndexes");
            if (deleteIndexes != null) {
                String newDeleteIndexes = appName + "_" + deleteIndexes;
                appLog.info("newDeleteIndexes: {}", newDeleteIndexes);
                ref.put("deleteIndexes", newDeleteIndexes);
            }

            String distinct = (String) ref.get("distinct");
            if (distinct != null) {
                String newDistinct = appName + "_" + distinct;
                appLog.info("newDistinct: {}", newDistinct);
                ref.put("distinct", newDistinct);
            }

            String count = (String) ref.get("count");
            if (count != null) {
                String newCount = appName + "_" + count;
                appLog.info("newCount: {}", newCount);
                ref.put("count", newCount);
            }

            String findandmodify = (String) ref.get("findandmodify");
            if (findandmodify != null) {
                String newFindandmodify = appName + "_" + findandmodify;
                appLog.info("newFindandmodify: {}", newFindandmodify);
                ref.put("findandmodify", newFindandmodify);
            }

            String aggregate = (String) ref.get("aggregate");
            if (aggregate != null) {
                aggregate = appName + "_" + aggregate;
                appLog.info("newaggregate: {}", aggregate);
                ref.put("aggregate", aggregate);

                List<DBObject> pipeline = (List) ref.get("pipeline");
                if (pipeline != null) {
                    appLog.info("pipeline: {}", pipeline);
                    for (DBObject o : pipeline) {
                        String out = (String) o.get("$out");
                        if (out != null) {
                            appLog.info("NEED to convert: {}", out);
                            out = appName + "_" + out;
                            o.put("$out", out);
                        }
                    }
                }
            }

            String mapreduce = (String) ref.get("mapreduce");
            if (mapreduce != null) {
                mapreduce = appName + "_" + mapreduce;
                appLog.info("newmapreduce: {}", mapreduce);
                ref.put("mapreduce", mapreduce);

                DBObject out = (DBObject) ref.get("out");
                if (out != null) {
                    String replace = (String) out.get("replace");
                    if (replace != null) {
                        replace = appName + "_" + replace;
                        out.put("replace", replace);
                        ref.put("out", out);
                    }
                }

                appLog.info("newref: " + ref);
            }

            DBObject group = (DBObject) ref.get("group");
            if (group != null) {
                String ns = (String) group.get("ns");
                if (ns != null) {
                    String newNS = appName + "_" + ns;
                    appLog.info("group newNS: {}", newNS);
                    group.put("ns", newNS);
                    ref.put("group", group);
                }
            }
        }

        int batchSize = ezFindRequest.getBatchSize();
        int limit = ezFindRequest.getLimit();
        int options = ezFindRequest.getOptions();
        int numToSkip = ezFindRequest.getNumToSkip();

        ReadPreference readPref = (ReadPreference) new ObjectInputStream(
                new ByteArrayInputStream(ezFindRequest.getReadPref())).readObject();

        DBDecoder decoder = (DBDecoder) new ObjectInputStream(
                new ByteArrayInputStream(ezFindRequest.getDecoder())).readObject();

        HashMap<String, String> auditParamsMap = new HashMap<>();
        auditParamsMap.put("action", "find_driver");
        auditParamsMap.put("collectionName", collection);
        auditParamsMap.put("ref", parent_handler.handler.printMongoObject(ref));
        auditParamsMap.put("fields", parent_handler.handler.printMongoObject(fields));
        auditParamsMap.put("readPref", parent_handler.handler.printMongoObject(readPref));
        auditParamsMap.put("batchSize", String.valueOf(batchSize));
        auditParamsMap.put("limit", String.valueOf(limit));
        auditParamsMap.put("options", String.valueOf(options));
        auditParamsMap.put("numToSkip", String.valueOf(numToSkip));
        parent_handler.handler.auditLog(token, AuditEventType.FileObjectAccess, auditParamsMap);

        QueryResultIterator qri = null;
        if ((isSysCommand || ref.get("$explain") != null) && ref.get("distinct") == null) {
            if (ezFindRequest.isSetEncoder()) {
                DBEncoder encoder = (DBEncoder) new ObjectInputStream(
                        new ByteArrayInputStream(ezFindRequest.getEncoder())).readObject();
                qri = parent_handler.handler.db.getCollection(parent_handler.normalizeCollection(collection))
                        .find(ref, fields, numToSkip, batchSize, limit, options, readPref, decoder, encoder);
            } else {
                qri = parent_handler.handler.db.getCollection(parent_handler.normalizeCollection(collection))
                        .find(ref, fields, numToSkip, batchSize, limit, options, readPref, decoder);
            }
        } else {
            // must convert to agg pipeline in order to perform redact
            qri = convertFindForDriver(parent_handler.normalizeCollection(collection), ref, fields, "",
                    numToSkip, limit, batchSize, readPref, token, EzMongoHandler.READ_OPERATION);
        }

        String mapreduce = (String) ref.get("mapreduce");
        if (mapreduce != null) {
            Response r = qri.getResponse();
            List<DBObject> obs = r.get_objects();
            if (obs.size() == 1) {
                DBObject o = obs.get(0);
                String resultTable = (String) o.get("result");
                resultTable = StringUtils.replace(resultTable, appName + "_", "", 1);
                appLog.info("find() Replaced map reduce result table name: {}", resultTable);
                o.put("result", resultTable);
            }
        }

        if (collection.equals("system.namespaces")) {
            appLog.info("find() reverting collection names from system.namespaces");
            Response r = qri.getResponse();
            List<DBObject> obs = r.get_objects();
            for (DBObject o : obs) {
                String name = (String) o.get("name");
                if (!name.equals(dbName + "." + "system.indexes")) {
                    String newName = StringUtils.replace(name, appName + "_", "", 1);
                    o.put("name", newName);
                }
            }
            appLog.info("reverted obs: {}", obs);
        }

        int originalHashcode = qri.hashCode();

        qri.setOriginalHashCode(originalHashcode);

        parent_handler.handler.setResponseObjectWithCursor(rw, qri);

        appLog.info("QueryResultIterator hashcode: {}", qri.hashCode());

        List<DBObject> results = parent_handler.putIntoITmap(qri, originalHashcode);

        //            if (parent_handler.ifRequestIsForAGridFSObject(ref.get("_id"))){
        //                GridFS _fs = new GridFS(parent_handler.handler.mongoTemplate.getDb());
        //                if (ref.get("_id") != null) {
        //                    GridFSDBFile file = _fs.findOne(new BasicDBObject("_id", ref.get("_id")));
        //                    rw.setResultSet(parent_handler.ser(file));
        //                } else {
        //                    List<GridFSDBFile> files = _fs.find(ref, fields);
        //                    rw.setResultSet(parent_handler.ser(files));
        //                }
        //            } else {
        rw.setResultSet(parent_handler.handler.addDBCursorResult(results).toByteArray());
        //            }

    } catch (Exception e) {
        e.printStackTrace();
        EzMongoDriverException eme = new EzMongoDriverException();
        eme.setEx(parent_handler.ser(e));
        throw eme;
    }

    return rw;
}

From source file:ezbake.data.mongo.HandlerForDriverFindCalls.java

License:Apache License

protected QueryResultIterator convertFindForDriver(String collectionName, DBObject jsonQuery,
        DBObject projection, String jsonSort, int skip, int limit, int batchSize, ReadPreference readPref,
        EzSecurityToken token, String operationType) throws Exception {

    appLog.info("convertFindForDriver() query: " + jsonQuery);

    AggregationOptions opts = null;
    if (batchSize > 0) {
        opts = AggregationOptions.builder().outputMode(AggregationOptions.OutputMode.CURSOR)
                .batchSize(batchSize).build();
    } else {
        opts = AggregationOptions.builder().build();
    }

    Object distinct = jsonQuery.get("distinct");
    Object key = null;
    if (distinct != null) {
        key = jsonQuery.get("key");

        Object q = jsonQuery.get("query");
        if (q != null) {
            jsonQuery = (DBObject) q;
        }
    }

    jsonQuery = checkForQueryComment(jsonQuery);
    jsonQuery = checkForshowDiskLoc(jsonQuery);

    Object returnKey = jsonQuery.get("$returnKey");
    if (returnKey != null) {
        Object q = jsonQuery.get("$query");
        if (q != null) {
            jsonQuery = (DBObject) q;
        }
    }

    Object snapshot = jsonQuery.get("$snapshot");
    if (snapshot != null) {
        Object ob = jsonQuery.get("$orderby");
        if (ob != null) {
            throw new MongoException("Do not use $snapshot with cursor.hint() and cursor.sort() methods");
        }
        Object hint = jsonQuery.get("$hint");
        if (hint != null) {
            throw new MongoException("Do not use $snapshot with cursor.hint() and cursor.sort() methods");
        }
        Object q = jsonQuery.get("$query");
        if (q != null) {
            jsonQuery = (DBObject) q;
        }
    }

    Object explain = jsonQuery.get("$explain");
    if (explain != null) {
        Object q = jsonQuery.get("$query");
        if (q != null) {
            jsonQuery = (DBObject) q;
        }
    }

    Object orderby = jsonQuery.get("$orderby");
    if (orderby != null) {
        Object q = jsonQuery.get("$query");
        if (q != null) {
            jsonQuery = (DBObject) q;
        }
        jsonSort = orderby.toString();
    }

    Object maxScan = jsonQuery.get("$maxScan");
    if (maxScan != null) {
        Object q = jsonQuery.get("$query");
        if (q != null) {
            jsonQuery = (DBObject) q;
        }
        limit = (Integer) maxScan;
    }

    Object min = jsonQuery.get("$min");
    if (min != null) {
        Object q = jsonQuery.get("$query");
        if (q != null) {
            jsonQuery = (DBObject) q;
        }
    }

    Object max = jsonQuery.get("$max");
    if (max != null) {
        Object q = jsonQuery.get("$query");
        if (q != null) {
            jsonQuery = (DBObject) q;
        }
    }

    QueryResultIterator qri = null;
    DBObject query = null;
    if (jsonQuery != null && jsonQuery.keySet().size() > 0) {
        query = new BasicDBObject("$match", jsonQuery);
    }

    DBObject[] additionalOps = parent_handler.handler.getMongoFindHelper().getFindAggregationCommandsArray(skip,
            limit, (projection != null && projection.keySet().size() > 0) ? projection.toString() : "",
            jsonSort, token, operationType);

    List<DBObject> pipeline = new ArrayList<DBObject>();
    if (query != null) {
        pipeline.add(query);
    }

    Collections.addAll(pipeline, additionalOps);

    appLog.info("convertFindForDriver() final pipeline query: " + pipeline);

    Cursor cursor = null;
    if (distinct != null) {
        qri = handleDistinctCall(jsonQuery, readPref, token, opts, distinct, key, pipeline);
    } else if (max != null && min != null) {
        // TODO can max AND min be possible? investigate...
    } else if (max != null) {
        qri = handleMaxCall(collectionName, max, jsonQuery, readPref, token, opts, pipeline);
    } else if (min != null) {
        qri = handleMinCall(collectionName, min, jsonQuery, readPref, token, opts, pipeline);
    } else {
        cursor = parent_handler.handler.db.getCollection(collectionName).aggregate(pipeline, opts, readPref);
        if (cursor instanceof QueryResultIterator) {
            qri = (QueryResultIterator) cursor;
        } else {
            appLog.info("UNKNOWN CURSOR RETURNED: {}", cursor.toString());
            throw new Exception("Find converted to Aggregate pipeline did not return a QueryResultIterator: "
                    + cursor.toString());
        }
    }

    return qri;
}

From source file:ezbake.data.mongo.MongoDriverHandler.java

License:Apache License

public EzWriteResult update_driver(String collection, EzUpdateRequest req, EzSecurityToken token)
        throws TException, EzMongoDriverException {
    if (!collection.equals("fs.chunks") && !collection.equals("fs.files")) {
        collection = appName + "_" + collection;
    }

    appLog.debug("update_driver() collection: {}", collection);

    TokenUtils.validateSecurityToken(token, handler.getConfigurationProperties());
    EzWriteResult ewr = new EzWriteResult();

    try {
        DBCollection c = handler.db.getCollection(normalizeCollection(collection));

        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(req.getQuery()));
        DBObject query = (DBObject) ois.readObject();

        appLog.debug("QUERY {}", query);

        ois = new ObjectInputStream(new ByteArrayInputStream(req.getDbUpdateObject()));
        DBObject updateObject = (DBObject) ois.readObject();

        appLog.debug("UPDATE OBJECT {}", updateObject);

        ois = new ObjectInputStream(new ByteArrayInputStream(req.getWriteConcern()));
        WriteConcern concern = (WriteConcern) ois.readObject();

        ois = new ObjectInputStream(new ByteArrayInputStream(req.getDbEncoder()));
        DBEncoder encoder = (DBEncoder) ois.readObject();

        Boolean isDriverUnitTestMode = req.isIsUnitTestMode();

        HashMap<String, String> auditParamsMap = new HashMap<>();
        auditParamsMap.put("action", "update_driver");
        auditParamsMap.put("collectionName", collection);
        auditParamsMap.put("query", handler.printMongoObject(query));
        auditParamsMap.put("updateObject", handler.printMongoObject(updateObject));
        auditParamsMap.put("concern", handler.printMongoObject(concern));
        auditParamsMap.put("encoder", handler.printMongoObject(encoder));
        handler.auditLog(token, AuditEventType.FileObjectModify, auditParamsMap);

        // assume it's a WRITE operation - then we need to iterate through the redacted results
        //   and see if they are updating the security fields - then it becomes MANAGE operation.
        QueryResultIterator qri = findHandler.convertFindForDriver(normalizeCollection(collection), query, null,
                "", 0, 0, 0, null, token, EzMongoHandler.WRITE_OPERATION);

        final List<Object> idList = new ArrayList<Object>();
        while (qri.hasNext()) {
            DBObject o = qri.next();
            if (!isDriverUnitTestMode) {
                // also need to check if the user has the auths to update the record
                try {
                    boolean isManageOperation = handler.getMongoUpdateHelper().isUpdatingSecurityFields(o,
                            updateObject);
                    handler.getMongoInsertHelper().checkAbilityToInsert(token, null, updateObject, o,
                            isManageOperation, true);
                    appLog.debug("can update DBObject (_id): {}", o.get("_id"));
                    idList.add(o.get("_id"));
                } catch (ClassCastException | VisibilityParseException | EzMongoBaseException e) {
                    appLog.error(e.toString());
                    appLog.debug("User does not have the auths to update record: {}", o);
                }
            } else {
                appLog.debug("can update DBObject (_id): {}", o.get("_id"));
                idList.add(o.get("_id"));
            }
        }

        final DBObject inClause = new BasicDBObject("$in", idList);
        final DBObject redactedQuery = new BasicDBObject("_id", inClause);

        WriteResult res = null;
        // only update the objects that were returned after performing the redact
        if (idList.size() > 0) {
            res = c.update(redactedQuery, updateObject, req.isUpsert(), req.isMulti(), concern, encoder);
        } else {
            throw new MongoException("Nothing to update, perhaps redact prohibited. "
                    + "Also note that upsert is not supported. If you used save() to make this call,"
                    + " try using insert() instead is possible");
        }

        appLog.debug("WriteResult: {}", res);

        ByteArrayOutputStream bOut = new ByteArrayOutputStream();
        new ObjectOutputStream(bOut).writeObject(res);

        ewr.setWriteResult(bOut.toByteArray());
    } catch (MongoException e) {
        appLog.error(e.toString());
        addWriteResultException(ewr, e);
    } catch (Exception e) {
        appLog.error(e.toString());
        throw new TException(e);
    }
    return ewr;
}

From source file:me.lightspeed7.mongofs.gridfs.GridFSDBFile.java

License:Apache License

byte[] getChunk(final int i) {

    if (fs == null) {
        throw new IllegalStateException("No GridFS instance defined!");
    }

    DBObject chunk = fs.getChunksCollection().findOne(new BasicDBObject("files_id", id).append("n", i));
    if (chunk == null) {
        throw new MongoException("Can't find a chunk!  file id: " + id + " chunk: " + i);
    }

    return (byte[]) chunk.get("data");
}

From source file:me.lightspeed7.mongofs.gridfs.GridFSFile.java

License:Apache License

/**
 * Saves the file entry to the files collection
 *
 * @throws MongoException
 */
public void save() {

    if (fs == null) {
        throw new MongoException("need fs");
    }
    fs.getFilesCollection().save(this);
}

From source file:me.lightspeed7.mongofs.gridfs.GridFSFile.java

License:Apache License

/**
 * Verifies that the MD5 matches between the database and the local file. This should be called after transferring a file.
 *
 * @throws MongoException
 */
public void validate() {

    if (fs == null) {
        throw new MongoException("no fs");
    }
    if (md5 == null) {
        throw new MongoException("no md5 stored");
    }

    DBObject cmd = new BasicDBObject("filemd5", id);
    cmd.put("root", fs.getBucketName());
    DBObject res = fs.getDB().command(cmd);
    if (res != null && res.containsField("md5")) {
        String m = res.get("md5").toString();
        if (m.equals(md5)) {
            return;
        }
        throw new MongoException("md5 differ.  mine [" + md5 + "] theirs [" + m + "]");
    }

    // no md5 from the server
    throw new MongoException("no md5 returned from server: " + res);

}

From source file:me.lightspeed7.mongofs.gridfs.GridFSFile.java

License:Apache License

@Override
public void markAsPartialObject() {

    throw new MongoException("Can't load partial GridFSFile file");
}