Example usage for com.mongodb DBCollection insert

List of usage examples for com.mongodb DBCollection insert

Introduction

On this page you can find example usage for com.mongodb DBCollection insert.

Prototype

public WriteResult insert(final List<? extends DBObject> documents) 

Source Link

Document

Insert documents into a collection.

Usage

From source file:org.knowrob.knowrob_sim_games.MongoSimGames.java

License:Open Source License

/**
 * Query the trajectory of the given link of the given model between the
 * given timestamps and save the result in MongoDB.
 *
 * Fix: the per-call MongoClient and the aggregation cursor were never
 * closed, leaking a connection pool and a server-side cursor on every call.
 *
 * @param start_ts       start of the queried time interval (inclusive)
 * @param end_ts         end of the queried time interval (inclusive)
 * @param model_name     name of the model containing the link
 * @param link_name      name of the link whose trajectory is queried
 * @param traj_db_name   database the trajectory is written to
 * @param traj_coll_name collection the trajectory is written to
 */
public void WriteLinkTrajectory(double start_ts, double end_ts, String model_name, String link_name,
        String traj_db_name, String traj_coll_name) {

    // create the pipeline operations, first with the $match check the times
    DBObject match_time = new BasicDBObject("$match",
            new BasicDBObject("timestamp", new BasicDBObject("$gte", start_ts).append("$lte", end_ts)));

    // $unwind the models
    DBObject unwind_models = new BasicDBObject("$unwind", "$models");

    // $match for the given model name from the unwound models
    DBObject match_model = new BasicDBObject("$match", new BasicDBObject("models.name", model_name));

    // $project only the fields needed for the link lookup
    DBObject proj_links_fields = new BasicDBObject("_id", 0);
    proj_links_fields.put("timestamp", 1);
    proj_links_fields.put("models.links", 1);
    DBObject project_links = new BasicDBObject("$project", proj_links_fields);

    // $unwind the links
    DBObject unwind_links = new BasicDBObject("$unwind", "$models.links");

    // $match for the given link name from the unwound links
    DBObject match_link = new BasicDBObject("$match", new BasicDBObject("models.links.name", link_name));

    // final $project: keep the timestamp and flatten pos/rot to the top level
    DBObject proj_fields = new BasicDBObject("timestamp", 1);
    proj_fields.put("pos", "$models.links.pos");
    proj_fields.put("rot", "$models.links.rot");
    DBObject project = new BasicDBObject("$project", proj_fields);

    // run aggregation
    List<DBObject> pipeline = Arrays.asList(match_time, unwind_models, match_model, project_links, unwind_links,
            match_link, project);

    AggregationOptions aggregationOptions = AggregationOptions.builder().batchSize(100)
            .outputMode(AggregationOptions.OutputMode.CURSOR).allowDiskUse(true).build();

    Cursor cursor = this.coll.aggregate(pipeline, aggregationOptions);

    MongoClient mongoClient = null;
    try {
        mongoClient = new MongoClient(this.dbHost, 27017);

        DB traj_db = mongoClient.getDB(traj_db_name);

        // only write if the target collection does not exist yet
        if (traj_db.collectionExists(traj_coll_name)) {
            System.out
                    .println("!!! Collection: \'" + traj_db_name + "." + traj_coll_name + "\' already exists!");
        } else {
            // create collection
            DBCollection traj_coll = traj_db.getCollection(traj_coll_name);

            System.out.println("Java - Writing to \'" + traj_db_name + "." + traj_coll_name + "\'");

            // if cursor not empty, append metadata to the first doc
            if (cursor.hasNext()) {
                // create metadata doc
                BasicDBObject meta_data = new BasicDBObject("name", traj_coll_name).append("type", "trajectory")
                        .append("start", start_ts).append("end", end_ts)
                        .append("description", "Link trajectory..");

                // get the first document from the cursor and append the metadata to it
                BasicDBObject first_doc = (BasicDBObject) cursor.next();

                first_doc.append("metadata", meta_data);

                // insert document with metadata
                traj_coll.insert(first_doc);
            }
            // if query returned no values for these timestamps, get the pose at the nearest timestamp
            else {
                // write the pose to the given db and coll
                this.WriteLinkPoseAt(start_ts, model_name, link_name, traj_db_name, traj_coll_name);
            }

            // insert rest of trajectory
            while (cursor.hasNext()) {
                traj_coll.insert(cursor.next());
            }
        }
    } catch (UnknownHostException e) {
        e.printStackTrace();
    } finally {
        // release the server-side cursor and the per-call client (previously leaked)
        cursor.close();
        if (mongoClient != null) {
            mongoClient.close();
        }
    }
}

From source file:org.knowrob.knowrob_sim_games.MongoSimGames.java

License:Open Source License

/**
 * Query the trajectory of the given collision of the given link of the given
 * model between the given timestamps and save the result in MongoDB.
 *
 * Fix: the per-call MongoClient and the aggregation cursor were never
 * closed, leaking a connection pool and a server-side cursor on every call.
 *
 * @param start_ts       start of the queried time interval (inclusive)
 * @param end_ts         end of the queried time interval (inclusive)
 * @param model_name     name of the model containing the link
 * @param link_name      name of the link containing the collision
 * @param collision_name name of the collision whose trajectory is queried
 * @param traj_db_name   database the trajectory is written to
 * @param traj_coll_name collection the trajectory is written to
 */
public void WriteCollisionTrajectory(double start_ts, double end_ts, String model_name, String link_name,
        String collision_name, String traj_db_name, String traj_coll_name) {

    // create the pipeline operations, first with the $match check the times
    DBObject match_time = new BasicDBObject("$match",
            new BasicDBObject("timestamp", new BasicDBObject("$gte", start_ts).append("$lte", end_ts)));

    // $unwind the models
    DBObject unwind_models = new BasicDBObject("$unwind", "$models");

    // $match for the given model name from the unwound models
    DBObject match_model = new BasicDBObject("$match", new BasicDBObject("models.name", model_name));

    // $project only the fields needed for the link lookup
    DBObject proj_links_fields = new BasicDBObject("_id", 0);
    proj_links_fields.put("timestamp", 1);
    proj_links_fields.put("models.links", 1);
    DBObject project_links = new BasicDBObject("$project", proj_links_fields);

    // $unwind the links
    DBObject unwind_links = new BasicDBObject("$unwind", "$models.links");

    // $match for the given link name from the unwound links
    DBObject match_link = new BasicDBObject("$match", new BasicDBObject("models.links.name", link_name));

    // $project only the fields needed for the collision lookup
    DBObject proj_collision_fields = new BasicDBObject("timestamp", 1);
    proj_collision_fields.put("models.links.collisions", 1);
    DBObject project_collisions = new BasicDBObject("$project", proj_collision_fields);

    // $unwind the collisions
    DBObject unwind_collisions = new BasicDBObject("$unwind", "$models.links.collisions");

    // $match for the given collision name from the unwound collisions
    DBObject match_collision = new BasicDBObject("$match",
            new BasicDBObject("models.links.collisions.name", collision_name));

    // final $project: keep the timestamp and flatten pos/rot to the top level
    DBObject proj_fields = new BasicDBObject("timestamp", 1);
    proj_fields.put("pos", "$models.links.collisions.pos");
    proj_fields.put("rot", "$models.links.collisions.rot");
    DBObject project = new BasicDBObject("$project", proj_fields);

    // run aggregation
    List<DBObject> pipeline = Arrays.asList(match_time, unwind_models, match_model, project_links, unwind_links,
            match_link, project_collisions, unwind_collisions, match_collision, project);

    AggregationOptions aggregationOptions = AggregationOptions.builder().batchSize(100)
            .outputMode(AggregationOptions.OutputMode.CURSOR).allowDiskUse(true).build();

    Cursor cursor = this.coll.aggregate(pipeline, aggregationOptions);

    MongoClient mongoClient = null;
    try {
        mongoClient = new MongoClient(this.dbHost, 27017);

        DB traj_db = mongoClient.getDB(traj_db_name);

        // only write if the target collection does not exist yet
        if (traj_db.collectionExists(traj_coll_name)) {
            System.out
                    .println("!!! Collection: \'" + traj_db_name + "." + traj_coll_name + "\' already exists!");
        } else {
            // create collection
            DBCollection traj_coll = traj_db.getCollection(traj_coll_name);

            System.out.println("Java  - Writing to \'" + traj_db_name + "." + traj_coll_name + "\'");

            // if cursor not empty, append metadata to the first doc
            if (cursor.hasNext()) {
                // create metadata doc
                BasicDBObject meta_data = new BasicDBObject("name", traj_coll_name).append("type", "trajectory")
                        .append("start", start_ts).append("end", end_ts)
                        .append("description", "Collision trajectory..");

                // get the first document from the cursor and append the metadata to it
                BasicDBObject first_doc = (BasicDBObject) cursor.next();

                first_doc.append("metadata", meta_data);

                // insert document with metadata
                traj_coll.insert(first_doc);
            }
            // if query returned no values for these timestamps, get the pose at the nearest timestamp
            else {
                // write the pose to the given db and coll
                this.WriteCollisionPoseAt(start_ts, model_name, link_name, collision_name, traj_db_name,
                        traj_coll_name);
            }

            // insert rest of trajectory
            while (cursor.hasNext()) {
                traj_coll.insert(cursor.next());
            }
        }
    } catch (UnknownHostException e) {
        e.printStackTrace();
    } finally {
        // release the server-side cursor and the per-call client (previously leaked)
        cursor.close();
        if (mongoClient != null) {
            mongoClient.close();
        }
    }
}

From source file:org.knowrob.knowrob_sim_games.MongoSimGames.java

License:Open Source License

/**
 * Get the positions of all links of the given model at (or just before) the
 * given timestamp and save the result in MongoDB.
 *
 * Fix: the per-call MongoClient and the aggregation cursor were never
 * closed, leaking a connection pool and a server-side cursor on every call.
 *
 * @param ts_str       knowrob timestamp string; converted to seconds
 * @param model_name   name of the model whose link positions are queried
 * @param traj_db_name database the result is written to
 */
public void WriteLinksPositionsAt(String ts_str, String model_name, String traj_db_name) {

    // transform the knowrob time to double with 3 decimal precision
    double timestamp = (double) Math.round((parseTime_d(ts_str) - TIME_OFFSET) * 1000) / 1000;

    // default collection name: same as the source collection
    String traj_coll_name = this.coll.getName();

    // $and list for querying the $match in the aggregation
    BasicDBList time_and_name = new BasicDBList();

    // add the timestamp and the model name
    time_and_name.add(new BasicDBObject("timestamp", new BasicDBObject("$lte", timestamp)));
    time_and_name.add(new BasicDBObject("models.name", model_name));

    // create the pipeline operations, first the $match
    DBObject match_time_and_name = new BasicDBObject("$match", new BasicDBObject("$and", time_and_name));

    // sort the results in descending order on the timestamp (keep most recent result first)
    DBObject sort_desc = new BasicDBObject("$sort", new BasicDBObject("timestamp", -1));

    // $limit the result to 1, we only need one pose
    DBObject limit_result = new BasicDBObject("$limit", 1);

    // $unwind models in order to output only the queried model
    DBObject unwind_models = new BasicDBObject("$unwind", "$models");

    // $match for the given model name from the unwound models
    DBObject match_model = new BasicDBObject("$match", new BasicDBObject("models.name", model_name));

    // final $project: keep the timestamp and flatten the link positions
    DBObject proj_fields = new BasicDBObject("_id", 0);
    proj_fields.put("timestamp", 1);
    proj_fields.put("links_pos", "$models.links.pos");
    DBObject project = new BasicDBObject("$project", proj_fields);

    // run aggregation
    List<DBObject> pipeline = Arrays.asList(match_time_and_name, sort_desc, limit_result, unwind_models,
            match_model, project);

    AggregationOptions aggregationOptions = AggregationOptions.builder().batchSize(100)
            .outputMode(AggregationOptions.OutputMode.CURSOR).allowDiskUse(true).build();

    Cursor cursor = this.coll.aggregate(pipeline, aggregationOptions);

    MongoClient mongoClient = null;
    try {
        mongoClient = new MongoClient(this.dbHost, 27017);

        DB traj_db = mongoClient.getDB(traj_db_name);

        // only write if the target collection does not exist yet
        if (traj_db.collectionExists(traj_coll_name)) {
            System.out
                    .println("!!! Collection: \'" + traj_db_name + "." + traj_coll_name + "\' already exists!");
        } else {
            // create collection
            DBCollection traj_coll = traj_db.getCollection(traj_coll_name);

            System.out.println("Java  - Writing to \'" + traj_db_name + "." + traj_coll_name + "\'");

            // if cursor not empty, append metadata to the first doc
            if (cursor.hasNext()) {
                // get pancake roundness again in order to append it to the metadata
                double roundness = this.GetPancakeRoundness(ts_str, model_name);

                // create metadata doc
                BasicDBObject meta_data = new BasicDBObject("name", traj_coll_name).append("type", "links_pos")
                        .append("timestamp", timestamp).append("roundness", roundness)
                        .append("description", "Pancake links positions..");

                // get the first document from the cursor and append the metadata to it
                BasicDBObject first_doc = (BasicDBObject) cursor.next();

                first_doc.append("metadata", meta_data);

                // insert document with metadata
                traj_coll.insert(first_doc);
            }
            // nothing stored at or before the requested timestamp
            else {
                System.out.println("Java  - WriteLinksPositionsAt Query returned no results!");
            }

            // insert rest of the results
            while (cursor.hasNext()) {
                traj_coll.insert(cursor.next());
            }
        }
    } catch (UnknownHostException e) {
        e.printStackTrace();
    } finally {
        // release the server-side cursor and the per-call client (previously leaked)
        cursor.close();
        if (mongoClient != null) {
            mongoClient.close();
        }
    }
}

From source file:org.knowrob.knowrob_sim_games.MongoSimGames.java

License:Open Source License

/**
 * Get the positions of the model links at the given timestamp
 * save result in MongoDB/*ww w.  j  ava2  s.c om*/
 */
public void WriteLinksTrajs(String start_str, String end_str, String model_name, String traj_db_name) {

    // transform the knowrob time to double with 3 decimal precision
    double start_ts = (double) Math.round((parseTime_d(start_str) - TIME_OFFSET) * 1000) / 1000;
    double end_ts = (double) Math.round((parseTime_d(end_str) - TIME_OFFSET) * 1000) / 1000;

    // set default coll name
    String traj_coll_name = this.coll.getName();/* + "_" 
                                                + model_name + "_links_trajs_" + start_ts + "_" + end_ts;*/

    // remove the knowrob namespace (http://knowrob.org/kb/knowrob.owl#) form the model 
    // String model_name = kr_model_name.split("#")[1];

    // create the pipeline operations, first with the $match check the times
    DBObject match_time = new BasicDBObject("$match",
            new BasicDBObject("timestamp", new BasicDBObject("$gte", start_ts).append("$lte", end_ts)));

    // $unwind models in order to output only the queried model
    DBObject unwind_models = new BasicDBObject("$unwind", "$models");

    // $match for the given model name from the unwinded models
    DBObject match_model = new BasicDBObject("$match", new BasicDBObject("models.name", model_name));

    // build the $projection operation
    DBObject proj_fields = new BasicDBObject("_id", 0);
    proj_fields.put("timestamp", 1);
    proj_fields.put("links_pos", "$models.links.pos");
    DBObject project = new BasicDBObject("$project", proj_fields);

    // run aggregation
    List<DBObject> pipeline = Arrays.asList(match_time, unwind_models, match_model, project);

    AggregationOptions aggregationOptions = AggregationOptions.builder().batchSize(100)
            .outputMode(AggregationOptions.OutputMode.CURSOR).allowDiskUse(true).build();

    Cursor cursor = this.coll.aggregate(pipeline, aggregationOptions);

    try {

        MongoClient mongoClient = new MongoClient(this.dbHost, 27017);

        DB traj_db = mongoClient.getDB(traj_db_name);

        // check if the collection already exists
        if (traj_db.collectionExists(traj_coll_name)) {
            System.out
                    .println("!!! Collection: \'" + traj_db_name + "." + traj_coll_name + "\' already exists!");
        }
        // create the collection
        else {
            // create collection
            DBCollection traj_coll = traj_db.getCollection(traj_coll_name);

            System.out.println("Java  - Writing to \'" + traj_db_name + "." + traj_coll_name + "\'");

            // if cursor not empty, append matadata to the first doc
            if (cursor.hasNext()) {
                // create metadata doc
                BasicDBObject meta_data = new BasicDBObject("name", traj_coll_name)
                        .append("type", "links_trajs").append("start", start_ts).append("end", end_ts)
                        .append("description", "Pancake links trajectories..");

                // get the first document as the next cursor and append the metadata to it
                BasicDBObject first_doc = (BasicDBObject) cursor.next();

                first_doc.append("metadata", meta_data);

                // insert document with metadata
                traj_coll.insert(first_doc);
            }
            // if query returned no values for these timestamps, get the pose at the nearest timestamp
            else {
                System.out.println("Java  - WriteLinksPositionsAt Query returned no results!'");
            }

            // insert rest of trajectory
            while (cursor.hasNext()) {
                traj_coll.insert(cursor.next());
            }
        }
    } catch (UnknownHostException e) {
        e.printStackTrace();
    }
}

From source file:org.mandar.analysis.recsys2014.recsysMain.java

License:Open Source License

/**
 * Persists the current algorithm test configuration together with the
 * achieved nDCG@10 score into the evaluation-runs collection.
 *
 * @param nDCG the nDCG@10 score measured for this run
 * @param db   the database holding the evaluation collection
 */
public void writeAlgoTestResults(double nDCG, DB db) {
    // stamp the run with the current wall-clock time
    TimeStamp now = new TimeStamp(new Date());

    algoConfig.append("timestamp", now.getTime());
    algoConfig.append("nDCG", nDCG);

    db.getCollection(DBSettings.EVAL_COLLECTION).insert(algoConfig);
}

From source file:org.mediawiki.importer.XmlDumpReader.java

License:Open Source License

/**
 * Stores the current page revision in MongoDB (the full article plus,
 * optionally, one text-index document per section) and mirrors the data
 * into MySQL.
 *
 * Fixes: a null revision text no longer throws NPE in the redirect check
 * (the text was already null-guarded earlier in the method); the redirect
 * value is passed through sqlEscape instead of being concatenated raw into
 * the SQL string (SQL injection); JDBC statements are closed after use;
 * unused locals and dead commented-out SQL removed.
 */
void insertToMongoDBAndMySQL() {
    String comment = "";
    if (rev.Comment != null)
        comment = rev.Comment;
    String title = "";
    if (page.Title != null)
        title = page.Title.toString();

    String text = "";
    if (rev.Text != null)
        text = rev.Text;
    // strip HTML comments from the wikitext
    text = text.replaceAll("(?s)<!--.*?-->", "");

    // the first section simply gets the article title as its heading
    text = "\n== " + title + " ==\n \n" + text;

    // split the text into sections at "== heading ==" markers
    String expression = "\\s+\\=\\=\\s+.+\\s+\\=\\=\\s+";
    Matcher match = Pattern.compile(expression).matcher(text);
    String[] splittedText = text.split(expression);
    ArrayList<String> subtitles = new ArrayList<String>();
    String subtitle = "";
    while (match.find()) {
        subtitle = match.group().trim();
        // remove the "== ... ==" markers around the heading
        subtitles.add(subtitle.substring(3, subtitle.length() - 3));
    }

    try {

        DBCollection article = this.mongodbArticles;
        DBCollection textindex = this.mongodbTextindexes;

        BasicDBObject doc = new BasicDBObject();
        doc.put("title", title);
        doc.put("comment", comment);
        // content hash doubles as the MongoDB _id
        String mongoid = XmlDumpReader.generateHashForID(text);
        doc.put("_id", mongoid);
        BasicDBObject textindizies = new BasicDBObject();
        ArrayList<Object> paragraphs = new ArrayList<Object>();
        ArrayList<String> links = new ArrayList<String>();

        int sectionCount = -2;
        long textindexCount = 0;
        String sqlLinkValue = "";
        Connection mysqlConnection = null;
        String redirect = "";

        // is the article a redirect? (guard against a null revision text,
        // which the earlier null check shows is possible)
        if (rev.Text != null && rev.Text.trim().toLowerCase().matches("\\A\\#(redirect|weiterleitung)\\s.*")) {
            String[] splittedRedirect = rev.Text.split(
                    "\\#(redirect|weiterleitung|Weiterleitung|WEITERLEITUNG|Redirect|REDIRECT)\\s+\\[\\[");
            redirect = splittedRedirect[1].trim().substring(0, splittedRedirect[1].length() - 2);
        }

        // every single section is additionally stored separately
        for (String string : splittedText) {
            sectionCount++;
            try {
                subtitle = subtitles.get(sectionCount);
            } catch (Exception e) {
                // more sections than headings: fall back to an empty heading
                subtitle = "";
            }
            // only add sections that actually contain text
            if (string.trim().length() > 0) {
                BasicDBObject paragraph = new BasicDBObject();
                textindexCount++;
                if (redirect.length() > 0) {
                    doc.put("redirect", redirect);
                    System.out.println(title + " => " + redirect);
                } else {

                    String textindexMongoID = XmlDumpReader
                            .generateHashForID(text + "fortextsearch" + String.valueOf(sectionCount));
                    // each section also goes into its own collection for full-text search
                    if (XmlDumpReader.generateTextIndizes) {
                        textindizies.put("article", title);
                        textindizies.put("order", (int) textindexCount);
                        textindizies.put("title", subtitle);
                        textindizies.put("text", string);
                        textindizies.put("_id", textindexMongoID);
                    }

                    paragraph.put("subtitle", subtitle);
                    paragraph.put("content", string);
                    paragraphs.add(paragraph);

                    // collect all [[...]] wiki links of this section
                    String linkExpression = "\\[\\[[0-9\\s\\'\\\"\\.\\-\\_\\p{L}]+\\]\\]";
                    Matcher matchLinks = Pattern.compile(linkExpression).matcher(string);
                    String linkText = "";

                    while (matchLinks.find()) {
                        linkText = matchLinks.group().trim();
                        // remove the [[ ... ]] brackets around the link target
                        linkText = linkText.substring(2, linkText.length() - 2);
                        links.add(linkText);
                        sqlLinkValue = sqlLinkValue + "," + linkText;
                    }

                    // mysql + mongodb insert of the section for text indexing
                    if (XmlDumpReader.generateTextIndizes) {
                        try {
                            mysqlConnection = this.mysqlConnection;
                            Statement stmt = (Statement) mysqlConnection.createStatement();
                            String sql = "INSERT INTO  `textindex` (`ID` , `ArticleID`, `MongoID` , `Sort`, `Title` , `Text` , `Links`)"
                                    + "VALUES (" + "NULL ,  '" + lastInsertedArticleID + "', \""
                                    + textindexMongoID + "\", " + (int) +textindexCount + ", "
                                    + XmlDumpReader.sqlEscape(subtitle) + ", " + XmlDumpReader.sqlEscape(string)
                                    + ",  " + XmlDumpReader.sqlEscape(sqlLinkValue) + " " + ");";
                            stmt.executeUpdate(sql);
                            stmt.close();
                        } catch (SQLException e) {
                            System.err.println("Fehler beim mysql insert von textindex: " + e.getMessage());
                        }
                        textindizies.put("link", links);
                        textindex.insert(textindizies);
                    }

                }
            }
        }

        // mysql insert of the complete article
        try {
            mysqlConnection = this.mysqlConnection;
            Statement stmt = (Statement) mysqlConnection.createStatement();
            if (sqlLinkValue.length() > 0)
                sqlLinkValue = sqlLinkValue.substring(1);
            // NOTE(review): all values are now escaped via sqlEscape; a
            // PreparedStatement with bind parameters would be the more robust fix.
            String sql = "INSERT INTO  `articles` (`ID` , `MongoID` , `Title` , `Redirect` , `Comment` , `Content`, `Links` )"
                    + "VALUES (" + "NULL ,  \"" + mongoid + "\",  " + XmlDumpReader.sqlEscape(title) + ",  "
                    + XmlDumpReader.sqlEscape(redirect) + ",  " + XmlDumpReader.sqlEscape(comment) + ",  "
                    + XmlDumpReader.sqlEscape(rev.Text) + ", " + XmlDumpReader.sqlEscape(sqlLinkValue) + " "
                    + ");";
            stmt.executeUpdate(sql);
            stmt.close();
            this.lastInsertedArticleID++;

        } catch (SQLException e) {
            System.err.println("Fehler beim mysql insert: " + e.getMessage());
        }

        // insert the full article into mongodb
        doc.put("sections", paragraphs);
        doc.put("links", links);
        article.insert(doc);

        System.out.println("'" + title + "' ... ok\n");
    } catch (Exception e) {
        System.err.println("Fehler beim mongodb insert von '" + title + "': " + e.getMessage());
    }
}

From source file:org.mongodb.demos.tailable.RealTimeAppServer.java

License:Apache License

/**
 * (Re)creates a capped collection with the given name and seeds it with an
 * initial status document, so tailable cursors have something to follow.
 *
 * @param name name of the capped collection to create
 * @return the freshly created capped collection
 */
private DBCollection createAndGetCappedCollection(String name) throws InterruptedException {
    // drop any previous incarnation so the collection can be recreated capped
    db.getCollection(name).drop();

    DBObject options = new BasicDBObject("capped", true);
    options.put("size", 1000);

    DBCollection capped = db.createCollection(name, options);
    capped.insert(BasicDBObjectBuilder.start("status", "initialized").get());
    System.out.println("== capped collection created ===");
    return capped;
}

From source file:org.mongoj.samples.service.persistence.CarPersistenceImpl.java

License:Open Source License

/**
 * Persists the given car: inserts it whole when it is new, otherwise
 * translates the impl's queued change maps into a single MongoDB update
 * document ($set / $push / $pushAll / $addToSet / $pull / $pullAll).
 *
 * @param car the car model to persist
 * @return the same car instance
 * @throws UpdateException if the MongoDB write reports an error
 * @throws SystemException on lower-level persistence failures
 */
protected Car updateImpl(org.mongoj.samples.model.Car car) throws UpdateException, SystemException {
    DBCollection collection = getDB().getCollection(CarImpl.COLLECTION_NAME);

    if (car.isNew()) {
        car.setNew(false); // mark as persisted before building the insert doc

        CarImpl carImpl = (CarImpl) car;

        // a brand-new document is inserted whole, so queued partial updates are irrelevant
        carImpl.addMap.clear();
        carImpl.appendMap.clear();
        carImpl.removeMap.clear();
        carImpl.setMap.clear();

        WriteResult writeResult = collection.insert(getDBObject(car));

        String err = writeResult.getError();

        if (err != null) {
            throw new UpdateException(err);
        }
    } else {
        // match the existing document by its ObjectId
        DBObject criteria = new QueryBuilder().put("_id").is(new ObjectId(car.getId())).get();

        CarImpl carImpl = (CarImpl) car;

        // one builder per MongoDB update operator; only non-empty ones are attached
        BasicDBObjectBuilder updateBuilder = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder setUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder pushUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder pushAllUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder addUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder removeUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder removeAllUpdates = BasicDBObjectBuilder.start();

        // plain field assignments -> $set
        for (String field : carImpl.setMap.keySet()) {
            setUpdates = setUpdates.add(field, carImpl.setMap.get(field));
        }

        if (!setUpdates.isEmpty()) {
            updateBuilder.add(SET_OPERATOR, setUpdates.get());
        }

        // list appends -> $push for a single element, $pushAll for several
        for (String field : carImpl.appendMap.keySet()) {
            List<Object> list = (List<Object>) carImpl.appendMap.get(field);

            if (!list.isEmpty()) {
                if (list.size() == 1) {
                    pushUpdates = pushUpdates.add(field, ((List) carImpl.appendMap.get(field)).get(0));
                } else {
                    pushAllUpdates = pushAllUpdates.add(field, carImpl.appendMap.get(field));
                }
            }
        }

        if (!pushUpdates.isEmpty()) {
            updateBuilder.add(PUSH_OPERATOR, pushUpdates.get());
        }

        if (!pushAllUpdates.isEmpty()) {
            updateBuilder.add(PUSH_ALL_OPERATOR, pushAllUpdates.get());
        }

        // set-style additions -> $addToSet, wrapping multiple values in $each
        for (String field : carImpl.addMap.keySet()) {
            List<Object> list = (List<Object>) carImpl.addMap.get(field);

            if (!list.isEmpty()) {
                if (list.size() == 1) {
                    addUpdates = addUpdates.add(field, ((List) carImpl.addMap.get(field)).get(0));
                } else {
                    DBObject each = BasicDBObjectBuilder.start()
                            .add(EACH_OPERATOR, ((List) carImpl.addMap.get(field)).toArray()).get();

                    addUpdates = addUpdates.add(field, each);
                }
            }
        }

        if (!addUpdates.isEmpty()) {
            updateBuilder.add(ADD_TO_SET_OPERATOR, addUpdates.get());
        }

        // removals -> $pull for a single element, $pullAll for several
        for (String field : carImpl.removeMap.keySet()) {
            List<Object> list = (List<Object>) carImpl.removeMap.get(field);

            if (!list.isEmpty()) {
                if (list.size() == 1) {
                    removeUpdates = removeUpdates.add(field, ((List) carImpl.removeMap.get(field)).get(0));
                } else {
                    removeAllUpdates = removeAllUpdates.add(field, carImpl.removeMap.get(field));
                }
            }
        }

        if (!removeUpdates.isEmpty()) {
            updateBuilder.add(PULL_OPERATOR, removeUpdates.get());
        }

        if (!removeAllUpdates.isEmpty()) {
            updateBuilder.add(PULL_ALL_OPERATOR, removeAllUpdates.get());
        }

        // only issue the update if at least one operator was populated
        if (!updateBuilder.isEmpty()) {
            DBObject update = updateBuilder.get();

            _log.debug("Update query = {}", update);

            WriteResult writeResult = collection.update(criteria, update);

            String err = writeResult.getError();

            if (err != null) {
                throw new UpdateException(err);
            }
        }
    }

    return car;
}

From source file:org.mongoj.samples.service.persistence.UserPersistenceImpl.java

License:Open Source License

/**
 * Persists the given user: inserts a new document when the entity is new,
 * otherwise builds a single MongoDB update ($set / $push / $pushAll /
 * $addToSet / $pull / $pullAll) from the entity's pending modification maps
 * and applies it by _id.
 *
 * @param user the user entity to persist
 * @return the same {@code user} instance, marked as no longer new on insert
 * @throws UpdateException if the MongoDB write reports an error
 * @throws SystemException on lower-level persistence failure
 */
protected User updateImpl(org.mongoj.samples.model.User user) throws UpdateException, SystemException {
    DBCollection collection = getDB().getCollection(UserImpl.COLLECTION_NAME);

    if (user.isNew()) {
        user.setNew(false);

        UserImpl userImpl = (UserImpl) user;

        // A fresh insert carries the full document, so any queued
        // incremental modifications are irrelevant — drop them.
        userImpl.addMap.clear();
        userImpl.appendMap.clear();
        userImpl.removeMap.clear();
        userImpl.setMap.clear();

        WriteResult writeResult = collection.insert(getDBObject(user));

        String err = writeResult.getError();

        if (err != null) {
            throw new UpdateException(err);
        }
    } else {
        DBObject criteria = new QueryBuilder().put("_id").is(new ObjectId(user.getId())).get();

        UserImpl userImpl = (UserImpl) user;

        BasicDBObjectBuilder updateBuilder = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder setUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder pushUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder pushAllUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder addUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder removeUpdates = BasicDBObjectBuilder.start();
        BasicDBObjectBuilder removeAllUpdates = BasicDBObjectBuilder.start();

        // Plain field assignments map directly to $set.
        for (String field : userImpl.setMap.keySet()) {
            setUpdates.add(field, userImpl.setMap.get(field));
        }

        if (!setUpdates.isEmpty()) {
            updateBuilder.add(SET_OPERATOR, setUpdates.get());
        }

        // List appends: a single value uses $push, multiple use $pushAll.
        for (String field : userImpl.appendMap.keySet()) {
            List<Object> list = (List<Object>) userImpl.appendMap.get(field);

            if (list.isEmpty()) {
                continue;
            }

            if (list.size() == 1) {
                pushUpdates.add(field, list.get(0));
            } else {
                pushAllUpdates.add(field, list);
            }
        }

        if (!pushUpdates.isEmpty()) {
            updateBuilder.add(PUSH_OPERATOR, pushUpdates.get());
        }

        if (!pushAllUpdates.isEmpty()) {
            updateBuilder.add(PUSH_ALL_OPERATOR, pushAllUpdates.get());
        }

        // Set-semantics additions map to $addToSet; multiple values need
        // the $each wrapper so the array is not added as a single element.
        for (String field : userImpl.addMap.keySet()) {
            List<Object> list = (List<Object>) userImpl.addMap.get(field);

            if (list.isEmpty()) {
                continue;
            }

            if (list.size() == 1) {
                addUpdates.add(field, list.get(0));
            } else {
                DBObject each = BasicDBObjectBuilder.start()
                        .add(EACH_OPERATOR, list.toArray()).get();

                addUpdates.add(field, each);
            }
        }

        if (!addUpdates.isEmpty()) {
            updateBuilder.add(ADD_TO_SET_OPERATOR, addUpdates.get());
        }

        // Removals: a single value uses $pull, multiple use $pullAll.
        for (String field : userImpl.removeMap.keySet()) {
            List<Object> list = (List<Object>) userImpl.removeMap.get(field);

            if (list.isEmpty()) {
                continue;
            }

            if (list.size() == 1) {
                removeUpdates.add(field, list.get(0));
            } else {
                removeAllUpdates.add(field, list);
            }
        }

        if (!removeUpdates.isEmpty()) {
            updateBuilder.add(PULL_OPERATOR, removeUpdates.get());
        }

        if (!removeAllUpdates.isEmpty()) {
            updateBuilder.add(PULL_ALL_OPERATOR, removeAllUpdates.get());
        }

        // Only issue the update when at least one operator was populated.
        if (!updateBuilder.isEmpty()) {
            DBObject update = updateBuilder.get();

            _log.debug("Update query = {}", update);

            WriteResult writeResult = collection.update(criteria, update);

            String err = writeResult.getError();

            if (err != null) {
                throw new UpdateException(err);
            }
        }
    }

    return user;
}

From source file:org.ndsc.mimicIO.db.DBManager.java

License:Apache License

/**
 * Stores the given persistable object as a new document in its collection.
 *
 * <p>Duplicate-key failures are tolerated: they occur when a virtual network
 * document already exists (e.g. re-created on startup) and are only logged.
 *
 * @param obj the object to persist
 */
public void createDoc(Persistable obj) {
    // MongoDB can print a java.net.ConnectException from another component
    // straight to stderr (it cannot be caught here), so temporarily
    // suppress the error stream for the duration of the insert.
    PrintStream savedErr = System.err;
    System.setErr(null);
    try {
        this.collections.get(obj.getDBName()).insert(new BasicDBObject(obj.getDBObject()));
    } catch (Exception e) {
        if (e instanceof MongoException.DuplicateKey) {
            // Document already present — expected on startup, not an error.
            log.warn("Skipped saving of virtual network with duplicate tenant id");
        } else {
            log.error("Failed to insert document into database: {}", e.getMessage());
        }
    } finally {
        // Always restore the original error stream.
        System.setErr(savedErr);
    }
}