Example usage for com.mongodb DBCollection insert

List of usage examples for com.mongodb DBCollection insert

Introduction

On this page you can find example usage of com.mongodb DBCollection insert.

Prototype

public WriteResult insert(final List<? extends DBObject> documents) 

Document

Insert documents into a collection.
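Below is a minimal, self-contained sketch of the list overload shown in the prototype, using the legacy driver API. The host, the database name "exampledb", and the collection name "people" are illustrative assumptions, not taken from the examples that follow.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
import com.mongodb.WriteResult;

import java.util.ArrayList;
import java.util.List;

public class InsertExample {
    public static void main(String[] args) {
        // Assumes a mongod instance listening on localhost:27017
        MongoClient mongo = new MongoClient("localhost", 27017);
        DB db = mongo.getDB("exampledb");
        DBCollection collection = db.getCollection("people");

        // Build a small batch of documents and insert them in a single call
        List<DBObject> documents = new ArrayList<DBObject>();
        documents.add(new BasicDBObject("name", "Alice").append("age", 30));
        documents.add(new BasicDBObject("name", "Bob").append("age", 25));

        WriteResult result = collection.insert(documents);
        System.out.println("Insert result: " + result);

        mongo.close();
    }
}

Note that several of the examples below call the single-document overload insert(DBObject...), which behaves the same way for one document.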

Usage

From source file:com.images3.data.impl.ImagePlantAccessImplMongoDB.java

License:Apache License

public void insertImagePlant(ImagePlantOS imagePlant) {
    DBCollection coll = getDatabase().getCollection("ImagePlant");
    coll.insert(getObjectMapper().mapToBasicDBObject(imagePlant));
}

From source file:com.images3.data.impl.MongoDBAccess.java

License:Apache License

private void insertPageCursor(PageCursor cursor) {
    DBCollection coll = getDatabase().getCollection("PageCursor");
    coll.insert(getObjectMapper().mapToBasicDBObject(cursor));
}

From source file:com.images3.data.impl.TemplateAccessImplMongoDB.java

License:Apache License

public void insertTemplate(TemplateOS template) {
    DBCollection coll = getDatabase().getCollection("Template");
    coll.insert(getObjectMapper().mapToBasicDBObject(template));
}

From source file:com.impetus.kundera.client.MongoDBClient.java

License:Apache License

@Override
public void writeColumns(EntityManagerImpl em, EnhancedEntity e, EntityMetadata m) throws Exception {
    String dbName = m.getKeyspaceName();
    String documentName = m.getColumnFamilyName();
    String key = e.getId();

    log.debug("Checking whether record already exist for " + dbName + "." + documentName + " for " + key);
    Object entity = loadColumns(em, m.getEntityClazz(), dbName, documentName, key, m);
    if (entity != null) {
        log.debug("Updating data into " + dbName + "." + documentName + " for " + key);
        DBCollection dbCollection = mongoDb.getCollection(documentName);

        BasicDBObject searchQuery = new BasicDBObject();
        searchQuery.put(m.getIdColumn().getName(), key);
        BasicDBObject updatedDocument = new MongoDBDataHandler().getDocumentFromEntity(em, m, e.getEntity());
        dbCollection.update(searchQuery, updatedDocument);

    } else {
        log.debug("Inserting data into " + dbName + "." + documentName + " for " + key);
        DBCollection dbCollection = mongoDb.getCollection(documentName);

        BasicDBObject document = new MongoDBDataHandler().getDocumentFromEntity(em, m, e.getEntity());
        dbCollection.insert(document);
    }

}

From source file:com.intellijob.MongoConfiguration.java

License:Apache License

@Autowired
@Bean
public Boolean doImportData(MongoClient mongoClient) throws IOException {

    DBCollection sys_import_collection = mongoClient.getDB(this.properties.getDatabase())
            .getCollection(ApplicationSettings.COLLECTION_NAME);
    if (isProduction && sys_import_collection.count() == 0) {
        LOG.info("IMPORT DATA =============================================>");

        //Import collection skill_categories.
        loadCollectionSkillCategories(mongoClient);

        //Import languages
        loadSkillsData(mongoClient, "skill_languages.json", "skill_languages");

        //Import knowledge skills
        loadSkillsData(mongoClient, "skill_knowledge.json", "skill_knowledges");

        //Import personal strengths
        loadSkillsData(mongoClient, "skill_personalstrengths.json", "skill_personalstrengths");

        DBObject row = new BasicDBObject();
        row.put(ApplicationSettings.FIELD_MONGO_DATA_IMPORTED, true);
        row.put(ApplicationSettings.FIELD_MONGO_DATA_IMPORTED_DATE, new Date());
        row.put(ApplicationSettings.FIELD_ELASTIC_DATA_IMPORTED, false);
        row.put(ApplicationSettings.FIELD_ELASTIC_DATA_IMPORTED_DATE, null);

        sys_import_collection.insert(row);
        LOG.info("IMPORT DATA FINISHED!");
        return true;
    }

    return false;
}

From source file:com.intellijob.MongoConfiguration.java

License:Apache License

/**
 * Import collection skill_categories.
 */
private void loadCollectionSkillCategories(MongoClient mongoClient) {

    InputStream inputStream = Thread.currentThread().getContextClassLoader()
            .getResourceAsStream("imports/skills_categories.json");

    String collectionName = "skill_categories";
    try {
        LOG.info("LOAD {} DATA ....................................", collectionName);
        DBCollection col = mongoClient.getDB(this.properties.getDatabase()).getCollection(collectionName);

        List<Map<String, Object>> categories = new ObjectMapper().readValue(inputStream,
                TypeFactory.defaultInstance().constructCollectionType(List.class, HashMap.class));

        for (Map<String, Object> category : categories) {
            DBObject dbObject = new BasicDBObject(category);
            dbObject.put("_id", new ObjectId(category.get("_id").toString()));
            col.insert(dbObject);
        }
        LOG.info("DONE!");
    } catch (Exception e) {
        LOG.error("Collection (" + collectionName + ") could not be imported successfully!", e);
    }
}

From source file:com.intellijob.MongoConfiguration.java

License:Apache License

/**
 * Import supported skill data.
 */
private void loadSkillsData(MongoClient mongoClient, String jsonFile, String collectionName) {
    try {
        LOG.info("LOAD {} DATA .........................................", collectionName);
        InputStream inputStream = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream("imports/" + jsonFile);
        DBCollection col = mongoClient.getDB(this.properties.getDatabase()).getCollection(collectionName);

        TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() {
        };
        Map<String, Object> mapData = new ObjectMapper().readValue(inputStream, typeRef);

        setObjectIdRecursive(mapData);
        DBObject dbObject = new BasicDBObject(mapData);
        col.insert(dbObject);
        LOG.info("DONE!");
    } catch (Exception e) {
        LOG.error("Collection (" + collectionName + ") could not be imported successfully!", e);
    }
}

From source file:com.intuit.utils.PopulateTweets.java

public static void main(String[] args) {
    Date now = new Date();
    System.out.println("Current date is: " + now.toString());

    MongoClient mongo = new MongoClient("localhost", 27017);
    DB db = mongo.getDB("tweetsdb");
    DBCollection collection = db.getCollection("tweetscollection");
    WriteResult result = collection.remove(new BasicDBObject());

    String[] users = { "user1", "user2", "user3", "user4", "user5", "user6", "user7", "user8", "user9",
            "user10" };
    // Due to lack of time, tweets are inserted per user in a fixed order
    // rather than at randomized times.
    for (String user : users) {
        int tweetIndex = 0;
        for (int i = 1; i <= 10; i++) {
            BasicDBObject document = new BasicDBObject();
            // This keeps the tweetId value unique across the system.
            // Ideally it would be stored as the "_id" value, which would also help
            // partition the tweets across multiple shards in a large-scale system,
            // but that part is skipped here due to lack of time.
            String tweetId = user + "|tweet" + tweetIndex;
            document.put("tweetId", tweetId);
            document.put("user", user);
            document.put("text", "tweet number" + tweetIndex);
            document.put("tweetedOn", new Date().toString());
            System.out.println("tweet number: " + tweetIndex + "   " + document.toString());
            collection.insert(document);
            tweetIndex++;
            try {
                // Just introducing some delay between tweets to make testing a bit easier
                Thread.sleep(3000);
            } catch (InterruptedException ex) {
                Logger.getLogger(PopulateTweets.class.getName()).log(Level.SEVERE, null, ex);
            }
        }

    }

    BasicDBObject indexObj = new BasicDBObject();
    indexObj.put("user", 1);
    indexObj.put("tweetedOn", -1);
    collection.createIndex(indexObj);

    BasicDBObject tweetIdObj = new BasicDBObject();
    tweetIdObj.put("tweetId", 1);
    collection.createIndex(tweetIdObj);
}

From source file:com.intuit.utils.PopulateUsers.java

public static void main(String[] args) {
    Date now = new Date();
    System.out.println("Current date is: " + now.toString());

    MongoClient mongo = new MongoClient("localhost", 27017);
    DB db = mongo.getDB("tweetsdb");
    DBCollection collection = db.getCollection("userscollection");
    WriteResult result = collection.remove(new BasicDBObject());

    int userIndex = 1;
    for (int i = 1; i <= 10; i++) {
        JSONObject userDocument = new JSONObject();
        String user = "user" + userIndex;
        userDocument.put("user", user);

        JSONArray followerList = new JSONArray();
        Random randomGenerator = new Random();
        for (int j = 0; j < 3; j++) {
            int followerId = randomGenerator.nextInt(10) + 1;
            // Assumption: a user will not be a follower of himself
            while (followerId == userIndex) {
                followerId = randomGenerator.nextInt(10) + 1;
            }

            String follower = "user" + followerId;
            if (!followerList.contains(follower)) {
                followerList.add(follower);
            }
        }
        userDocument.put("followers", followerList);

        JSONArray followingList = new JSONArray();
        for (int k = 0; k < 3; k++) {
            int followingId = randomGenerator.nextInt(10) + 1;
            // Assumption: a user will not be following his own tweets
            while (followingId == userIndex) {
                followingId = randomGenerator.nextInt(10) + 1;
            }

            String followingUser = "user" + followingId;
            if (!followingList.contains(followingUser)) {
                followingList.add(followingUser);
            }
        }
        userDocument.put("following", followingList);
        System.out.println("Json string is: " + userDocument.toString());
        DBObject userDBObject = (DBObject) JSON.parse(userDocument.toString());
        collection.insert(userDBObject);
        userIndex++;

    }

    //        try {
    //            FileWriter file = new FileWriter("/Users/dmurty/Documents/MongoData/usersCollection.js");
    //            file.write(usersArray.toJSONString());
    //            file.flush();
    //            file.close();
    //        } catch (IOException ex) {
    //            Logger.getLogger(PopulateUsers.class.getName()).log(Level.SEVERE, null, ex);
    //        } 
}

From source file:com.jaspersoft.mongodb.importer.MongoDbImporter.java

License:Open Source License

public void importTable(String tableName) throws Exception {
    createConnection();
    logger.info("Initialize import");
    ResultSet resultSet = null;
    List<DBObject> objectsList = new ArrayList<DBObject>();
    try {
        resultSet = statement.executeQuery("SELECT * FROM " + tableName);
        ResultSetMetaData metaData = resultSet.getMetaData();
        int index, columnCount = metaData.getColumnCount(), count = 0;
        logger.info("Importing rows");
        DBCollection collection = null;
        if (!mongodbConnection.getMongoDatabase().collectionExists(tableName)) {
            logger.info("Collection \"" + tableName + "\" doesn't exist");
            DBObject options = new BasicDBObject("capped", false);
            collection = mongodbConnection.getMongoDatabase().createCollection(tableName, options);
        } else {
            logger.info("Collection \"" + tableName + "\" exists");
            collection = mongodbConnection.getMongoDatabase().getCollectionFromString(tableName);
            collection.drop();
            logger.info("Collection \"" + tableName + "\" was cleaned up");
        }
        Object value;
        DBObject newObject;
        while (resultSet.next()) {
            newObject = new BasicDBObject();
            for (index = 1; index <= columnCount; index++) {
                value = resultSet.getObject(index);
                if (value != null) {
                    newObject.put(metaData.getColumnName(index), value);
                }
            }
            objectsList.add(newObject);
            count++;
            if (count % 100 == 0) {
                logger.info("Processed: " + count);
                logger.info("Result: " + collection.insert(objectsList).getField("ok"));
                objectsList.clear();
            }
        }
        if (objectsList.size() > 0) {
            // Insert the remaining documents once and log the result
            logger.info("Result: " + collection.insert(objectsList).getField("ok"));
            objectsList.clear();
        }
        logger.info("Rows added: " + count);
        logger.info("Import done");
    } finally {
        if (resultSet != null) {
            resultSet.close();
        }
    }
}