Example usage for com.mongodb.client MongoCollection insertMany

List of usage examples for com.mongodb.client MongoCollection insertMany

Introduction

On this page you can find example usage for com.mongodb.client MongoCollection insertMany.

Prototype

void insertMany(List<? extends TDocument> documents);

Document

Inserts one or more documents.
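
A minimal, runnable sketch of a call to this method (the connection string, database, and collection names are placeholders, not taken from the examples below):

import java.util.Arrays;
import java.util.List;

import org.bson.Document;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;

public class InsertManyExample {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> collection = client.getDatabase("exampledb").getCollection("people");

            // insertMany requires a non-empty list of documents
            List<Document> docs = Arrays.asList(
                    new Document("name", "Ada").append("born", 1815),
                    new Document("name", "Alan").append("born", 1912));

            collection.insertMany(docs);
        }
    }
}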

Usage

From source file:com.shampan.model.PageModel.java

public ResultEvent addPhotos(String pageId, String albumId, String photoInfoList) {
    try {
        MongoCollection<PagePhotoDAO> mongoCollection = DBConnection.getInstance().getConnection()
                .getCollection(Collections.PAGEPHOTOS.toString(), PagePhotoDAO.class);
        JSONArray photoArray = new JSONArray(photoInfoList);
        ArrayList<PagePhotoDAO> photoList = new ArrayList<PagePhotoDAO>();
        String defaultImg = "";
        String photoId = "";
        String referenceId = "";
        int newTotalImg = photoArray.length();
        if (newTotalImg > 0) { // photoArray is never null here, so guard against an empty array instead
            PagePhotoDAO photoInfoObj1 = new PagePhotoDAOBuilder().build(photoArray.get(0).toString());
            defaultImg = photoInfoObj1.getImage();
            photoId = photoInfoObj1.getPhotoId();
            referenceId = photoInfoObj1.getReferenceId();
            PageAlbumDAO albumInfo = new PageAlbumDAO();
            albumInfo.setDefaultImg(defaultImg);
            albumInfo.setTotalImg(newTotalImg);
            albumInfo.setPhotoId(photoId);
            albumInfo.setReferenceId(referenceId);
            PageAlbumDAO oldAlbumInfo = getAlbumInfo(pageId, albumId);
            JSONObject resultJson = new JSONObject();
            Boolean refernceId = false;
            Boolean statusUpdate = false;
            if (oldAlbumInfo != null) {
                if (oldAlbumInfo.getAlbumId().equals(albumId)) {
                    if (oldAlbumInfo.getPhotoId() != null) {
                        statusUpdate = true;
                        int totalImg = newTotalImg + oldAlbumInfo.getTotalImg();
                        String coverId = PropertyProvider.get("PAGE_COVER_PHOTOS_ALBUM_ID");
                        String profileId = PropertyProvider.get("PAGE_PROFILE_PHOTOS_ALBUM_ID");
                        String timelineId = PropertyProvider.get("PAGE_TIMELINE_PHOTOS_ALBUM_ID");
                        if (albumId.equals(coverId)) {
                            refernceId = true;
                        } else if (albumId.equals(profileId)) {
                            refernceId = true;
                        } else if (albumId.equals(timelineId)) {
                            refernceId = true;
                        }
                        editAlbumTotalImg(pageId, albumId, totalImg);
                    } else {
                        editAlbum(pageId, albumId, albumInfo.toString());
                    }
                }

            } else {
                if (albumId.equals(PropertyProvider.get("PAGE_TIMELINE_PHOTOS_ALBUM_ID"))) {
                    albumInfo.setTitle(PropertyProvider.get("PAGE_TIMELINE_PHOTOS_ALBUM_TITLE"));
                } else if (albumId.equals(PropertyProvider.get("PAGE_PROFILE_PHOTOS_ALBUM_ID"))) {
                    albumInfo.setTitle(PropertyProvider.get("PAGE_PROFILE_PHOTOS_ALBUM_TITLE"));
                } else if (albumId.equals(PropertyProvider.get("PAGE_COVER_PHOTOS_ALBUM_ID"))) {
                    albumInfo.setTitle(PropertyProvider.get("PAGE_COVER_PHOTOS_ALBUM_TITLE"));
                }
                albumInfo.setPageId(pageId);
                albumInfo.setAlbumId(albumId);
                createAlbum(albumInfo.toString());

            }
            List<String> images = new ArrayList<>();
            for (int i = 0; i < newTotalImg; i++) {
                PagePhotoDAO photoInfoObj = new PagePhotoDAOBuilder().build(photoArray.get(i).toString());
                photoInfoObj.setPageId(pageId);
                photoInfoObj.setCreatedOn(utility.getCurrentTime());
                photoInfoObj.setModifiedOn(utility.getCurrentTime());
                photoInfoObj.setReferenceId(referenceId);
                photoList.add(photoInfoObj);
                if (!refernceId && statusUpdate) {
                    images.add(photoInfoObj.getImage());
                }
            }
            if (!refernceId && statusUpdate) {
                referenceId = oldAlbumInfo.getReferenceId();
                StatusModel statusModel = new StatusModel();
                ResultEvent rEvent = statusModel.updateStatusPhoto(referenceId, images.toString());
                if (rEvent.getResponseCode().equals(PropertyProvider.get("SUCCESSFUL_OPERATION"))) {
                }
            }
            mongoCollection.insertMany(photoList);
            PageDAO pageInfo = getPageBasicInfo(pageId);
            if (pageInfo != null) {
                this.getResultEvent().setResult(pageInfo.getTitle());
            }
            this.getResultEvent().setResponseCode(PropertyProvider.get("SUCCESSFUL_OPERATION"));
        } else {
            this.getResultEvent().setResponseCode(PropertyProvider.get("NULL_POINTER_EXCEPTION"));
        }
    } catch (Exception ex) {
        this.getResultEvent().setResponseCode(PropertyProvider.get("ERROR_EXCEPTION"));
    }
    return this.resultEvent;
}
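
The example above inserts mapped PagePhotoDAO objects rather than raw Documents, which requires a codec registered for that class. The project's DBConnection helper presumably handles this; a generic sketch using the driver's PojoCodecProvider would look roughly as follows (the connection string, database, and collection names are guesses):

    org.bson.codecs.configuration.CodecRegistry pojoRegistry =
            org.bson.codecs.configuration.CodecRegistries.fromRegistries(
                    com.mongodb.MongoClientSettings.getDefaultCodecRegistry(),
                    org.bson.codecs.configuration.CodecRegistries.fromProviders(
                            org.bson.codecs.pojo.PojoCodecProvider.builder().automatic(true).build()));

    MongoCollection<PagePhotoDAO> photos = com.mongodb.client.MongoClients
            .create("mongodb://localhost:27017")
            .getDatabase("shampan")
            .withCodecRegistry(pojoRegistry)
            .getCollection("pagePhotos", PagePhotoDAO.class);

    photos.insertMany(photoList);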

From source file:com.shiyq.mongodb.BulkInsertThread.java

@Override
public void run() {
    MongoClient mongoClient = new MongoClient("localhost", 27017);
    MongoDatabase mongoDatabase = mongoClient.getDatabase("study");

    MongoCollection<Document> collection = mongoDatabase.getCollection("s_user");
    List<Document> list = new ArrayList<>();
    Document document;

    int i = 0;
    while (true) {
        if (i++ < 1000) {
            document = new Document();
            document.append("id", UUID.randomUUID());
            document.append("code", code);
            document.append("name", name);
            document.append("password", password);
            document.append("create_date", new Date());
            list.add(document);
        } else {
            //System.out.println(list.size());
            collection.insertMany(list);
            list.clear();
            i = 0;
        }
    }
}
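
The run() method above loops forever, never flushes a final partial batch, and never closes its client. A bounded variant of the same batching pattern (the field values and the 10,000-document total are illustrative) might look like this:

@Override
public void run() {
    MongoClient mongoClient = new MongoClient("localhost", 27017);
    try {
        MongoCollection<Document> collection = mongoClient.getDatabase("study").getCollection("s_user");
        List<Document> batch = new ArrayList<>();
        for (int n = 0; n < 10000; n++) {
            batch.add(new Document("id", UUID.randomUUID().toString()).append("create_date", new Date()));
            if (batch.size() == 1000) {
                collection.insertMany(batch); // insert a full batch of 1000 documents
                batch.clear();
            }
        }
        if (!batch.isEmpty()) {
            collection.insertMany(batch); // flush the final partial batch
        }
    } finally {
        mongoClient.close(); // release the connection pool
    }
}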

From source file:com.telefonica.iot.cygnus.backends.mongo.MongoBackendImpl.java

License:Open Source License

/**
 * Inserts a new document in the given raw collection within the given database (row-like mode).
 * @param dbName
 * @param collectionName
 * @param aggregation
 * @throws Exception
 */
@Override
public void insertContextDataRaw(String dbName, String collectionName, ArrayList<Document> aggregation)
        throws Exception {
    MongoDatabase db = getDatabase(dbName);
    MongoCollection<Document> collection = db.getCollection(collectionName);
    collection.insertMany(aggregation);
}
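
Note that insertMany(documents) performs an ordered insert, so the first failed write stops the rest of the batch. If the remaining documents should still be attempted, the driver's overload taking com.mongodb.client.model.InsertManyOptions can be used instead; a sketch against the method above:

    collection.insertMany(aggregation, new InsertManyOptions().ordered(false)); // unordered insert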

From source file:documentation.ChangeStreamSamples.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create("mongodb://localhost:27017,localhost:27018,localhost:27019");
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // Select the MongoDB database.
    MongoDatabase database = mongoClient.getDatabase("testChangeStreams");
    database.drop();
    sleep();

    // Select the collection to query.
    MongoCollection<Document> collection = database.getCollection("documents");

    /*
     * Example 1
     * Create a simple change stream against an existing collection.
     */
    System.out.println("1. Initial document from the Change Stream:");

    // Create the change stream cursor.
    MongoChangeStreamCursor<ChangeStreamDocument<Document>> cursor = collection.watch().cursor();

    // Insert a test document into the collection.
    collection.insertOne(Document.parse("{username: 'alice123', name: 'Alice'}"));
    ChangeStreamDocument<Document> next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 2
     * Create a change stream with 'lookup' option enabled.
     * The test document will be returned with a full version of the updated document.
     */
    System.out.println("2. Document from the Change Stream, with lookup enabled:");

    // Create the change stream cursor.
    cursor = collection.watch().fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Update the test document.
    collection.updateOne(Document.parse("{username: 'alice123'}"),
            Document.parse("{$set : { email: 'alice@example.com'}}"));

    // Block until the next result is returned
    next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 3
     * Create a change stream with 'lookup' option using a $match and ($redact or $project) stage.
     */
    System.out.println(
            "3. Document from the Change Stream, with lookup enabled, matching `update` operations only: ");

    // Insert some dummy data.
    collection.insertMany(asList(Document.parse("{updateMe: 1}"), Document.parse("{replaceMe: 1}")));

    // Create $match pipeline stage.
    List<Bson> pipeline = singletonList(
            Aggregates.match(Filters.or(Document.parse("{'fullDocument.username': 'alice123'}"),
                    Filters.in("operationType", asList("update", "replace", "delete")))));

    // Create the change stream cursor with $match.
    cursor = collection.watch(pipeline).fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Forward to the end of the change stream
    next = cursor.tryNext();

    // Update the test document.
    collection.updateOne(Filters.eq("updateMe", 1), Updates.set("updated", true));
    next = cursor.next();
    System.out.println(format("Update operationType: %s %n %s", next.getUpdateDescription(), next));

    // Replace the test document.
    collection.replaceOne(Filters.eq("replaceMe", 1), Document.parse("{replaced: true}"));
    next = cursor.next();
    System.out.println(format("Replace operationType: %s", next));

    // Delete the test document.
    collection.deleteOne(Filters.eq("username", "alice123"));
    next = cursor.next();
    System.out.println(format("Delete operationType: %s", next));
    cursor.close();
    sleep();

    /*
     * Example 4
     * Resume a change stream using a resume token.
     */
    System.out.println("4. Document from the Change Stream including a resume token:");

    // Get the resume token from the last document we saw in the previous change stream cursor.
    BsonDocument resumeToken = cursor.getResumeToken();
    System.out.println(resumeToken);

    // Pass the resume token to the resume after function to continue the change stream cursor.
    cursor = collection.watch().resumeAfter(resumeToken).cursor();

    // Insert a test document.
    collection.insertOne(Document.parse("{test: 'd'}"));

    // Block until the next result is returned
    next = cursor.next();
    System.out.println(next);
    cursor.close();
}

From source file:eu.vital.vitalcep.restApp.filteringApi.StaticFiltering.java

/**
 * Creates a filter.
 *
 * @param info
 * @return the filter id 
 * @throws java.io.IOException 
 */
@POST
@Path("filterstaticdata")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response filterstaticdata(String info, @Context HttpServletRequest req)
        throws IOException, UnsupportedEncodingException, NoSuchAlgorithmException {

    JSONObject jo = new JSONObject(info);

    if (jo.has("dolceSpecification") && jo.has("data")) { // && jo.has("data") for demo

        MongoClient mongo = new MongoClient(new MongoClientURI(mongoURL));
        MongoDatabase db = mongo.getDatabase(mongoDB);

        try {
            db.getCollection("staticdatafilters");
        } catch (Exception e) {
            //System.out.println("Mongo is down");
            db = null;
            if (mongo != null) {
                mongo.close();
                mongo = null;
            }
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();

        }

        if (jo.has("dolceSpecification")) {

            //Filter oFilter = new Filter(filter);
            JSONObject dsjo = jo.getJSONObject("dolceSpecification");
            String str = dsjo.toString();//"{\"dolceSpecification\": "+ dsjo.toString()+"}";

            try {

                DolceSpecification ds = new DolceSpecification(str);

                if (!(ds instanceof DolceSpecification)) {
                    return Response.status(Response.Status.BAD_REQUEST).build();
                }

                String mqin = RandomStringUtils.randomAlphanumeric(8);
                String mqout = RandomStringUtils.randomAlphanumeric(8);

                JSONArray aData = jo.getJSONArray("data");

                CEP cepProcess = new CEP();

                if (!(cepProcess.CEPStart(CEP.CEPType.DATA, ds, mqin, mqout, confFile, aData.toString(),
                        null))) {
                    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
                }

                String clientName = "collector_" + RandomStringUtils.randomAlphanumeric(4);

                if (cepProcess.PID < 1) {
                    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
                }

                UUID uuid = UUID.randomUUID();
                String randomUUIDString = uuid.toString();

                DBObject dbObject = createCEPFilterStaticSensorJsonld(info, randomUUIDString, jo, dsjo,
                        "vital:CEPFilterStaticDataSensor");
                Document doc = new Document(dbObject.toMap());

                try {
                    db.getCollection("staticdatafilters").insertOne(doc);
                    String id = doc.get("_id").toString();

                } catch (MongoException ex) {
                    db = null;
                    if (mongo != null) {
                        mongo.close();
                        mongo = null;
                    }
                    return Response.status(Response.Status.BAD_REQUEST).build();
                }

                JSONObject opState = createOperationalStateObservation(randomUUIDString);

                DBObject oPut = (DBObject) JSON.parse(opState.toString());
                Document doc1 = new Document(oPut.toMap());

                try {
                    db.getCollection("staticdatafiltersobservations").insertOne(doc1);
                    String id = doc1.get("_id").toString();

                } catch (MongoException ex) {
                    db = null;
                    if (mongo != null) {
                        mongo.close();
                        mongo = null;
                    }
                    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
                }

                /////////////////////////////////////////////////////
                // creates client and messages process
                //
                MqttAllInOne oMqtt = new MqttAllInOne();
                TMessageProc MsgProcc = new TMessageProc();

                /////////////////////////////////////////////////////////////////////////
                // PREPARING DOLCE INPUT
                Decoder decoder = new Decoder();
                ArrayList<String> simpleEventAL = decoder.JsonldArray2DolceInput(aData);

                String sal = simpleEventAL.toString();
                /////////////////////////////////////////////////////////////////////////////
                // SENDING TO MOSQUITTO
                oMqtt.sendMsg(MsgProcc, clientName, simpleEventAL, mqin, mqout, false);

                /////////////////////////////////////////////////////////////////////////////
                //RECEIVING FROM MOSQUITO               
                ArrayList<MqttMsg> mesagges = MsgProcc.getMsgs();

                ArrayList<Document> outputL;
                outputL = new ArrayList<>();

                Encoder encoder = new Encoder();

                outputL = encoder.dolceOutputList2ListDBObject(mesagges, host, randomUUIDString);

                String sOutput = "[";
                for (int i = 0; i < outputL.size(); i++) {
                    Document element = outputL.get(i);

                    if (i == 0) {
                        sOutput = sOutput + element.toJson();
                    } else {
                        sOutput = sOutput + "," + element.toJson();
                    }
                }

                sOutput = sOutput + "]";

                StringBuilder ck = new StringBuilder();

                try {
                    Security slogin = new Security();

                    Boolean token = slogin.login(req.getHeader("name"), req.getHeader("password"), false, ck);
                    if (!token) {
                        return Response.status(Response.Status.UNAUTHORIZED).build();
                    }
                    cookie = ck.toString();

                    DMSManager oDMS = new DMSManager(dmsURL, cookie);

                    MongoCollection<Document> collection = db.getCollection("staticdatafiltersobservations");

                    if (outputL.size() > 0) {
                        collection.insertMany(outputL);
                        if (!oDMS.pushObservations(sOutput)) {
                            java.util.logging.Logger.getLogger(StaticFiltering.class.getName())
                                    .log(Level.SEVERE, "couldn't save to the DMS");
                        }
                    }

                } catch (KeyManagementException | KeyStoreException ex) {
                    db = null;
                    if (mongo != null) {
                        mongo.close();
                        mongo = null;
                    }
                    java.util.logging.Logger.getLogger(MessageProcessor_publisher.class.getName())
                            .log(Level.SEVERE, null, ex);
                }
                //cepProcess.
                try {
                    CepContainer.deleteCepProcess(cepProcess.PID);

                    if (!cepProcess.cepDispose()) {
                        java.util.logging.Logger.getLogger(StaticFiltering.class.getName()).log(Level.SEVERE,
                                "couldn't terminate ucep");
                    }
                } catch (Exception e) {
                    java.util.logging.Logger.getLogger(StaticFiltering.class.getName()).log(Level.SEVERE, null,
                            e);
                }
                db = null;
                if (mongo != null) {
                    mongo.close();
                    mongo = null;
                }
                return Response.status(Response.Status.OK).entity(sOutput).build();

            } catch (IOException | JSONException | java.text.ParseException e) {
                db = null;
                if (mongo != null) {
                    mongo.close();
                    mongo = null;
                }
                return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
            }
        }

        return Response.status(Response.Status.BAD_REQUEST).build();

    }

    return Response.status(Response.Status.BAD_REQUEST).build();

}

From source file:eu.vital.vitalcep.restApp.filteringApi.StaticFiltering.java

/**
 * Gets a filter.
 *
 * @param info
 * @param req
 * @return the filter 
 * @throws java.io.IOException 
 */
@POST
@Path("filterstaticquery")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response filterstaticquery(String info, @Context HttpServletRequest req) throws IOException {

    JSONObject jo = new JSONObject(info);

    if (jo.has("dolceSpecification") && jo.has("query")) {
        // && jo.has("data") for demo
        StringBuilder ck = new StringBuilder();
        Security slogin = new Security();

        JSONObject credentials = new JSONObject();

        Boolean token = slogin.login(req.getHeader("name"), req.getHeader("password"), false, ck);
        credentials.put("username", req.getHeader("name"));
        credentials.put("password", req.getHeader("password"));
        if (!token) {
            return Response.status(Response.Status.UNAUTHORIZED).build();
        }
        this.cookie = ck.toString();

        MongoClient mongo = new MongoClient(new MongoClientURI(mongoURL));
        MongoDatabase db = mongo.getDatabase(mongoDB);

        try {
            db.getCollection("staticqueryfilters");
        } catch (Exception e) {
            //System.out.println("Mongo is down");
            db = null;
            if (mongo != null) {
                mongo.close();
                mongo = null;
            }
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();

        }

        if (jo.has("dolceSpecification")) {

            //Filter oFilter = new Filter(filter);
            JSONObject dsjo = jo.getJSONObject("dolceSpecification");
            String str = dsjo.toString();//"{\"dolceSpecification\": "+ dsjo.toString()+"}";

            try {

                DolceSpecification ds = new DolceSpecification(str);

                if (!(ds instanceof DolceSpecification)) {
                    return Response.status(Response.Status.BAD_REQUEST).build();
                }

                String mqin = RandomStringUtils.randomAlphanumeric(8);
                String mqout = RandomStringUtils.randomAlphanumeric(8);

                CEP cepProcess = new CEP();

                if (!(cepProcess.CEPStart(CEP.CEPType.QUERY, ds, mqin, mqout, confFile, jo.getString("query"),
                        null))) {
                    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
                }

                String clientName = "collector_" + RandomStringUtils.randomAlphanumeric(4);

                if (cepProcess.PID < 1) {
                    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
                }

                UUID uuid = UUID.randomUUID();
                String randomUUIDString = uuid.toString();

                DBObject dbObject = createCEPFilterStaticSensorJsonld(info, randomUUIDString, jo, dsjo,
                        "vital:CEPFilterStaticQuerySensor");
                Document doc = new Document(dbObject.toMap());

                try {
                    db.getCollection("staticqueryfilters").insertOne(doc);
                    String id = doc.get("_id").toString();

                } catch (MongoException ex) {
                    db = null;
                    if (mongo != null) {
                        mongo.close();
                        mongo = null;
                    }
                    return Response.status(Response.Status.BAD_REQUEST).build();
                }

                JSONObject opState = createOperationalStateObservation(randomUUIDString);

                DBObject oPut = (DBObject) JSON.parse(opState.toString());
                Document doc1 = new Document(oPut.toMap());

                try {
                    db.getCollection("staticqueryfiltersobservations").insertOne(doc1);
                    String id = doc1.get("_id").toString();

                } catch (MongoException ex) {
                    db = null;
                    if (mongo != null) {
                        mongo.close();
                        mongo = null;
                    }
                    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
                }

                /////////////////////////////////////////////////////
                // creates client and messages process
                //
                MqttAllInOne oMqtt = new MqttAllInOne();
                TMessageProc MsgProcc = new TMessageProc();

                JSONArray aData = new JSONArray();

                try {

                    DMSManager oDMS = new DMSManager(dmsURL, cookie);

                    aData = oDMS.getObservations(jo.getString("query"));

                    if (aData.length() < 1) {
                        CepContainer.deleteCepProcess(cepProcess.PID);

                        if (!cepProcess.cepDispose()) {
                            java.util.logging.Logger.getLogger(StaticFiltering.class.getName())
                                    .log(Level.SEVERE, "bcep Instance not terminated");
                        }
                        db = null;
                        if (mongo != null) {
                            mongo.close();
                            mongo = null;
                        }
                        return Response.status(Response.Status.BAD_REQUEST).entity("no data to be filtered")
                                .build();
                    }

                } catch (KeyManagementException | KeyStoreException ex) {
                    java.util.logging.Logger.getLogger(StaticFiltering.class.getName()).log(Level.SEVERE, null,
                            ex);
                }

                //DMSManager oDMS = new DMSManager(dmsURL,req.getHeader("vitalAccessToken"));

                /////////////////////////////////////////////////////////////////////////
                // PREPARING DOLCE INPUT
                Decoder decoder = new Decoder();
                ArrayList<String> simpleEventAL = decoder.JsonldArray2DolceInput(aData);

                /////////////////////////////////////////////////////////////////////////////
                // SENDING TO MOSQUITTO
                oMqtt.sendMsg(MsgProcc, clientName, simpleEventAL, mqin, mqout, false);

                /////////////////////////////////////////////////////////////////////////////
                //RECEIVING FROM MOSQUITO
                ArrayList<MqttMsg> mesagges = MsgProcc.getMsgs();

                //FORMATTING OBSERVATIONS OUTPUT
                Encoder encoder = new Encoder();

                ArrayList<Document> outputL;
                outputL = new ArrayList<>();

                outputL = encoder.dolceOutputList2ListDBObject(mesagges, host, randomUUIDString);

                String sOutput = "[";
                for (int i = 0; i < outputL.size(); i++) {
                    Document element = outputL.get(i);

                    if (i == 0) {
                        sOutput = sOutput + element.toJson();
                    } else {
                        sOutput = sOutput + "," + element.toJson();
                    }
                }

                sOutput = sOutput + "]";

                try {

                    DMSManager pDMS = new DMSManager(dmsURL, cookie);

                    MongoCollection<Document> collection = db.getCollection("staticqueryfiltersobservations");

                    if (outputL.size() > 0) {
                        collection.insertMany(outputL);
                        if (!pDMS.pushObservations(sOutput)) {
                            java.util.logging.Logger.getLogger(StaticFiltering.class.getName())
                                    .log(Level.SEVERE, "couldn't save to the DMS");
                        }
                    }

                } catch (IOException | KeyManagementException | NoSuchAlgorithmException
                        | KeyStoreException ex) {
                    db = null;
                    if (mongo != null) {
                        mongo.close();
                        mongo = null;
                    }
                    java.util.logging.Logger.getLogger(StaticFiltering.class.getName()).log(Level.SEVERE, null,
                            ex);
                }

                CepContainer.deleteCepProcess(cepProcess.PID);

                if (!cepProcess.cepDispose()) {
                    java.util.logging.Logger.getLogger(StaticFiltering.class.getName()).log(Level.SEVERE,
                            "bcep Instance not terminated");
                }
                db = null;
                if (mongo != null) {
                    mongo.close();
                    mongo = null;
                }
                return Response.status(Response.Status.OK).entity(sOutput).build();

            } catch (IOException | JSONException | NoSuchAlgorithmException | java.text.ParseException e) {
                db = null;
                if (mongo != null) {
                    mongo.close();
                    mongo = null;
                }
                return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
            }
        }

        return Response.status(Response.Status.BAD_REQUEST).build();

    }

    return Response.status(Response.Status.BAD_REQUEST).build();

}

From source file:examples.tour.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {

        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Aggregation
    collection
            .aggregate(
                    asList(match(gt("i", 0)), project(Document.parse("{ITimes10: {$multiply: ['$i', 10]}}"))))
            .forEach(printBlock);

    myDoc = collection.aggregate(singletonList(group(null, sum("total", "$i")))).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), set("i", 110));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100), inc("i", 100));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    //collection.find().forEach(printBlock);

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}

From source file:mongodb.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {

    //represents a pool of connections to the database
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    //        database.drop();

    // release resources
    mongoClient.close();
}

From source file:mongoSample.MongoSample.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    String mongoServer = args[0];

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest
    // earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can
    // explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    //database.drop();

    // release resources
    mongoClient.close();
}

From source file:mongotwitter.MongoTwitter.java

public void tweet(String body) {
    final ObjectId tweet_id = new ObjectId();
    final Date time = new Date();
    MongoCollection<Document> tweets = db.getCollection("tweets");
    MongoCollection<Document> userline = db.getCollection("userline");
    MongoCollection<Document> timeline = db.getCollection("timeline");
    MongoCollection<Document> followers = db.getCollection("followers");

    Document tweetDoc = new Document("tweet_id", tweet_id).append("username", nick).append("body", body);

    Document userDoc = new Document("username", nick).append("time", time).append("tweet_id", tweet_id);

    List<Document> timelineList = new ArrayList<>();
    List<Document> followerList = followers.find(eq("username", nick)).into(new ArrayList<Document>());
    for (Document doc : followerList) {
        String follower = (String) doc.get("follower");
        Document timeDoc = new Document("username", follower).append("time", time).append("tweet_id", tweet_id);
        timelineList.add(timeDoc);
    }

    tweets.insertOne(tweetDoc);
    userline.insertOne(userDoc);
    if (!timelineList.isEmpty()) {
        timeline.insertMany(timelineList); // the driver rejects an empty document list
    }

    System.out.println("* You tweeted \"" + body + "\" at " + time);
}