Example usage for com.mongodb.client MongoCollection insertMany

List of usage examples for com.mongodb.client MongoCollection insertMany

Introduction

On this page you can find example usage for com.mongodb.client MongoCollection insertMany.

Prototype

void insertMany(List<? extends TDocument> documents);

Document

Inserts one or more documents.
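
Before the real-world excerpts below, here is a minimal, self-contained sketch of calling insertMany with the synchronous Java driver (3.8+ assumed for MongoClients and countDocuments). The connection string, database name, and collection name are illustrative placeholders, not taken from the examples on this page.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import org.bson.Document;

import java.util.Arrays;
import java.util.List;

public class InsertManySketch {
    public static void main(String[] args) {
        // placeholder connection string; adjust for your deployment
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> collection = client.getDatabase("exampledb").getCollection("items");

            // build a small batch of documents
            List<Document> docs = Arrays.asList(
                    new Document("name", "alpha").append("qty", 1),
                    new Document("name", "beta").append("qty", 2));

            // insert the whole batch in one call; an ordered insert stops at the first failure
            collection.insertMany(docs);

            System.out.println("documents in collection: " + collection.countDocuments());
        }
    }
}

The driver also provides an overload that takes InsertManyOptions, which can request an unordered insert so the remaining documents are still attempted after an individual failure.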

Usage

From source file:org.apache.nifi.processors.mongodb.PutMongoRecord.java

License:Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final RecordReaderFactory recordParserFactory = context.getProperty(RECORD_READER_FACTORY)
            .asControllerService(RecordReaderFactory.class);

    final WriteConcern writeConcern = getWriteConcern(context);

    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);

    List<Document> inserts = new ArrayList<>();
    int ceiling = context.getProperty(INSERT_COUNT).asInteger();
    int added = 0;
    boolean error = false;

    try (RecordReader reader = recordParserFactory.createRecordReader(flowFile, session.read(flowFile),
            getLogger())) {
        RecordSchema schema = reader.getSchema();
        Record record;
        while ((record = reader.nextRecord()) != null) {
            Document document = new Document();
            for (String name : schema.getFieldNames()) {
                document.put(name, record.getValue(name));
            }
            inserts.add(document);
            if (inserts.size() == ceiling) {
                collection.insertMany(inserts);
                added += inserts.size();
                inserts = new ArrayList<>();
            }
        }
        if (inserts.size() > 0) {
            collection.insertMany(inserts);
        }
    } catch (SchemaNotFoundException | IOException | MalformedRecordException e) {
        getLogger().error("PutMongoRecord failed with error:", e);
        session.transfer(flowFile, REL_FAILURE);
        error = true;
    } finally {
        if (!error) {
            session.getProvenanceReporter().send(flowFile, context.getProperty(URI).getValue(),
                    String.format("Added %d documents to MongoDB.", added));
            session.transfer(flowFile, REL_SUCCESS);
            getLogger().info("Inserted {} records into MongoDB", new Object[] { added });
        }
    }
    session.commit();
    /*        final ComponentLog logger = getLogger();
            
            if (inserts.size() > 0) {
    try {
        collection.insertMany(inserts);
            
        session.getProvenanceReporter().send(flowFile, context.getProperty(URI).getValue());
        session.transfer(flowFile, REL_SUCCESS);
            
    } catch (Exception e) {
        logger.error("Failed to insert {} into MongoDB due to {}", new Object[]{flowFile, e}, e);
        session.transfer(flowFile, REL_FAILURE);
        context.yield();
    }
            }*/
}

From source file:org.axonframework.mongo.eventsourcing.eventstore.AbstractMongoEventStorageStrategy.java

License:Apache License

@Override
public void appendEvents(MongoCollection<Document> eventCollection, List<? extends EventMessage<?>> events,
        Serializer serializer) {
    eventCollection.insertMany(createEventDocuments(events, serializer).collect(Collectors.toList()));
}

From source file:org.nuxeo.directory.mongodb.MongoDBReference.java

License:Apache License

/**
 * Adds the links between the source id and the target ids
 *
 * @param sourceId the source id
 * @param targetIds the target ids
 * @param session the mongoDB session
 * @throws DirectoryException
 */
public void addLinks(String sourceId, List<String> targetIds, MongoDBSession session)
        throws DirectoryException {
    if (!initialized) {
        if (dataFileName != null) {
            initializeSession(session);
        }
        initialized = true;
    }
    if (targetIds == null || targetIds.isEmpty()) {
        return;
    }
    try {
        MongoCollection<Document> coll = session.getCollection(collection);
        List<Document> newDocs = targetIds.stream().map(targetId -> buildDoc(sourceId, targetId))
                .filter(doc -> coll.count(doc) == 0).collect(Collectors.toList());
        coll.insertMany(newDocs);
    } catch (MongoWriteException e) {
        throw new DirectoryException(e);
    }
}

From source file:org.nuxeo.directory.mongodb.MongoDBReference.java

License:Apache License

@Override
public void addLinks(List<String> sourceIds, String targetId) throws DirectoryException {
    if (sourceIds == null || sourceIds.isEmpty()) {
        return;
    }
    try (MongoDBSession session = getMongoDBSession()) {
        MongoCollection<Document> coll = session.getCollection(collection);
        List<Document> newDocs = sourceIds.stream().map(sourceId -> buildDoc(sourceId, targetId))
                .filter(doc -> coll.count(doc) == 0).collect(Collectors.toList());
        coll.insertMany(newDocs);
    } catch (MongoWriteException e) {
        throw new DirectoryException(e);
    }
}

From source file:org.restheart.handlers.applicationlogic.CsvLoaderHandler.java

License:Open Source License

@Override
public void handleRequest(HttpServerExchange exchange, RequestContext context) throws Exception {
    if (context.isOptions()) {
        exchange.getResponseHeaders().put(HttpString.tryFromString("Access-Control-Allow-Methods"), "POST");
        exchange.getResponseHeaders().put(HttpString.tryFromString("Access-Control-Allow-Headers"),
                "Accept, Accept-Encoding, Authorization, " + "Content-Length, Content-Type, Host, Origin, "
                        + "X-Requested-With, User-Agent, " + "No-Auth-Challenge, " + AUTH_TOKEN_HEADER + ", "
                        + AUTH_TOKEN_VALID_HEADER + ", " + AUTH_TOKEN_LOCATION_HEADER);
        exchange.setStatusCode(HttpStatus.SC_OK);
        exchange.endExchange();
    } else {
        exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, Representation.JSON_MEDIA_TYPE);
        if (doesApply(context)) {
            if (checkContentType(exchange)) {
                if (checkQueryParameters(exchange)) {
                    try {
                        List<BsonDocument> documents = parseCsv(context.getRawContent());

                        if (documents != null && documents.size() > 0) {
                            MongoCollection<BsonDocument> mcoll = MongoDBClientSingleton.getInstance()
                                    .getClient().getDatabase(db).getCollection(coll, BsonDocument.class);

                            mcoll.insertMany(documents);
                            exchange.setStatusCode(HttpStatus.SC_OK);
                        } else {
                            exchange.setStatusCode(HttpStatus.SC_NOT_MODIFIED);
                        }
                    } catch (IOException ex) {
                        LOGGER.error("error parsing CSV data", ex);
                        exchange.setStatusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR);
                        exchange.getResponseSender()
                                .send(getError(HttpStatus.SC_INTERNAL_SERVER_ERROR, ERROR_PARSING_DATA));
                    }
                } else {
                    exchange.setStatusCode(HttpStatus.SC_BAD_REQUEST);
                    exchange.getResponseSender().send(getError(HttpStatus.SC_BAD_REQUEST, ERROR_QPARAM));
                }
            } else {
                exchange.setStatusCode(HttpStatus.SC_BAD_REQUEST);
                exchange.getResponseSender().send(getError(HttpStatus.SC_BAD_REQUEST, ERROR_CONTENT_TYPE));
            }

        } else {
            exchange.getResponseSender().send(getError(HttpStatus.SC_NOT_IMPLEMENTED, ERROR_WRONG_METHOD));

            exchange.setStatusCode(HttpStatus.SC_NOT_IMPLEMENTED);
        }

        exchange.endExchange();
    }
}

From source file:org.restheart.plugins.services.CsvLoader.java

License:Open Source License

@Override
public void handleRequest(HttpServerExchange exchange, RequestContext context) throws Exception {
    if (context.isOptions()) {
        handleOptions(exchange, context);
    } else {
        exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, Resource.JSON_MEDIA_TYPE);
        if (doesApply(context)) {
            if (checkContentType(exchange)) {
                try {
                    CsvRequestParams params = new CsvRequestParams(exchange);

                    try {
                        List<BsonDocument> documents = parseCsv(exchange, params, context,
                                context.getRawContent());

                        if (documents != null && documents.size() > 0) {
                            MongoCollection<BsonDocument> mcoll = MongoDBClientSingleton.getInstance()
                                    .getClient().getDatabase(params.db)
                                    .getCollection(params.coll, BsonDocument.class);

                            if (params.update) {
                                documents.stream().forEach(document -> {
                                    BsonDocument updateQuery = new BsonDocument("_id", document.remove("_id"));

                                    // for update import, take the _filter property into account
                                    // for instance, a filter allows using the $ positional array operator
                                    BsonValue _filter = document.remove(FILTER_PROPERTY);

                                    if (_filter != null && _filter.isDocument()) {
                                        updateQuery.putAll(_filter.asDocument());
                                    }
                                    if (params.upsert) {
                                        mcoll.findOneAndUpdate(updateQuery, new BsonDocument("$set", document),
                                                FAU_WITH_UPSERT_OPS);
                                    } else {

                                        mcoll.findOneAndUpdate(updateQuery, new BsonDocument("$set", document),
                                                FAU_NO_UPSERT_OPS);
                                    }
                                });
                            } else if (params.upsert) {
                                documents.stream().forEach(document -> {
                                    BsonDocument updateQuery = new BsonDocument("_id", document.remove("_id"));

                                    mcoll.findOneAndUpdate(updateQuery, new BsonDocument("$set", document),
                                            FAU_WITH_UPSERT_OPS);
                                });
                            } else {
                                mcoll.insertMany(documents);
                            }
                            context.setResponseStatusCode(HttpStatus.SC_OK);
                        } else {
                            context.setResponseStatusCode(HttpStatus.SC_NOT_MODIFIED);
                        }
                    } catch (IOException ex) {
                        LOGGER.debug("error parsing CSV data", ex);
                        ResponseHelper.endExchangeWithMessage(exchange, context, HttpStatus.SC_BAD_REQUEST,
                                ERROR_PARSING_DATA);

                    }
                } catch (IllegalArgumentException iae) {
                    ResponseHelper.endExchangeWithMessage(exchange, context, HttpStatus.SC_BAD_REQUEST,
                            ERROR_QPARAM);
                }
            } else {
                ResponseHelper.endExchangeWithMessage(exchange, context, HttpStatus.SC_BAD_REQUEST,
                        ERROR_CONTENT_TYPE);
            }

        } else {
            ResponseHelper.endExchangeWithMessage(exchange, context, HttpStatus.SC_NOT_IMPLEMENTED,
                    ERROR_WRONG_METHOD);
        }
    }
    // this cleans the error message about the wrong media type
    // added by BodyInjectorHandler
    context.setResponseContent(null);

    next(exchange, context);
}

From source file:thermostatapplication.TemperaturePersisterTimerTask.java

public synchronized void persistDataOnMongolab() {
    //disable console logging
    //Logger mongoLogger = Logger.getLogger("org.mongodb.driver"); 
    //mongoLogger.setLevel(Level.SEVERE);

    iStoredTemperatures = iTemperatureStore.getTemperatures();
    if (iStoredTemperatures.isEmpty()) {
        logger.info("Nothing to persist. Exiting");
        return;
    }
    logger.info("Prepairing to persist [{}] Temps in the cloud", iStoredTemperatures.size());
    MongoCollection<Document> mongoCollection = null;
    MongoClient client = null;
    List<Document> documents = new ArrayList<>();

    for (TemperatureMeasure tTemp : iStoredTemperatures) { //Exception in thread "Timer-2" java.util.ConcurrentModificationException
        Document doc = new Document();
        doc.put("Location", tTemp.getLocation()); //Location
        doc.put("Group", tTemp.getGroup()); //Group
        doc.put("Date", Helper.getDateAsString(tTemp.getDate())); //Date
        doc.put("Day", Helper.getDayAsString(tTemp.getDate()));
        doc.put("Time", Helper.getTimeAsString(tTemp.getDate()));
        doc.put("Temp", Helper.getTempAsString(tTemp.getTemp())); //Temp
        documents.add(doc);
        iPersistedTemperatures.add(tTemp);
    }

    try {
        MongoClientURI uri = new MongoClientURI(ThermostatProperties.ML_URL);
        client = new MongoClient(uri);
        MongoDatabase database = (MongoDatabase) client.getDatabase(uri.getDatabase());
        mongoCollection = database.getCollection("dailytemps");
        mongoCollection.insertMany(documents);
        //eliminate stored Temps from the collection
        iTemperatureStore.removeAll(iPersistedTemperatures);
        client.close();
        logger.info("Temperatures persisted on mongolab: [{}]. Exiting.", iPersistedTemperatures.size());
        iPersistedTemperatures.clear();
    } catch (Throwable e) {
        logger.error("Failed to store Temps in the cloud. Stacktrace: [{}]. Exiting.", e);
        iPersistedTemperatures.clear();
        e.printStackTrace();
    } finally {
        if (client != null) {
            client.close();
        }
        iPersistedTemperatures.clear();
    }
}

From source file:tour.NewQuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    database.drop();

    // get a list of the collections in this database and print them out
    List<String> collectionNames = database.listCollectionNames().into(new ArrayList<String>());
    for (final String s : collectionNames) {
        System.out.println(s);
    }

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (it's the only one in the collection since we dropped the rest earlier)
    Document myDoc = collection.find().first();
    System.out.println(myDoc);

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur);
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc);

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // max time
    collection.find().maxTime(1, TimeUnit.SECONDS).first();

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));

    // getting a list of databases
    for (String name : mongoClient.listDatabaseNames()) {
        System.out.println(name);
    }

    // drop a database
    mongoClient.dropDatabase("databaseToBeDropped");

    // create a collection
    database.createCollection("cappedCollection",
            new CreateCollectionOptions().capped(true).sizeInBytes(0x100000));

    for (String name : database.listCollectionNames()) {
        System.out.println(name);
    }

    // create an ascending index on the "i" field
    collection.createIndex(new Document("i", 1));

    // list the indexes on the collection
    for (final Document index : collection.listIndexes()) {
        System.out.println(index);
    }

    // create a text index on the "content" field
    collection.createIndex(new Document("content", "text"));

    collection.insertOne(new Document("_id", 0).append("content", "textual content"));
    collection.insertOne(new Document("_id", 1).append("content", "additional content"));
    collection.insertOne(new Document("_id", 2).append("content", "irrelevant content"));

    // Find using the text index
    Document search = new Document("$search", "textual content -irrelevant");
    Document textSearch = new Document("$text", search);
    long matchCount = collection.count(textSearch);
    System.out.println("Text search matches: " + matchCount);

    // Find using the $language operator
    textSearch = new Document("$text", search.append("$language", "english"));
    matchCount = collection.count(textSearch);
    System.out.println("Text search matches (english): " + matchCount);

    // Find the highest scoring match
    Document projection = new Document("score", new Document("$meta", "textScore"));
    myDoc = collection.find(textSearch).projection(projection).first();
    System.out.println("Highest scoring document: " + myDoc);

    // release resources
    mongoClient.close();
}

From source file:tour.PojoQuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create();
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // create codec registry for POJOs
    CodecRegistry pojoCodecRegistry = fromRegistries(MongoClientSettings.getDefaultCodecRegistry(),
            fromProviders(PojoCodecProvider.builder().automatic(true).build()));

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb").withCodecRegistry(pojoCodecRegistry);

    // get a handle to the "people" collection
    MongoCollection<Person> collection = database.getCollection("people", Person.class);

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Person ada = new Person("Ada Byron", 20, new Address("St James Square", "London", "W1"));
    System.out.println("Original Person Model: " + ada);
    collection.insertOne(ada);

    // Person will now have an ObjectId
    System.out.println("Mutated Person Model: " + ada);

    // get it (it's the only one in the collection since we dropped the rest earlier)
    Person somebody = collection.find().first();
    System.out.println(somebody);

    // now, let's add some more people so we can explore queries and cursors
    List<Person> people = asList(
            new Person("Charles Babbage", 45, new Address("5 Devonshire Street", "London", "W11")),
            new Person("Alan Turing", 28, new Address("Bletchley Hall", "Bletchley Park", "MK12")),
            new Person("Timothy Berners-Lee", 61, new Address("Colehill", "Wimborne", null)));

    collection.insertMany(people);
    System.out.println("total # of people " + collection.countDocuments());

    System.out.println("");
    // let's get all the documents in the collection and print them out
    Block<Person> printBlock = new Block<Person>() {
        @Override
        public void apply(final Person person) {
            System.out.println(person);
        }
    };

    collection.find().forEach(printBlock);

    System.out.println("");
    // now use a query to get 1 document out
    somebody = collection.find(eq("address.city", "Wimborne")).first();
    System.out.println(somebody);

    System.out.println("");
    // now let's find everyone over 30
    collection.find(gt("age", 30)).forEach(printBlock);

    System.out.println("");
    // Update One
    collection.updateOne(eq("name", "Ada Byron"), combine(set("age", 23), set("name", "Ada Lovelace")));

    System.out.println("");
    // Update Many
    UpdateResult updateResult = collection.updateMany(not(eq("zip", null)), set("zip", null));
    System.out.println(updateResult.getModifiedCount());

    System.out.println("");
    // Replace One
    updateResult = collection.replaceOne(eq("name", "Ada Lovelace"), ada);
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("address.city", "Wimborne"));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(eq("address.city", "London"));
    System.out.println(deleteResult.getDeletedCount());

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}