Example usage for com.mongodb BulkWriteOperation execute

Introduction

This page lists example usages of com.mongodb BulkWriteOperation.execute().

Prototype

public BulkWriteResult execute() 

Document

Execute the bulk write operation with the default write concern of the collection from which this came.
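
Before the full examples, here is a minimal sketch of the call in isolation. It is an illustration, not taken from the sources below: the database and collection names ("exampleDb", "people") are made up, and the rest follows the legacy com.mongodb driver API used throughout this page. It queues a couple of writes on a BulkWriteOperation obtained from a DBCollection, runs them with execute(), and reads counters from the returned BulkWriteResult.

import java.net.UnknownHostException;

import com.mongodb.BasicDBObject;
import com.mongodb.BulkWriteOperation;
import com.mongodb.BulkWriteResult;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;

public class BulkWriteExecuteSketch {
    public static void main(final String[] args) throws UnknownHostException {
        // Illustrative names; any DBCollection works the same way.
        MongoClient mongoClient = new MongoClient();
        DBCollection coll = mongoClient.getDB("exampleDb").getCollection("people");

        // Queue writes on an ordered bulk operation obtained from the collection.
        BulkWriteOperation bulk = coll.initializeOrderedBulkOperation();
        bulk.insert(new BasicDBObject("_id", 1).append("x", 1));
        bulk.find(new BasicDBObject("_id", 1))
                .updateOne(new BasicDBObject("$set", new BasicDBObject("x", 2)));

        // execute() sends the queued writes with the collection's default write concern.
        BulkWriteResult result = bulk.execute();
        System.out.println("inserted: " + result.getInsertedCount()
                + ", matched: " + result.getMatchedCount());

        mongoClient.close();
    }
}

If any queued write fails, execute() throws com.mongodb.BulkWriteException; the MongoDocumentStore example at the end of this page shows how to recover the partial result from that exception via getWriteResult().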

Usage

From source file:edu.csulaerp.db.ReferenceMongo.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes no args
 * @throws UnknownHostException if it cannot connect to a MongoDB instance at localhost:27017
 */
public static void main(final String[] args) throws UnknownHostException {
    // connect to the local database server
    MongoClient mongoClient = new MongoClient();

    /*
    // Authenticate - optional
    MongoCredential credential = MongoCredential.createMongoCRCredential(userName, database, password);
    MongoClient mongoClient = new MongoClient(new ServerAddress(), Arrays.asList(credential));
    */

    // get handle to "mydb"
    DB db = mongoClient.getDB("mydb");

    // get a list of the collections in this database and print them out
    Set<String> collectionNames = db.getCollectionNames();
    for (final String s : collectionNames) {
        System.out.println(s);
    }

    // get a collection object to work with
    DBCollection coll = db.getCollection("testCollection");

    // drop all the data in it
    coll.drop();

    // make a document and insert it
    BasicDBObject doc = new BasicDBObject("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new BasicDBObject("x", 203).append("y", 102));

    coll.insert(doc);

    // get it back (it's the only document in the collection, since we dropped the rest earlier)
    DBObject myDoc = coll.findOne();
    System.out.println(myDoc);

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    for (int i = 0; i < 100; i++) {
        coll.insert(new BasicDBObject().append("i", i));
    }
    System.out
            .println("total # of documents after inserting 100 small ones (should be 101) " + coll.getCount());

    // let's get all the documents in the collection and print them out
    DBCursor cursor = coll.find();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // now use a query to get 1 document out
    BasicDBObject query = new BasicDBObject("i", 71);
    cursor = coll.find(query);

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // $ Operators are represented as strings
    query = new BasicDBObject("j", new BasicDBObject("$ne", 3)).append("k", new BasicDBObject("$gt", 10));

    cursor = coll.find(query);

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // now use a range query to get a larger subset
    // find all where i > 50
    query = new BasicDBObject("i", new BasicDBObject("$gt", 50));
    cursor = coll.find(query);

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    query = new BasicDBObject("i", new BasicDBObject("$gt", 20).append("$lte", 30));
    cursor = coll.find(query);

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // Count all documents in the collection, but spend at most one second doing so
    coll.find().maxTime(1, SECONDS).count();

    // Bulk operations
    BulkWriteOperation builder = coll.initializeOrderedBulkOperation();
    builder.insert(new BasicDBObject("_id", 1));
    builder.insert(new BasicDBObject("_id", 2));
    builder.insert(new BasicDBObject("_id", 3));

    builder.find(new BasicDBObject("_id", 1)).updateOne(new BasicDBObject("$set", new BasicDBObject("x", 2)));
    builder.find(new BasicDBObject("_id", 2)).removeOne();
    builder.find(new BasicDBObject("_id", 3)).replaceOne(new BasicDBObject("_id", 3).append("x", 4));

    BulkWriteResult result = builder.execute();
    System.out.println("Ordered bulk write result : " + result);

    // Unordered bulk operation - no guarantee of order of operation
    builder = coll.initializeUnorderedBulkOperation();
    builder.find(new BasicDBObject("_id", 1)).removeOne();
    builder.find(new BasicDBObject("_id", 2)).removeOne();

    result = builder.execute();
    System.out.println("Unordered bulk write result : " + result);

    // parallelScan
    ParallelScanOptions parallelScanOptions = ParallelScanOptions.builder().numCursors(3).batchSize(300)
            .build();

    List<Cursor> cursors = coll.parallelScan(parallelScanOptions);
    for (Cursor pCursor : cursors) {
        while (pCursor.hasNext()) {
            System.out.println(pCursor.next());
        }
    }

    // release resources
    db.dropDatabase();
    mongoClient.close();
}

From source file:edu.umass.cs.gnsserver.database.MongoRecords.java

License:Apache License

/**
 * Applies a bulk update to the given collection: each non-null value is upserted
 * as a replacement document, and each null value removes the matching record.
 *
 * @param collectionName the name of the collection to update
 * @param values map from primary-key value to the replacement document (null to remove)
 * @throws FailedDBOperationException if a value cannot be parsed as JSON
 * @throws RecordExistsException
 */
public void bulkUpdate(String collectionName, Map<String, JSONObject> values)
        throws FailedDBOperationException, RecordExistsException {
    //String primaryKey = mongoCollectionSpecs.getCollectionSpec(collectionName).getPrimaryKey().getName();
    DBCollection collection = db.getCollection(collectionName);
    String primaryKey = mongoCollectionSpecs.getCollectionSpec(collectionName).getPrimaryKey().getName();
    db.requestEnsureConnection();
    BulkWriteOperation unordered = collection.initializeUnorderedBulkOperation();
    for (Map.Entry<String, JSONObject> entry : values.entrySet()) {
        BasicDBObject query = new BasicDBObject(primaryKey, entry.getKey());
        JSONObject value = entry.getValue();
        if (value != null) {
            DBObject document;
            try {
                document = (DBObject) JSON.parse(value.toString());
            } catch (Exception e) {
                throw new FailedDBOperationException(collectionName, "bulkUpdate",
                        "Unable to parse JSON: " + e.getMessage());
            }
            unordered.find(query).upsert().replaceOne(document);
        } else {
            unordered.find(query).removeOne();
        }
    }
    // Maybe check the result?
    unordered.execute();
}

From source file:fr.gouv.vitam.mdbes.MainIngestMDBESFromFile.java

License:Open Source License

private static final void runOnce(final MongoDbAccess dbvitam)
        throws InterruptedException, InstantiationException, IllegalAccessException, IOException {
    System.out.println("Load starting... ");
    int nbThread = ingest.length;

    final long date11 = System.currentTimeMillis();
    if (ingest.length == 1) {
        final FileInputStream fstream = new FileInputStream(ingest[0]);
        final DataInputStream in = new DataInputStream(fstream);
        final BufferedReader br = new BufferedReader(new InputStreamReader(in));
        String strLine;
        int nb = 0;
        final HashMap<String, String> esIndex = new HashMap<>();
        BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
        while ((strLine = br.readLine()) != null) {
            final DBObject bson = (DBObject) JSON.parse(strLine);
            bulk.insert(bson);
            ElasticSearchAccess.addEsIndex(dbvitam, model, esIndex, bson);
            nb++;
            if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                BulkWriteResult result = bulk.execute();
                int check = result.getInsertedCount();
                if (check != nb) {
                    System.out.print("x");
                } else {
                    System.out.print(".");
                }
                bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                MainIngestFile.cptMaip.addAndGet(check);
                nb = 0;
            }
        }
        if (!esIndex.isEmpty()) {
            System.out.println("Last bulk ES");
            dbvitam.addEsEntryIndex(true, esIndex, model);
            esIndex.clear();
        }
        if (nb != 0) {
            bulk.execute();
            MainIngestFile.cptMaip.addAndGet(nb);
            nb = 0;
        }
    } else {
        // threads
        ExecutorService executorService = Executors.newFixedThreadPool(ingest.length + 1);
        for (int i = 0; i < ingest.length; i++) {
            MainIngestMDBESFromFile ingestrun = new MainIngestMDBESFromFile();
            ingestrun.file = ingest[i];
            executorService.execute(ingestrun);
        }
        // ES
        MainIngestMDBESFromFile ingestrun = new MainIngestMDBESFromFile();
        ingestrun.file = null;
        ingestrun.files = ingest;
        ingestrun.original = dbvitam;
        executorService.execute(ingestrun);

        executorService.shutdown();
        while (!executorService.awaitTermination(10000, TimeUnit.MILLISECONDS)) {
            ;
        }
        System.out.println("Load ended");
        final long nbBigM = dbvitam.getDaipSize();
        final long nbBigD = dbvitam.getPaipSize();
        System.out.println("\n Big Test (" + nbThread + " nb MAIP: " + MainIngestFile.cptMaip.get()
                + ") with MAIP: " + nbBigM + " DATA: " + nbBigD + " => Load:"
                + (loadt.get()) / ((float) MainIngestFile.cptMaip.get() * nbThread));

        System.out.println("\nThread;nbLoad;nbTotal;Load");
        System.out.println(nbThread + ";" + MainIngestFile.cptMaip.get() + ";" + nbBigM + ";"
                + (loadt.get()) / ((float) MainIngestFile.cptMaip.get() * nbThread));
    }
    final long date12 = System.currentTimeMillis();
    MainIngestMDBESFromFile.loadt.set(date12 - date11);

    System.out.println("Load ended");
    /*
     * System.out.println("All elements\n================================================================");
     * DbVitam.printStructure(dbvitam);
     */
    final long nbBigM = dbvitam.getDaipSize();
    final long nbBigD = dbvitam.getPaipSize();
    System.out.println("\n Big Test (" + nbThread + " threads, each " + MainIngestFile.nb + " nb MAIP: "
            + MainIngestFile.cptMaip.get() + ") with MAIP: " + nbBigM + " DATA: " + nbBigD + " => Load:"
            + (MainIngestMDBESFromFile.loadt.get()) / ((float) MainIngestFile.cptMaip.get()));

    System.out.println("\nThread;nbLoad;nbTotal;Load");
    System.out.println(nbThread + ";" + MainIngestFile.cptMaip.get() + ";" + nbBigM + ";"
            + (MainIngestMDBESFromFile.loadt.get()) / ((float) MainIngestFile.cptMaip.get()));
}

From source file:fr.gouv.vitam.mdbes.MainIngestMDBESFromFile.java

License:Open Source License

@Override
public void run() {
    if (file == null) {
        // ES
        //Thread.sleep(1000);
        try {
            for (int i = 0; i < files.length - 1; i++) {
                System.out.println("ESFile: " + files[i]);
                final HashMap<String, String> esIndex = new HashMap<>();
                final FileInputStream fstream = new FileInputStream(files[i]);
                final DataInputStream in = new DataInputStream(fstream);
                final BufferedReader br = new BufferedReader(new InputStreamReader(in));
                String strLine;
                // Read File Line By Line
                while ((strLine = br.readLine()) != null) {
                    final BSONObject bson = (BSONObject) JSON.parse(strLine);
                    ElasticSearchAccess.addEsIndex(original, model, esIndex, bson);
                }
                // Close the input stream
                br.close();
                in.close();
                fstream.close();
                if (!esIndex.isEmpty()) {
                    System.out.println("Last bulk ES");
                    original.addEsEntryIndex(true, esIndex, model);
                    esIndex.clear();
                }
            }
            // the last file might contain DAip entries that were already inserted but need to be updated
            int i = files.length - 1;
            System.out.println("ESFile: " + files[i]);
            final FileInputStream fstream = new FileInputStream(files[i]);
            final DataInputStream in = new DataInputStream(fstream);
            final BufferedReader br = new BufferedReader(new InputStreamReader(in));
            String strLine;
            // Read File Line By Line
            while ((strLine = br.readLine()) != null) {
                final BSONObject bson = (BSONObject) JSON.parse(strLine);
                ElasticSearchAccess.addEsIndex(original, model, bson);
            }
            // Close the input stream
            br.close();
            in.close();
            fstream.close();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return;
    }
    MongoDbAccess dbvitam = null;
    FileInputStream fstream = null;
    DataInputStream in = null;
    final BufferedReader br;
    try {
        System.out.println("MDFile: " + file);
        fstream = new FileInputStream(file);
        in = new DataInputStream(fstream);
        br = new BufferedReader(new InputStreamReader(in));
        dbvitam = new MongoDbAccess(mongoClient, database, esbase, unicast, false);
        // now ingest metaaip/metafield/data
        final long date11 = System.currentTimeMillis();
        String strLine;
        int nb = 0;

        if (false) {
            // Tokumx
            List<DBObject> inserts = new ArrayList<DBObject>(GlobalDatas.LIMIT_MDB_NEW_INDEX);
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                inserts.add(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    WriteResult result = dbvitam.daips.collection.insert(inserts);
                    if (result.getN() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    inserts.clear();
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                WriteResult result = dbvitam.daips.collection.insert(inserts);
                if (result.getN() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                inserts.clear();
                nb = 0;
            }
        } else {
            BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                bulk.insert(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    BulkWriteResult result = bulk.execute();
                    bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                    if (result.getInsertedCount() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                BulkWriteResult result = bulk.execute();
                if (result.getInsertedCount() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                nb = 0;
            }
        }
        final long date12 = System.currentTimeMillis();
        loadt.addAndGet(date12 - date11);
        return;
    } catch (final InvalidUuidOperationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (final FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        // release resources (in and fstream may be null if the file could not be opened)
        try {
            if (in != null) {
                in.close();
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        try {
            if (fstream != null) {
                fstream.close();
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        if (dbvitam != null) {
            dbvitam.close();
        }
    }
}

From source file:fr.gouv.vitam.mdbes.MainIngestMDBFromFile.java

License:Open Source License

private static final void runOnce(final MongoDbAccess dbvitam)
        throws InterruptedException, InstantiationException, IllegalAccessException, IOException {
    System.out.println("Load starting... ");
    int nbThread = ingest.length;

    final long date11 = System.currentTimeMillis();
    if (ingest.length == 1) {
        final FileInputStream fstream = new FileInputStream(ingest[0]);
        final DataInputStream in = new DataInputStream(fstream);
        final BufferedReader br = new BufferedReader(new InputStreamReader(in));
        String strLine;
        int nb = 0;
        BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
        while ((strLine = br.readLine()) != null) {
            final DBObject bson = (DBObject) JSON.parse(strLine);
            bulk.insert(bson);
            nb++;
            if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                BulkWriteResult result = bulk.execute();
                int check = result.getInsertedCount();
                if (check != nb) {
                    System.out.print("x");
                } else {
                    System.out.print(".");
                }
                bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                MainIngestFile.cptMaip.addAndGet(check);
                nb = 0;
            }
        }
        if (nb != 0) {
            bulk.execute();
            MainIngestFile.cptMaip.addAndGet(nb);
            nb = 0;
        }
    } else {
        // threads
        ExecutorService executorService = Executors.newFixedThreadPool(ingest.length);
        for (int i = 0; i < ingest.length; i++) {
            MainIngestMDBFromFile ingestrun = new MainIngestMDBFromFile();
            ingestrun.file = ingest[i];
            executorService.execute(ingestrun);
            Thread.sleep(200);
        }
        Thread.sleep(1000);
        executorService.shutdown();
        while (!executorService.awaitTermination(10000, TimeUnit.MILLISECONDS)) {
            ;
        }
        System.out.println("Load ended");
        final long nbBigM = dbvitam.getDaipSize();
        final long nbBigD = dbvitam.getPaipSize();
        System.out.println("\n Big Test (" + nbThread + " nb MAIP: " + MainIngestFile.cptMaip.get()
                + ") with MAIP: " + nbBigM + " DATA: " + nbBigD + " => Load:"
                + (loadt.get()) / ((float) MainIngestFile.cptMaip.get() * nbThread));

        System.out.println("\nThread;nbLoad;nbTotal;Load");
        System.out.println(nbThread + ";" + MainIngestFile.cptMaip.get() + ";" + nbBigM + ";"
                + (loadt.get()) / ((float) MainIngestFile.cptMaip.get() * nbThread));
    }
    final long date12 = System.currentTimeMillis();
    MainIngestMDBFromFile.loadt.set(date12 - date11);

    System.out.println("Load ended");
    /*
     * System.out.println("All elements\n================================================================");
     * DbVitam.printStructure(dbvitam);
     */
    final long nbBigM = dbvitam.getDaipSize();
    final long nbBigD = dbvitam.getPaipSize();
    System.out.println("\n Big Test (" + nbThread + " threads, each " + MainIngestFile.nb + " nb MAIP: "
            + MainIngestFile.cptMaip.get() + ") with MAIP: " + nbBigM + " DATA: " + nbBigD + " => Load:"
            + (MainIngestMDBFromFile.loadt.get()) / ((float) MainIngestFile.cptMaip.get()));

    System.out.println("\nThread;nbLoad;nbTotal;Load");
    System.out.println(nbThread + ";" + MainIngestFile.cptMaip.get() + ";" + nbBigM + ";"
            + (MainIngestMDBFromFile.loadt.get()) / ((float) MainIngestFile.cptMaip.get()));
}

From source file:fr.gouv.vitam.mdbes.MainIngestMDBFromFile.java

License:Open Source License

@Override
public void run() {
    MongoDbAccess dbvitam = null;
    FileInputStream fstream = null;
    DataInputStream in = null;
    final BufferedReader br;
    try {
        fstream = new FileInputStream(file);
        in = new DataInputStream(fstream);
        br = new BufferedReader(new InputStreamReader(in));
        dbvitam = new MongoDbAccess(mongoClient, database, esbase, unicast, false);
        // now ingest metaaip/metafield/data
        final long date11 = System.currentTimeMillis();
        String strLine;
        int nb = 0;

        if (false) {
            // Tokumx
            List<DBObject> inserts = new ArrayList<DBObject>(GlobalDatas.LIMIT_MDB_NEW_INDEX);
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                inserts.add(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    WriteResult result = dbvitam.daips.collection.insert(inserts);
                    if (result.getN() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    inserts.clear();
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                WriteResult result = dbvitam.daips.collection.insert(inserts);
                if (result.getN() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                inserts.clear();
                nb = 0;
            }
        } else {
            BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                bulk.insert(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    BulkWriteResult result = bulk.execute();
                    bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                    if (result.getInsertedCount() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                BulkWriteResult result = bulk.execute();
                if (result.getInsertedCount() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                nb = 0;
            }
        }
        final long date12 = System.currentTimeMillis();
        loadt.addAndGet(date12 - date11);
        return;
    } catch (final InvalidUuidOperationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (final FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        // release resources (in and fstream may be null if the file could not be opened)
        try {
            if (in != null) {
                in.close();
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        try {
            if (fstream != null) {
                fstream.close();
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        if (dbvitam != null) {
            dbvitam.close();
        }
    }
}

From source file:io.hipstogram.trident.mongodb.MongoDBMapState.java

License:Apache License

@Override
public void multiPut(List<List<Object>> keys, List<T> values) {
    LOG.debug("Putting the following keys: {} with values: {}", keys, values);
    try {
        BulkWriteOperation builder = coll.initializeOrderedBulkOperation();

        // Retrieve the mapping statement for the key,val pair
        for (int i = 0; i < keys.size(); i++) {
            List<Object> key = keys.get(i);
            T val = values.get(i);
            CRUDOperation operation = mapper.map(key, val);
            operation.addToBulkOperation(builder);
        }

        builder.execute();

        _mwrites.incrBy(keys.size());
    } catch (Exception e) {
        LOG.error("Exception {} caught.", e);
    }
}

From source file:io.hipstogram.trident.mongodb.MongoDBState.java

License:Apache License

@Override
public void commit(Long txid) {
    LOG.debug("Committing [{}]", txid);
    DBCollection coll = client.getCollection(configuration);
    BulkWriteOperation builder = coll.initializeOrderedBulkOperation();

    int i = 0;
    for (CRUDOperation operation : this.operations) {
        operation.addToBulkOperation(builder);
        i++;
        if (i >= this.maxBatchSize) {
            builder.execute();
            builder = coll.initializeOrderedBulkOperation();
            i = 0;
        }
    }

    builder.execute();
}

From source file:mongodb.performance.MongoDBPerformance.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws UnknownHostException, FileNotFoundException, IOException {
    if (args.length == 0) {
        System.out.println("Parameter not provided!");
        System.exit(-1);
    }
    System.out.println("Parameter: " + args[0]);

    MongoClient mongoClient = new MongoClient();
    //MongoClient mongoClient = new MongoClient( "54.172.218.64" , 27017 );
    DB db = mongoClient.getDB("myDatabase");

    DBCollection collection = db.getCollection("ads");
    collection.drop();

    BulkWriteOperation builder = collection.initializeUnorderedBulkOperation();

    FileInputStream fileInputStream = new FileInputStream(".\\resources\\MongoDB" + args[0] + ".txt");
    BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(fileInputStream));
    // Insert
    // Time start    
    long start = System.currentTimeMillis();

    String line;
    while ((line = bufferedReader.readLine()) != null) {
        DBObject bson = (DBObject) JSON.parse(line);
        builder.insert(bson);
    }
    bufferedReader.close();
    builder.execute();
    //Time end
    long elapsed = System.currentTimeMillis() - start;
    System.out.println("[insert] Time elapsed: " + elapsed + " ms");

    // Update
    // Time start    
    start = System.currentTimeMillis();
    collection.updateMulti(new BasicDBObject(),
            new BasicDBObject("$set", new BasicDBObject().append("ano", 2006)));
    // Time end
    elapsed = System.currentTimeMillis() - start;
    System.out.println("[update] Time elapsed: " + elapsed + " ms");

    // Select
    // Time start    
    start = System.currentTimeMillis();
    BasicDBObject keys = new BasicDBObject();
    keys.put("_id", 1);
    keys.put("modeloCarro.marca", 1);
    keys.put("modeloCarro.nome", 1);
    keys.put("uf", 1);
    keys.put("placa_carro", 1);
    keys.put("qtd_portas", 1);
    keys.put("cambio", 1);
    keys.put("combustivel", 1);
    keys.put("cor", 1);
    keys.put("km", 1);
    keys.put("valor", 1);
    keys.put("detalhe", 1);
    BasicDBObject sort = new BasicDBObject("_id", 1);

    DBCursor cursor = collection.find(new BasicDBObject(), keys).sort(sort);
    while (cursor.hasNext()) {
        cursor.next();
    }
    // Time end
    elapsed = System.currentTimeMillis() - start;
    System.out.println("[select] Time elapsed: " + elapsed + " ms");

    // Delete
    // Time start    
    start = System.currentTimeMillis();
    collection.remove(new BasicDBObject());
    // Time end
    elapsed = System.currentTimeMillis() - start;
    System.out.println("[delete] Time elapsed: " + elapsed + " ms");
}

From source file:org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore.java

License:Apache License

private <T extends Document> BulkUpdateResult sendBulkUpdate(Collection<T> collection,
        java.util.Collection<UpdateOp> updateOps, Map<String, T> oldDocs) {
    DBCollection dbCollection = getDBCollection(collection);
    BulkWriteOperation bulk = dbCollection.initializeUnorderedBulkOperation();
    String[] bulkIds = new String[updateOps.size()];
    int i = 0;
    for (UpdateOp updateOp : updateOps) {
        String id = updateOp.getId();
        QueryBuilder query = createQueryForUpdate(id, updateOp.getConditions());
        T oldDoc = oldDocs.get(id);
        DBObject update;
        if (oldDoc == null) {
            query.and(Document.MOD_COUNT).exists(false);
            update = createUpdate(updateOp, true);
        } else {
            query.and(Document.MOD_COUNT).is(oldDoc.getModCount());
            update = createUpdate(updateOp, false);
        }
        bulk.find(query.get()).upsert().updateOne(update);
        bulkIds[i++] = id;
    }

    BulkWriteResult bulkResult;
    Set<String> failedUpdates = new HashSet<String>();
    Set<String> upserts = new HashSet<String>();
    try {
        bulkResult = bulk.execute();
    } catch (BulkWriteException e) {
        bulkResult = e.getWriteResult();
        for (BulkWriteError err : e.getWriteErrors()) {
            failedUpdates.add(bulkIds[err.getIndex()]);
        }
    }
    for (BulkWriteUpsert upsert : bulkResult.getUpserts()) {
        upserts.add(bulkIds[upsert.getIndex()]);
    }
    return new BulkUpdateResult(failedUpdates, upserts);
}