Example usage for com.mongodb.client MongoCollection find

List of usage examples for com.mongodb.client MongoCollection find

Introduction

This page collects example usages of the com.mongodb.client MongoCollection find() method.

Prototype

FindIterable<TDocument> find();

Document

Finds all documents in the collection.
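
Below is a minimal, self-contained sketch of the no-argument find(). It follows the same driver style as the examples on this page; the host, database name, and collection name are placeholders rather than values taken from any of the source files.

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;

import org.bson.Document;

public class FindAllExample {
    public static void main(String[] args) {
        // placeholder host, database, and collection names
        MongoClient mongo = new MongoClient("localhost", 27017);
        try {
            MongoDatabase db = mongo.getDatabase("testdb");
            MongoCollection<Document> collection = db.getCollection("items");

            // find() with no arguments matches every document in the collection
            MongoCursor<Document> cursor = collection.find().iterator();
            try {
                while (cursor.hasNext()) {
                    System.out.println(cursor.next().toJson());
                }
            } finally {
                cursor.close();
            }
        } finally {
            mongo.close();
        }
    }
}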

Usage

From source file: foam.dao.MongoDAO.java

License: Open Source License

@Override
public Sink select_(X x, Sink sink, long skip, long limit, Comparator order, Predicate predicate) {
    sink = prepareSink(sink);

    Sink decorated = decorateSink_(sink, skip, limit, order, predicate);
    Subscription sub = new Subscription();
    Logger logger = (Logger) x.get("logger");

    if (getOf() == null) {
        throw new IllegalArgumentException("`of` is not set");
    }

    MongoCollection<BsonDocument> collection = database.getCollection(collectionName, BsonDocument.class);
    MongoCursor<BsonDocument> cursor = collection.find().iterator();

    try {
        while (cursor.hasNext()) {
            if (sub.getDetached())
                break;

            FObject obj = createFObject(x, new BsonDocumentReader(cursor.next()), getOf().getObjClass(),
                    logger);

            if ((predicate == null) || predicate.f(obj)) {
                decorated.put(obj, sub);
            }
        }
    } finally {
        cursor.close();
    }

    decorated.eof();

    return sink;
}

From source file: fr.lirmm.graphik.graal.keyval.KeyValueStoreMongoDB.java

License: Open Source License

public void showCollection(MongoCollection<Document> col) {
    System.out.println(col.getNamespace().getCollectionName() + " : ");
    MongoCursor<Document> cursor = col.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }
}
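
Since MongoCursor implements Closeable, the try/finally above can also be written with try-with-resources. The following is an equivalent variant of the same method, not part of the original source:

public void showCollection(MongoCollection<Document> col) {
    System.out.println(col.getNamespace().getCollectionName() + " : ");
    // try-with-resources closes the cursor even if printing throws
    try (MongoCursor<Document> cursor = col.find().iterator()) {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    }
}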

From source file: fucksocks.server.manager.MongoDBBasedUserManager.java

License: Apache License

@Override
public List<User> findAll() {
    if (mongoDBUtil == null) {
        mongoDBUtil = initMongoDBUtil();
    }
    return mongoDBUtil.doJob(new MongoDBCallback<List<User>>() {
        @Override
        public List<User> process(MongoCollection<Document> collection) {
            FindIterable<Document> result = collection.find();
            List<User> users = new ArrayList<User>();
            for (Document document : result) {
                users.add(formUser(document));
            }
            return users;
        }
    });
}

From source file: geriapp.dao.ReadingDAO.java

public static Reading getLatestReading(String type) {//Change later to ID instead of type
    MongoClient mongo = new MongoClient("54.254.204.169", 27017);

    MongoDatabase db = mongo.getDatabase("GERI");

    MongoCollection<Document> newColl;

    Gson gson = new Gson();

    if (type.equals("medbox")) {
        newColl = db.getCollection("Medbox");
        Document latestEntry = newColl.find().iterator().next();
        String json = latestEntry.toJson();
        Reading reading = gson.fromJson(json, MedboxReading.class);
        mongo.close();
        return reading;
    } else if (type.equals("door")) {

    }
    return null; //throw Exception??
}
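
The method above throws NoSuchElementException when the Medbox collection is empty (find().iterator().next() on an empty result) and only closes the client on the medbox path. A defensive rewrite of the same method is sketched below; the try/finally and the null guard are additions, everything else mirrors the original:

public static Reading getLatestReading(String type) {
    MongoClient mongo = new MongoClient("54.254.204.169", 27017);
    MongoDatabase db = mongo.getDatabase("GERI");
    Gson gson = new Gson();
    try {
        if (type.equals("medbox")) {
            MongoCollection<Document> newColl = db.getCollection("Medbox");
            // first() returns null when the collection is empty
            Document latestEntry = newColl.find().first();
            return (latestEntry == null) ? null : gson.fromJson(latestEntry.toJson(), MedboxReading.class);
        }
        return null; // other types not handled yet
    } finally {
        // the client is released on every return path
        mongo.close();
    }
}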

From source file: geriapp.dao.ReadingDAO.java

public static Reading getReadingsBetween(String type, Timestamp startTime, Timestamp endTime) {
    MongoClient mongo = new MongoClient("54.254.204.169", 27017);

    MongoDatabase db = mongo.getDatabase("GERI");

    MongoCollection<Document> newColl;

    Gson gson = new Gson();

    if (type.equals("medbox")) {
        newColl = db.getCollection("Medbox");
        Document latestEntry = newColl.find().iterator().next();
        String json = latestEntry.toJson();
        MedboxReading reading = gson.fromJson(json, MedboxReading.class);
        String thisTimestamp = reading.getGw_timestamp();
        DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date parsedTimestamp = null;
        try {
            parsedTimestamp = df.parse(thisTimestamp);
        } catch (ParseException e) {
            return null;
        }
        Timestamp gwTimestamp = new Timestamp(parsedTimestamp.getTime());
        mongo.close();
        if (gwTimestamp.after(startTime) && gwTimestamp.before(endTime)) {
            return reading;
        }
    } else if (type.equals("door")) {
        newColl = db.getCollection("Door");
        Document latestEntry = newColl.find().iterator().next();
        String json = latestEntry.toJson();
        DoorReading reading = gson.fromJson(json, DoorReading.class);
        String thisTimestamp = reading.getGw_timestamp();
        DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date parsedTimestamp = null;
        try {
            parsedTimestamp = df.parse(thisTimestamp);
        } catch (ParseException e) {
            return null;
        }
        Timestamp gwTimestamp = new Timestamp(parsedTimestamp.getTime());
        mongo.close();
        if (gwTimestamp.after(startTime) && gwTimestamp.before(endTime)) {
            return reading;
        }
    }
    return null; //throw Exception??
}

From source file: geriapp.dao.ReadingDAO.java

public static int getPastReadingsCountBetween(String type, Timestamp startTime, Timestamp endTime) {
    MongoClient mongo = new MongoClient("54.254.204.169", 27017);

    MongoDatabase db = mongo.getDatabase("GERI");
    int size = 0;
    MongoCollection<Document> newColl;

    Gson gson = new Gson();

    if (type.equals("medbox")) {
        newColl = db.getCollection("Medbox");
        MongoCursor<Document> iterator = newColl.find().iterator();
        Document latestEntry = null;
        boolean run = true;
        ArrayList<MedboxReading> results = new ArrayList<MedboxReading>();
        while (run) {
            latestEntry = iterator.next();
            if (latestEntry == null) {
                run = false;
                size = 121;
                break;
            }
            String json = latestEntry.toJson();
            MedboxReading reading = gson.fromJson(json, MedboxReading.class);
            String thisTimestamp = reading.getGw_timestamp();

            DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            Date parsedTimestamp = null;
            try {
                parsedTimestamp = df.parse(thisTimestamp);
                //System.out.println(""+parsedTimestamp);
            } catch (ParseException e) {
                e.printStackTrace();
                break; // stop: parsedTimestamp is null and cannot be used below
            }
            Timestamp gwTimestamp = new Timestamp(parsedTimestamp.getTime());
            //if (gwTimestamp.after(startTime)) {
            if (gwTimestamp.after(startTime) && gwTimestamp.before(endTime)) {
                results.add(reading);
            }
            if (!iterator.hasNext()) {
                run = false;
            }
        }
        /*
        while (iterator.hasNext()) {
        latestEntry = iterator.next();
        String json = latestEntry.toJson();
        MedboxReading reading = gson.fromJson(json, MedboxReading.class);
        results.add(reading);
        }
        */
        mongo.close();
        size = results.size();
        return size;
    } else if (type.equals("door")) {
        newColl = db.getCollection("Door");
        MongoCursor<Document> iterator = newColl.find().iterator();
        Document latestEntry = null;
        boolean run = true;
        ArrayList<DoorReading> results = new ArrayList<DoorReading>();
        while (run) {
            latestEntry = iterator.next();
            if (latestEntry == null) {
                run = false;
                size = 121;
                break;
            }
            String json = latestEntry.toJson();
            DoorReading reading = gson.fromJson(json, DoorReading.class);
            String thisTimestamp = reading.getGw_timestamp();

            DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            Date parsedTimestamp = null;
            try {
                parsedTimestamp = df.parse(thisTimestamp);
                //System.out.println(""+parsedTimestamp);
            } catch (ParseException e) {
                e.printStackTrace();
                break; // stop: parsedTimestamp is null and cannot be used below
            }
            Timestamp gwTimestamp = new Timestamp(parsedTimestamp.getTime());
            //if (gwTimestamp.after(startTime)) {
            if (gwTimestamp.after(startTime) && gwTimestamp.before(endTime)) {
                results.add(reading);
            }
            if (!iterator.hasNext()) {
                run = false;
            }
        }
        /*
        while (iterator.hasNext()) {
        latestEntry = iterator.next();
        String json = latestEntry.toJson();
        MedboxReading reading = gson.fromJson(json, MedboxReading.class);
        results.add(reading);
        }
        */
        mongo.close();
        size = results.size();
        return size;
    }
    return size; //throw Exception??
}

From source file: henu.dao.impl.CaclDaoImpl.java

License: Open Source License

@Override
public List getCaclData(String cid) {
    MongoCollection<Document> collection = NosqlDB.getCollection(cid);

    /*
     * 1. Get a FindIterable<Document> from the collection.
     * 2. Obtain a MongoCursor<Document> from the iterable.
     * 3. Iterate the cursor and read each document.
     */
    List<String> data = new ArrayList();
    FindIterable<Document> findIterable = collection.find();
    MongoCursor<Document> mongoCursor = findIterable.iterator();

    while (mongoCursor.hasNext()) {
        data.add(mongoCursor.next().toJson());
        System.out.println(data);
    }

    return data;
}

From source file: if4031.ServerHandler.java

private int getLastMessageId(String channel) {
    MongoCollection<Document> collection = database.getCollection(channel + "Message");
    Document myDoc = collection.find().sort(descending("id")).first();
    return myDoc.getInteger("id");
}
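
The sort(descending("id")) call relies on a static import of com.mongodb.client.model.Sorts.descending, and first() returns null when the channel collection holds no messages, which would make getInteger("id") throw. A null-safe variant is sketched below; returning 0 for an empty channel is an assumption, not taken from the original handler:

import static com.mongodb.client.model.Sorts.descending;

private int getLastMessageId(String channel) {
    MongoCollection<Document> collection = database.getCollection(channel + "Message");
    // first() returns null when no message has been stored for the channel yet
    Document myDoc = collection.find().sort(descending("id")).first();
    return (myDoc == null) ? 0 : myDoc.getInteger("id");
}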

From source file: info.bunji.mongodb.synces.OplogExtractor.java

License: Apache License

@Override
protected void execute() throws Exception {

    Set<String> includeFields = config.getIncludeFields();
    Set<String> excludeFields = config.getExcludeFields();
    String index = config.getDestDbName();
    String syncName = config.getSyncName();

    // tail the oplog in a retry loop
    int checkPoint = 0;
    int retryCnt = 0;
    while (true) {
        try (MongoClient client = MongoClientService.getClient(config)) {
            retryCnt = 0;

            logger.info("[{}] starting oplog sync.", syncName);

            // check oplog timestamp outdated
            MongoCollection<Document> oplogCollection = client.getDatabase("local").getCollection("oplog.rs");
            FindIterable<Document> results;
            if (timestamp != null) {
                results = oplogCollection.find().filter(Filters.lte("ts", timestamp))
                        .sort(new Document("$natural", -1)).limit(1);
                if (results.first() == null) {
                    throw new IllegalStateException("[" + syncName + "] oplog outdated.["
                            + DocumentUtils.toDateStr(timestamp) + "(" + timestamp + ")]");
                }
                //logger.trace("[{}] start oplog timestamp = [{}]", config.getSyncName(), timestamp);
                //config.addSyncCount(-1);

                BsonTimestamp tmpTs = results.first().get("ts", BsonTimestamp.class);
                if (!tmpTs.equals(timestamp)) {
                    // the latest matching oplog entry differs from the stored timestamp;
                    // adopt the oplog value and persist the updated sync status
                    timestamp = tmpTs;

                    config.setStatus(Status.RUNNING);
                    config.setLastOpTime(timestamp);
                    append(SyncOperation.fromConfig(config));
                }
            }

            // tail the oplog from the last synced timestamp
            targetDb = client.getDatabase(config.getMongoDbName());
            results = oplogCollection.find().filter(Filters.gte("ts", timestamp))
                    .sort(new Document("$natural", 1)).cursorType(CursorType.TailableAwait)
                    .noCursorTimeout(true).oplogReplay(true);

            logger.info("[{}] started oplog sync. [oplog {} ({})]", syncName,
                    DocumentUtils.toDateStr(timestamp), timestamp);

            // get document from oplog
            for (Document oplog : results) {

                // TODO: simplify how the SyncOperation is built from the oplog entry
                SyncOperation op = null;
                timestamp = oplog.get("ts", BsonTimestamp.class);
                if (!"c".equals(oplog.get("op"))) {
                    //if (!Operation.COMMAND.equals(Operation.valueOf(oplog.get("op")))) {
                    // not a command operation
                    String ns = oplog.getString("ns");
                    String[] nsVals = ns.split("\\.", 2);
                    if (!config.getMongoDbName().equals(nsVals[0]) || !config.isTargetCollection(nsVals[1])) {
                        if (++checkPoint >= 10000) {
                            // periodically persist the current oplog position even when entries are skipped
                            config.setLastOpTime(timestamp);
                            op = SyncOperation.fromConfig(config);
                            checkPoint = 0; // clear check count
                            append(op);
                        }
                        continue;
                    } else {
                        op = new SyncOperation(oplog, index);
                        checkPoint = 0;
                    }
                } else {
                    // command operation: skip unless it targets the sync database and collection
                    op = new SyncOperation(oplog, index);
                    if (!config.getMongoDbName().equals(op.getSrcDbName())
                            || !config.isTargetCollection(op.getCollection())) {
                        checkPoint++;
                        continue;
                    }
                }

                /*
                               SyncOperation op = new SyncOperation(oplog, index);
                        
                               timestamp = op.getTimestamp();
                        
                               // check target database and collection
                               if(!config.getMongoDbName().equals(op.getSrcDbName()) || !config.isTargetCollection(op.getCollection())) {
                                  if (++checkPoint >= 10000) {
                                     // periodically persist the current oplog position even when entries are skipped
                                     config.setLastOpTime(timestamp);
                                     op = SyncOperation.fromConfig(config);
                                     checkPoint = 0;      // clear check count
                                     append(op);
                                  }
                                  continue;
                               } else {
                                  checkPoint = 0;
                               }
                */
                if (op.isPartialUpdate()) {
                    // get full document
                    MongoCollection<Document> collection = getMongoCollection(op.getCollection());
                    Document updateDoc = collection.find(oplog.get("o2", Document.class)).first();
                    if (updateDoc == null) {
                        checkPoint++;
                        continue; // deleted document
                    }
                    op.setDoc(updateDoc);
                }

                // filter document(insert or update)
                if (op.getDoc() != null) {
                    Document filteredDoc = DocumentUtils.applyFieldFilter(op.getDoc(), includeFields,
                            excludeFields);
                    if (filteredDoc.isEmpty()) {
                        checkPoint++;
                        continue; // no change sync fields
                    }
                    op.setDoc(filteredDoc);
                }

                // emit sync data
                append(op);
            }
        } catch (MongoClientException mce) {
            // do nothing.
        } catch (UnknownHostException | MongoSocketException mse) {
            retryCnt++;
            if (retryCnt >= MAX_RETRY) {
                logger.error(String.format("[%s] mongo connect failed. (RETRY=%d)", syncName, retryCnt), mse);
                throw mse;
            }
            long waitSec = (long) Math.min(60, Math.pow(2, retryCnt));
            logger.warn("[{}] waiting mongo connect retry. ({}/{}) [{}sec]", syncName, retryCnt, MAX_RETRY,
                    waitSec);

            Thread.sleep(waitSec * 1000);
        } catch (MongoInterruptedException mie) {
            // interrupt oplog tailable process.
            break;
        } catch (Throwable t) {
            logger.error(String.format("[%s] error. [msg:%s](%s)", syncName, t.getMessage(),
                    t.getClass().getSimpleName()), t);
            throw t;
        }
    }
}

From source file: info.bunji.mongodb.synces.StatusChecker.java

License: Apache License

/**
 **********************************
 * check sync status.
 * @throws Exception error occurred
 **********************************
 */
protected void checkStatus() throws Exception {
    Map<String, SyncConfig> configs = getConfigs();
    for (Entry<String, SyncConfig> entry : configs.entrySet()) {
        SyncConfig config = entry.getValue();
        AsyncProcess<SyncOperation> extractor = null;

        String syncName = config.getSyncName();
        if (config.getStatus() == null) {
            if (!isRunning(config.getSyncName())) {
                // initial import
                if (validateInitialImport(config)) {
                    try (MongoClient client = MongoClientService.getClient(config)) {
                        // get current oplog timestamp.
                        MongoCollection<Document> oplog = client.getDatabase("local").getCollection("oplog.rs");
                        Document lastOp = oplog.find().sort(new BasicDBObject("$natural", -1)).limit(1).first();
                        config.setLastOpTime(lastOp.get("ts", BsonTimestamp.class));
                    }
                    // create extractor for initial import.
                    config.setStatus(Status.INITIAL_IMPORTING);
                    extractor = new CollectionExtractor(config, null);
                    updateStatus(config, Status.INITIAL_IMPORTING, null);
                } else {
                    // failed initial import.
                    updateStatus(config, Status.INITIAL_IMPORT_FAILED, null);
                }
            }
        } else {
            switch (config.getStatus()) {

            case RUNNING:
                // restart sync. if indexer not running.
                if (!isRunning(syncName)) {
                    logger.debug("[{}] restart sync.", syncName);
                    // create extractor for oplog sync.
                    extractor = new OplogExtractor(config, config.getLastOpTime());
                }
                break;

            case INITIAL_IMPORT_FAILED:
            case START_FAILED:
            case STOPPED:
                if (isRunning(syncName)) {
                    SyncProcess indexer = indexerMap.remove(syncName);
                    indexer.stop();
                }
                break;

            default:
                // do nothing status
                // - INITIAL_IMPORTING
                // - WAITING_RETRY
                // - UNKNOWN
                break;
            }
        }

        if (extractor != null) {
            // start sync
            List<AsyncProcess<SyncOperation>> procList = new ArrayList<>();
            procList.add(extractor);
            if (extractor instanceof CollectionExtractor) {
                BsonTimestamp ts = config.getLastOpTime();
                procList.add(new OplogExtractor(config, ts));
            }
            AsyncResult<SyncOperation> result = AsyncExecutor.execute(procList, 1, syncQueueLimit);
            //AsyncResult<SyncOperation> result = AsyncExecutor.execute(procList, 1, syncQueueLimit, Schedulers.io());
            SyncProcess indexer = createSyncProcess(config, result);
            indexerMap.put(syncName, indexer);
            AsyncExecutor.execute(indexer);
            //AsyncExecutor.execute(indexer, Schedulers.computation());
        }
    }

    // stop indexer, if config not exists.
    for (String syncName : getIndexerNames()) {
        if (!configs.containsKey(syncName) && isRunning(syncName)) {
            getIndexer(syncName).stop();
        }
    }
    return;
}