Example usage for com.mongodb.client MongoDatabase getCollection

List of usage examples for com.mongodb.client MongoDatabase getCollection

Introduction

On this page you can find example usages of com.mongodb.client MongoDatabase getCollection.

Prototype

MongoCollection<Document> getCollection(String collectionName);

Document

Gets a collection.
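
Before the full examples, here is a minimal, self-contained sketch of the call (the host, database, and collection names are placeholders, not taken from the examples below): obtain the database, ask it for a collection handle by name, and read the first document. getCollection itself does not contact the server; the collection is created lazily on the first write.

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;

public class GetCollectionExample {
    public static void main(String[] args) {
        MongoClient mongo = new MongoClient("localhost", 27017);
        try {
            MongoDatabase db = mongo.getDatabase("test");
            MongoCollection<Document> users = db.getCollection("users");
            Document first = users.find().first(); // null if the collection is empty
            System.out.println(first == null ? "collection is empty" : first.toJson());
        } finally {
            mongo.close();
        }
    }
}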

Usage

From source file:geriapp.dao.ReadingDAO.java

public static Reading getReadingsBetween(String type, Timestamp startTime, Timestamp endTime) {
    MongoClient mongo = new MongoClient("54.254.204.169", 27017);

    MongoDatabase db = mongo.getDatabase("GERI");

    MongoCollection<Document> newColl;

    Gson gson = new Gson();

    if (type.equals("medbox")) {
        newColl = db.getCollection("Medbox");
        Document latestEntry = newColl.find().iterator().next();
        String json = latestEntry.toJson();
        MedboxReading reading = gson.fromJson(json, MedboxReading.class);
        String thisTimestamp = reading.getGw_timestamp();
        DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date parsedTimestamp = null;
        try {
            parsedTimestamp = df.parse(thisTimestamp);
        } catch (ParseException e) {
            return null;
        }
        Timestamp gwTimestamp = new Timestamp(parsedTimestamp.getTime());
        mongo.close();
        if (gwTimestamp.after(startTime) && gwTimestamp.before(endTime)) {
            return reading;
        }
    } else if (type.equals("door")) {
        newColl = db.getCollection("Door");
        Document latestEntry = newColl.find().iterator().next();
        String json = latestEntry.toJson();
        DoorReading reading = gson.fromJson(json, DoorReading.class);
        String thisTimestamp = reading.getGw_timestamp();
        DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date parsedTimestamp = null;
        try {
            parsedTimestamp = df.parse(thisTimestamp);
        } catch (ParseException e) {
            return null;
        }
        Timestamp gwTimestamp = new Timestamp(parsedTimestamp.getTime());
        mongo.close();
        if (gwTimestamp.after(startTime) && gwTimestamp.before(endTime)) {
            return reading;
        }
    }
    return null; //throw Exception??
}

From source file:geriapp.dao.ReadingDAO.java

public static int getPastReadingsCountBetween(String type, Timestamp startTime, Timestamp endTime) {
    MongoClient mongo = new MongoClient("54.254.204.169", 27017);

    MongoDatabase db = mongo.getDatabase("GERI");
    int size = 0;
    MongoCollection<Document> newColl;

    Gson gson = new Gson();

    if (type.equals("medbox")) {
        newColl = db.getCollection("Medbox");
        MongoCursor<Document> iterator = newColl.find().iterator();
        Document latestEntry = null;
        boolean run = true;
        ArrayList<MedboxReading> results = new ArrayList<MedboxReading>();
        while (run) {
            if (!iterator.hasNext()) { // guards an empty collection; next() would otherwise throw
                break;
            }
            latestEntry = iterator.next();
            String json = latestEntry.toJson();
            MedboxReading reading = gson.fromJson(json, MedboxReading.class);
            String thisTimestamp = reading.getGw_timestamp();

            DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            Date parsedTimestamp = null;
            try {
                parsedTimestamp = df.parse(thisTimestamp);
                //System.out.println(""+parsedTimestamp);
            } catch (ParseException e) {
                e.printStackTrace();
                break; // parsedTimestamp is null here; stop instead of dereferencing it below
            }
            Timestamp gwTimestamp = new Timestamp(parsedTimestamp.getTime());
            //if (gwTimestamp.after(startTime)) {
            if (gwTimestamp.after(startTime) && gwTimestamp.before(endTime)) {
                results.add(reading);
            }
            if (!iterator.hasNext()) {
                run = false;
            }
        }
        /*
        while (iterator.hasNext()) {
        latestEntry = iterator.next();
        String json = latestEntry.toJson();
        MedboxReading reading = gson.fromJson(json, MedboxReading.class);
        results.add(reading);
        }
        */
        mongo.close();
        size = results.size();
        return size;
    } else if (type.equals("door")) {
        newColl = db.getCollection("Door");
        MongoCursor<Document> iterator = newColl.find().iterator();
        Document latestEntry = null;
        boolean run = true;
        ArrayList<DoorReading> results = new ArrayList<DoorReading>();
        while (run) {
            if (!iterator.hasNext()) { // guards an empty collection; next() would otherwise throw
                break;
            }
            latestEntry = iterator.next();
            String json = latestEntry.toJson();
            DoorReading reading = gson.fromJson(json, DoorReading.class);
            String thisTimestamp = reading.getGw_timestamp();

            DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            Date parsedTimestamp = null;
            try {
                parsedTimestamp = df.parse(thisTimestamp);
                //System.out.println(""+parsedTimestamp);
            } catch (ParseException e) {
                e.printStackTrace();
                break; // parsedTimestamp is null here; stop instead of dereferencing it below
            }
            Timestamp gwTimestamp = new Timestamp(parsedTimestamp.getTime());
            //if (gwTimestamp.after(startTime)) {
            if (gwTimestamp.after(startTime) && gwTimestamp.before(endTime)) {
                results.add(reading);
            }
            if (!iterator.hasNext()) {
                run = false;
            }
        }
        /*
        while (iterator.hasNext()) {
        latestEntry = iterator.next();
        String json = latestEntry.toJson();
        MedboxReading reading = gson.fromJson(json, MedboxReading.class);
        results.add(reading);
        }
        */
        mongo.close();
        size = results.size();
        return size;
    }
    return size; //throw Exception??
}
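
The two methods above pull documents to the client and filter by timestamp in Java. As a hedged alternative sketch (not part of the original source), the range check could be pushed into the query itself, assuming gw_timestamp is stored as a "yyyy-MM-dd HH:mm:ss" string as the parsing above suggests; for that format, lexicographic order matches chronological order, so string comparisons work. Host, database, and field names are reused from the code above.

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Filters;
import org.bson.Document;
import org.bson.conversions.Bson;

import java.sql.Timestamp;
import java.text.SimpleDateFormat;

public static long countReadingsBetween(String collectionName, Timestamp startTime, Timestamp endTime) {
    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    MongoClient mongo = new MongoClient("54.254.204.169", 27017);
    try {
        MongoDatabase db = mongo.getDatabase("GERI");
        MongoCollection<Document> coll = db.getCollection(collectionName);
        // exclusive bounds, matching the after()/before() checks above
        Bson filter = Filters.and(
                Filters.gt("gw_timestamp", df.format(startTime)),
                Filters.lt("gw_timestamp", df.format(endTime)));
        return coll.count(filter); // counted server-side; no documents are transferred
    } finally {
        mongo.close();
    }
}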

From source file:henu.util.NosqlDB.java

public static MongoCollection<Document> getCollection(String collectionName) {

    try {

        MongoDatabase db = getMongoDataBase();

        MongoCollection<Document> collection = db.getCollection(collectionName);
        System.out.println("Got collection " + collectionName + " successfully!");
        return collection;
    } catch (Exception e) {
        System.err.println(e.getClass().getName() + ": " + e.getMessage());
    }
    return null;
}
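
A possible call site for this helper (the collection name and document fields are made up for illustration):

MongoCollection<Document> users = NosqlDB.getCollection("users");
if (users != null) {
    users.insertOne(new Document("name", "alice").append("age", 30));
    System.out.println("documents in users: " + users.count());
}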

From source file:info.bunji.mongodb.synces.CollectionExtractor.java

License:Apache License

@Override
protected void execute() throws Exception {

    Set<String> includeFields = config.getIncludeFields();
    Set<String> excludeFields = config.getExcludeFields();
    String index = config.getDestDbName();
    String syncName = config.getSyncName();

    // obtain a Mongo client for this sync configuration
    try (MongoClient client = MongoClientService.getClient(config)) {
        /*
                 if (timestamp == null) {
                    logger.info("[{}] start initial import from db [{}]", syncName, config.getMongoDbName());
                
                    // get the last processed oplog timestamp
                    //BsonTimestamp lastOpTs = config.getLastOpTime();
                            
                    //logger.debug("[{}] current oplog timestamp = [{}]", syncName, lastOpTs.toString());
                
                    // get the source database
                    MongoDatabase db = client.getDatabase(config.getMongoDbName());
                
                    // import each target collection
                    Object lastId = null;
                    for (String collection : getTargetColectionList(db)) {
                       logger.info("[{}] start initial import. [{}]", syncName, collection);
                
                       MongoCollection<Document> conn = db.getCollection(collection);
                       BasicDBObject filter = getFilterForInitialImport(new BasicDBObject(), lastId);
                
                       long count = conn.count(filter);
                
                       // get document from collection
                       long processed = 0;
                       FindIterable<Document> results = conn.find(filter).sort(new BasicDBObject("_id", 1));
                       for (Document doc : results) {
          Document filteredDoc = DocumentUtils.applyFieldFilter(doc, includeFields, excludeFields);
          append(new SyncOperation(Operation.INSERT, index, collection, filteredDoc, null));
          if ((++processed % LOGGING_INTERVAL) == 0) {
             logger.info("[{}] processing initial import. [{}({}/{})]", syncName, collection, processed, count);
          }
                       }
                       logger.info("[{}] initial import finished. [{}(total:{})]", syncName, collection, processed);
                    }
                 }
        */

        //=====================
        logger.info("[{}] start initial import from db [{}]", syncName, config.getMongoDbName());

        // get the source database to import from
        MongoDatabase db = client.getDatabase(config.getMongoDbName());

        // import each target collection
        Object lastId = null;
        for (String collection : getTargetColectionList(db)) {
            logger.info("[{}] start initial import. [{}]", syncName, collection);

            MongoCollection<Document> conn = db.getCollection(collection);
            BasicDBObject filter = getFilterForInitialImport(new BasicDBObject(), lastId);

            long count = conn.count(filter);

            long processed = 0;
            FindIterable<Document> results = conn.find(filter).sort(new BasicDBObject("_id", 1));
            for (Document doc : results) {
                Document filteredDoc = DocumentUtils.applyFieldFilter(doc, includeFields, excludeFields);
                append(new SyncOperation(Operation.INSERT, index, collection, filteredDoc, null));
                if ((++processed % LOGGING_INTERVAL) == 0) {
                    logger.info("[{}] processing initial import. [{}({}/{})]", syncName, collection, processed,
                            count);
                }
            }
            logger.info("[{}] initial import finished. [{}(total:{})]", syncName, collection, processed);
        }
        //=====================
        logger.info("[{}] finish import collection(s).", syncName);

        //         append(DocumentUtils.makeStatusOperation(Status.RUNNING, config, timestamp));
        config.setStatus(Status.RUNNING);
        config.setLastOpTime(timestamp);
        //         config.setLastSyncTime(timestamp);
        //         append(DocumentUtils.makeStatusOperation(config));
        append(SyncOperation.fromConfig(config));

    } catch (Throwable t) {
        config.setStatus(Status.INITIAL_IMPORT_FAILED);
        logger.error("[{}] initial import failed.({})", syncName, t.getMessage(), t);
        throw t;
    }
}

From source file:io.debezium.connector.mongodb.Replicator.java

License:Apache License

/**
 * Copy the collection, sending to the recorder a record for each document.
 *
 * @param primary the connection to the replica set's primary node; may not be null
 * @param collectionId the identifier of the collection to be copied; may not be null
 * @param timestamp the timestamp in milliseconds at which the copy operation was started
 * @return number of documents that were copied
 * @throws InterruptedException if the thread was interrupted while the copy operation was running
 */
protected long copyCollection(MongoClient primary, CollectionId collectionId, long timestamp)
        throws InterruptedException {
    RecordsForCollection factory = recordMakers.forCollection(collectionId);
    MongoDatabase db = primary.getDatabase(collectionId.dbName());
    MongoCollection<Document> docCollection = db.getCollection(collectionId.name());
    long counter = 0;
    try (MongoCursor<Document> cursor = docCollection.find().iterator()) {
        while (cursor.hasNext()) {
            Document doc = cursor.next();
            logger.trace("Found existing doc in {}: {}", collectionId, doc);
            counter += factory.recordObject(collectionId, doc, timestamp);
        }
    }
    return counter;
}

From source file:io.dirigible.mongodb.jdbc.MongodbStatement.java

License:Apache License

/**
 * Input string: the document specification as defined in https://docs.mongodb.org/manual/reference/command/find/#dbcmd.find
 */
@Override
public ResultSet executeQuery(String sql) throws SQLException {
    MongoDatabase db = this.conn.getMongoDb();
    BsonDocument filterDocument = null;
    if (sql == null || sql.length() < 1)//that is a call to find() in terms of mongodb queries
        filterDocument = new BsonDocument();
    else
        filterDocument = BsonDocument.parse(sql);
    /*
     *  TODO: With 3.2 use https://docs.mongodb.org/manual/reference/command/find/#dbcmd.find instead
     *    Document response = this.conn.getMongoDb().runCommand(filterDocument); //MongoDB 3.2 only. Won't work on 3.0
     */
    String collectionName = filterDocument.containsKey("find") ? filterDocument.getString("find").getValue()
            : null;
    if (collectionName == null)
        collectionName = this.conn.getCollectionName();//fallback if any
    if (collectionName == null)
        throw new IllegalArgumentException("Specifying a collection is mandatory for query operations");
    BsonDocument filter = filterDocument.containsKey("filter") ? filterDocument.getDocument("filter") : null;

    FindIterable<Document> searchHits = null;
    if (filter == null)
        searchHits = db.getCollection(collectionName).find();
    else
        searchHits = db.getCollection(collectionName).find(filter);
    if (filterDocument.containsKey("batchSize"))
        searchHits.batchSize(filterDocument.getInt32("batchSize").getValue());
    if (filterDocument.containsKey("limit"))
        searchHits.limit(filterDocument.getInt32("limit").getValue());
    if (filterDocument.containsKey("sort"))
        searchHits.sort(filterDocument.getDocument("sort"));
    return new MongodbResultSet(this, searchHits);
}
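
A hypothetical call site: the string passed to executeQuery is not SQL but a MongoDB find command document in the form referenced above, so a query (collection and field names invented for illustration) might look like this. executeQuery resolves the collection via db.getCollection("orders") and applies the filter, sort, and limit from the document; statement is a MongodbStatement obtained from the connection.

String query = "{ \"find\": \"orders\","
        + " \"filter\": { \"status\": \"shipped\" },"
        + " \"sort\": { \"orderDate\": -1 },"
        + " \"limit\": 10 }";
ResultSet rs = statement.executeQuery(query);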

From source file:io.djigger.collector.accessors.InstrumentationEventAccessor.java

License:Open Source License

public InstrumentationEventAccessor(MongoDatabase db) {
    super();

    this.db = db;
    instrumentationEventsCollection = db.getCollection("instrumentation");
}

From source file:io.djigger.collector.accessors.MetricAccessor.java

License:Open Source License

public MetricAccessor(MongoDatabase db) {
    super();

    this.db = db;
    metricsCollection = db.getCollection("metrics");
}

From source file:io.djigger.collector.accessors.stackref.ThreadInfoAccessorImpl.java

License:Open Source License

public ThreadInfoAccessorImpl(MongoDatabase db) {
    super();
    this.db = db;
    threadInfoCollection = db.getCollection("threaddumps");
    stackTracesCollection = db.getCollection("stacktraces");
}

From source file:io.mandrel.metrics.impl.MongoMetricsRepository.java

License:Apache License

@PostConstruct
public void init() {
    MongoDatabase database = mongoClient.getDatabase(properties.getMongoClientDatabase());

    counters = database.getCollection("counters");

    MongoUtils.checkCapped(database, "timeseries", size, maxDocuments);
    timeseries = database.getCollection("timeseries");

    List<Document> indexes = Lists.newArrayList(database.getCollection("timeseries").listIndexes());
    List<String> indexNames = indexes.stream().map(doc -> doc.getString("name")).collect(Collectors.toList());
    if (!indexNames.contains(INDEX_NAME)) {
        log.warn("Index on field time and type is missing, creating it. Existing indexes: {}", indexes);
        database.getCollection("timeseries").createIndex(new Document("timestamp_hour", 1).append("type", 1),
                new IndexOptions().name(INDEX_NAME).unique(true));
    }
}
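
For context, a capped collection like the timeseries collection checked above could be created along these lines if it does not already exist. This is a sketch only, not part of the original source: the size and document limits are placeholders, the actual behaviour of MongoUtils.checkCapped is not shown here, and the same "database" handle as in init() is assumed.

// create "timeseries" as a capped collection if it is missing
boolean hasTimeseries = database.listCollectionNames()
        .into(new java.util.ArrayList<String>()).contains("timeseries");
if (!hasTimeseries) {
    database.createCollection("timeseries",
            new com.mongodb.client.model.CreateCollectionOptions()
                    .capped(true).sizeInBytes(64L * 1024 * 1024).maxDocuments(100000));
}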