Example usage for com.mongodb.client MongoCollection find

Introduction

This page shows example usages of the com.mongodb.client MongoCollection find method.

Prototype

FindIterable<TDocument> find();

FindIterable<TDocument> find(Bson filter);

Document

Finds documents in the collection: with no argument it matches every document, and with a Bson filter it matches only documents that satisfy the filter. The returned FindIterable can be further configured (projection, sort, skip, limit, batch size) before iteration begins.
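Before the project excerpts below, here is a minimal, self-contained sketch of both overloads. It assumes the synchronous Java driver (3.7 or later, for MongoClients.create), a mongod reachable at localhost:27017, and placeholder names (exampleDb, users, status, name) that are not part of any of the projects quoted below.

import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import org.bson.Document;

import static com.mongodb.client.model.Filters.eq;

public class FindExample {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users = client.getDatabase("exampleDb")
                    .getCollection("users");

            // find() with no argument matches every document in the collection.
            FindIterable<Document> all = users.find();
            for (Document doc : all) {
                System.out.println(doc.toJson());
            }

            // find(Bson filter) restricts the results; the FindIterable is
            // configured fluently and only contacts the server once iteration
            // starts (first(), iterator(), forEach(), into()).
            Document first = users.find(eq("status", "active"))
                    .sort(new Document("name", 1))
                    .limit(10)
                    .first();
            System.out.println(first != null ? first.toJson() : "no match");
        }
    }
}

This laziness of FindIterable is why the excerpts below can freely chain projection, sort, skip, limit, and batchSize after find() before drawing a cursor.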

Usage

From source file: org.apache.eagle.alert.metadata.impl.MongoMetadataDaoImpl.java

License: Apache License

private <T> Map<String, T> maps(MongoCollection<Document> collection, Class<T> clz, String version) {
    BsonDocument doc = new BsonDocument();
    doc.append("version", new BsonString(version));

    Map<String, T> maps = new HashMap<String, T>();
    String mapKey = (clz == SpoutSpec.class) ? "topologyId" : "topologyName";
    collection.find(doc).forEach(new Block<Document>() {
        @Override
        public void apply(Document document) {
            String json = document.toJson();
            try {
                // Some field names in SpoutSpec contain a dot (.), which is not a valid
                // Mongo field name, so the format is transformed while reading from Mongo.
                if (clz == SpoutSpec.class) {
                    Document doc = Document.parse(json);
                    String[] metadataMapArrays = { "kafka2TupleMetadataMap", "tuple2StreamMetadataMap",
                            "streamRepartitionMetadataMap" };
                    for (String metadataMapName : metadataMapArrays) {
                        ArrayList<Document> subDocs = (ArrayList<Document>) doc.get(metadataMapName);
                        doc.remove(metadataMapName);

                        Document replaceDoc = new Document();
                        for (Document subDoc : subDocs) {
                            replaceDoc.put((String) subDoc.get("topicName"), subDoc.get(metadataMapName));
                        }
                        doc.put(metadataMapName, replaceDoc);
                    }

                    json = doc.toJson();
                }
                T t = mapper.readValue(json, clz);
                maps.put(document.getString(mapKey), t);
            } catch (IOException e) {
                LOG.error("deserialize config item failed!", e);
            }
        }
    });

    return maps;
}

From source file: org.apache.eagle.alert.metadata.impl.MongoMetadataDaoImpl.java

License: Apache License

private <T> List<T> list(MongoCollection<Document> collection, Class<T> clz, String version) {
    BsonDocument doc = new BsonDocument();
    doc.append("version", new BsonString(version));

    List<T> result = new LinkedList<T>();
    collection.find(doc).map(new Function<Document, T>() {
        @Override
        public T apply(Document t) {
            String json = t.toJson();
            try {
                return mapper.readValue(json, clz);
            } catch (IOException e) {
                LOG.error("deserialize config item failed!", e);
            }
            return null;
        }
    }).into(result);
    return result;
}

From source file: org.apache.metamodel.mongodb.mongo3.MongoDbDataContext.java

License: Apache License

@Override
protected Row executePrimaryKeyLookupQuery(Table table, List<SelectItem> selectItems, Column primaryKeyColumn,
        Object keyValue) {
    final MongoCollection<Document> collection = _mongoDb.getCollection(table.getName());

    List<FilterItem> whereItems = new ArrayList<FilterItem>();
    SelectItem selectItem = new SelectItem(primaryKeyColumn);
    FilterItem primaryKeyWhereItem = new FilterItem(selectItem, OperatorType.EQUALS_TO, keyValue);
    whereItems.add(primaryKeyWhereItem);
    final Document query = createMongoDbQuery(table, whereItems);
    final Document resultDoc = collection.find(query).first();

    DataSetHeader header = new SimpleDataSetHeader(selectItems);

    Row row = MongoDBUtils.toRow(resultDoc, header);

    return row;
}

From source file: org.apache.metamodel.mongodb.mongo3.MongoDbDataContext.java

License: Apache License

private MongoCursor<Document> getDocumentMongoCursor(Table table, List<FilterItem> whereItems, int firstRow,
        int maxRows) {
    final MongoCollection<Document> collection = _mongoDb.getCollection(table.getName());

    final Document query = createMongoDbQuery(table, whereItems);

    logger.info("Executing MongoDB 'find' query: {}", query);
    FindIterable<Document> iterable = collection.find(query);

    if (maxRows > 0) {
        iterable = iterable.limit(maxRows);
    }
    if (firstRow > 1) {
        final int skip = firstRow - 1;
        iterable = iterable.skip(skip);
    }

    return iterable.iterator();
}

From source file: org.apache.nifi.mongodb.MongoDBLookupService.java

License: Apache License

private Document findOne(Document query, Document projection) {
    MongoCollection col = controllerService.getDatabase(databaseName).getCollection(collection);
    MongoCursor<Document> it = (projection != null ? col.find(query).projection(projection) : col.find(query))
            .iterator();
    Document retVal = it.hasNext() ? it.next() : null;
    it.close();

    return retVal;
}

From source file: org.apache.nifi.processors.mongodb.GetMongo.java

License: Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();

    final Document query = context.getProperty(QUERY).isSet()
            ? Document.parse(context.getProperty(QUERY).evaluateAttributeExpressions().getValue())
            : null;
    final Document projection = context.getProperty(PROJECTION).isSet()
            ? Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions().getValue())
            : null;
    final Document sort = context.getProperty(SORT).isSet()
            ? Document.parse(context.getProperty(SORT).evaluateAttributeExpressions().getValue())
            : null;
    final String jsonTypeSetting = context.getProperty(JSON_TYPE).getValue();
    configureMapper(jsonTypeSetting);

    final MongoCollection<Document> collection = getCollection(context);

    try {
        final FindIterable<Document> it = query != null ? collection.find(query) : collection.find();
        if (projection != null) {
            it.projection(projection);
        }
        if (sort != null) {
            it.sort(sort);
        }
        if (context.getProperty(LIMIT).isSet()) {
            it.limit(context.getProperty(LIMIT).evaluateAttributeExpressions().asInteger());
        }
        if (context.getProperty(BATCH_SIZE).isSet()) {
            it.batchSize(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
        }

        final MongoCursor<Document> cursor = it.iterator();
        ComponentLog log = getLogger();
        try {
            FlowFile flowFile = null;
            if (context.getProperty(RESULTS_PER_FLOWFILE).isSet()) {
                int ceiling = context.getProperty(RESULTS_PER_FLOWFILE).evaluateAttributeExpressions()
                        .asInteger();
                List<Document> batch = new ArrayList<>();

                while (cursor.hasNext()) {
                    batch.add(cursor.next());
                    if (batch.size() == ceiling) {
                        try {
                            if (log.isDebugEnabled()) {
                                log.debug("Writing batch...");
                            }
                            String payload = buildBatch(batch, jsonTypeSetting);
                            writeBatch(payload, context, session);
                            batch = new ArrayList<>();
                        } catch (IOException ex) {
                            getLogger().error("Error building batch", ex);
                        }
                    }
                }
                if (batch.size() > 0) {
                    try {
                        writeBatch(buildBatch(batch, jsonTypeSetting), context, session);
                    } catch (IOException ex) {
                        getLogger().error("Error sending remainder of batch", ex);
                    }
                }
            } else {
                while (cursor.hasNext()) {
                    flowFile = session.create();
                    flowFile = session.write(flowFile, new OutputStreamCallback() {
                        @Override
                        public void process(OutputStream out) throws IOException {
                            String json;
                            if (jsonTypeSetting.equals(JSON_TYPE_STANDARD)) {
                                json = mapper.writerWithDefaultPrettyPrinter()
                                        .writeValueAsString(cursor.next());
                            } else {
                                json = cursor.next().toJson();
                            }
                            IOUtils.write(json, out);
                        }
                    });
                    flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(),
                            "application/json");

                    session.getProvenanceReporter().receive(flowFile, getURI(context));
                    session.transfer(flowFile, REL_SUCCESS);
                }
            }

            session.commit();

        } finally {
            cursor.close();
        }

    } catch (final RuntimeException e) {
        context.yield();
        session.rollback();
        logger.error("Failed to execute query {} due to {}", new Object[] { query, e }, e);
    }
}

From source file: org.apache.nifi.processors.mongodb.GetMongoRecord.java

License: Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = null;

    if (context.hasIncomingConnection()) {
        input = session.get();
        if (input == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final String database = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions(input).getValue();
    final String collection = context.getProperty(COLLECTION_NAME).evaluateAttributeExpressions(input)
            .getValue();
    final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(input).getValue();
    final Document query = getQuery(context, session, input);

    MongoCollection mongoCollection = clientService.getDatabase(database).getCollection(collection);

    FindIterable<Document> find = mongoCollection.find(query);
    if (context.getProperty(SORT).isSet()) {
        find = find
                .sort(Document.parse(context.getProperty(SORT).evaluateAttributeExpressions(input).getValue()));
    }
    if (context.getProperty(PROJECTION).isSet()) {
        find = find.projection(
                Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions(input).getValue()));
    }
    if (context.getProperty(LIMIT).isSet()) {
        find = find.limit(context.getProperty(LIMIT).evaluateAttributeExpressions(input).asInteger());
    }

    MongoCursor<Document> cursor = find.iterator();

    FlowFile output = input != null ? session.create(input) : session.create();
    final FlowFile inputPtr = input;
    try {
        final Map<String, String> attributes = getAttributes(context, input, query, mongoCollection);
        try (OutputStream out = session.write(output)) {
            Map<String, String> attrs = inputPtr != null ? inputPtr.getAttributes()
                    : new HashMap<String, String>() {
                        {
                            put("schema.name", schemaName);
                        }
                    };
            RecordSchema schema = writerFactory.getSchema(attrs, null);
            RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out, attrs);
            long count = 0L;
            writer.beginRecordSet();
            while (cursor.hasNext()) {
                Document next = cursor.next();
                if (next.get("_id") instanceof ObjectId) {
                    next.put("_id", next.get("_id").toString());
                }
                Record record = new MapRecord(schema, next);
                writer.write(record);
                count++;
            }
            writer.finishRecordSet();
            writer.close();
            out.close();
            attributes.put("record.count", String.valueOf(count));
        } catch (SchemaNotFoundException e) {
            throw new RuntimeException(e);
        }

        output = session.putAllAttributes(output, attributes);

        session.getProvenanceReporter().fetch(output, getURI(context));
        session.transfer(output, REL_SUCCESS);
        if (input != null) {
            session.transfer(input, REL_ORIGINAL);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        getLogger().error("Error writing record set from Mongo query.", ex);
        session.remove(output);
        if (input != null) {
            session.transfer(input, REL_FAILURE);
        }
    }
}

From source file: org.axonframework.mongo.eventsourcing.eventstore.AbstractMongoEventStorageStrategy.java

License: Apache License

@Override
public List<? extends DomainEventData<?>> findDomainEvents(MongoCollection<Document> collection,
        String aggregateIdentifier, long firstSequenceNumber, int batchSize) {
    FindIterable<Document> cursor = collection
            .find(and(eq(eventConfiguration.aggregateIdentifierProperty(), aggregateIdentifier),
                    gte(eventConfiguration.sequenceNumberProperty(), firstSequenceNumber)))
            .sort(new BasicDBObject(eventConfiguration().sequenceNumberProperty(), ORDER_ASC));
    cursor = cursor.batchSize(batchSize);
    return stream(cursor.spliterator(), false).flatMap(this::extractEvents)
            .filter(event -> event.getSequenceNumber() >= firstSequenceNumber).collect(Collectors.toList());
}

From source file: org.axonframework.mongo.eventsourcing.eventstore.AbstractMongoEventStorageStrategy.java

License: Apache License

@Override
public Optional<? extends DomainEventData<?>> findLastSnapshot(MongoCollection<Document> snapshotCollection,
        String aggregateIdentifier) {
    FindIterable<Document> cursor = snapshotCollection
            .find(eq(eventConfiguration.aggregateIdentifierProperty(), aggregateIdentifier))
            .sort(new BasicDBObject(eventConfiguration.sequenceNumberProperty(), ORDER_DESC)).limit(1);
    return stream(cursor.spliterator(), false).findFirst().map(this::extractSnapshot);
}

From source file: org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java

License: Apache License

/**
 * Returns the document of the given document identifier.
 *
 * @param databaseName the database
 * @param collectionName the collection
 * @param documentId the document identifier to query for
 * @return the document of the given identifier
 * @throws DatasourceException
 * @throws NotFoundException
 */
@Override
public String getById(String databaseName, String collectionName, String documentId)
        throws DatasourceException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        if (!collectionExists(databaseName, collectionName)) {
            throw new NotFoundException("The collection doesn't exist in the datasource");
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mongoDatabase.getCollection(collectionName);
        Document query = new Document("_id", new ObjectId(documentId));
        if (collection.count(query) == 0) {
            throw new NotFoundException("The document doesn't exist in the datasource");
        }
        Document document = collection.find(query).first();
        document.remove("_id");
        return JSON.serialize(document);
    } catch (MongoException ex) {
        logger.error("An error occurred while retrieving the document", ex);
        throw new DatasourceException("An error occurred while retrieving the document");
    }
}