Example usage for com.mongodb.client MongoCursor next

Introduction

This page collects example usages of com.mongodb.client.MongoCursor#next from open-source projects. Each example is shown with its source file and license.

Prototype

@Override
TResult next();
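
Before the collected examples, here is a minimal sketch of the canonical next() loop. This is an illustration, not taken from any of the projects below: the class name MongoCursorNextSketch, the connection string, and the exampleDb/exampleCollection names are placeholders, and it assumes the mongodb-driver-sync artifact is on the classpath. Because MongoCursor implements Closeable, a try-with-resources block guarantees the server-side cursor is released even if iteration fails, a detail worth noting when reading the examples below.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCursor;
import org.bson.Document;

public class MongoCursorNextSketch {
    public static void main(String[] args) {
        // Placeholder connection string; adjust to your deployment.
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            // MongoCursor implements Closeable, so try-with-resources
            // releases the server-side cursor even on failure.
            try (MongoCursor<Document> cursor = client.getDatabase("exampleDb")
                    .getCollection("exampleCollection").find().iterator()) {
                while (cursor.hasNext()) {
                    // next() returns the current document and advances the cursor;
                    // calling it on an exhausted cursor throws NoSuchElementException,
                    // so it is always guarded by hasNext().
                    Document doc = cursor.next();
                    System.out.println(doc.toJson());
                }
            }
        }
    }
}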

Usage

From source file: org.apache.nifi.processors.mongodb.GetMongoRecord.java

License: Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = null;

    if (context.hasIncomingConnection()) {
        input = session.get();
        if (input == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final String database = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions(input).getValue();
    final String collection = context.getProperty(COLLECTION_NAME).evaluateAttributeExpressions(input)
            .getValue();
    final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(input).getValue();
    final Document query = getQuery(context, session, input);

    MongoCollection mongoCollection = clientService.getDatabase(database).getCollection(collection);

    FindIterable<Document> find = mongoCollection.find(query);
    if (context.getProperty(SORT).isSet()) {
        find = find
                .sort(Document.parse(context.getProperty(SORT).evaluateAttributeExpressions(input).getValue()));
    }
    if (context.getProperty(PROJECTION).isSet()) {
        find = find.projection(
                Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions(input).getValue()));
    }
    if (context.getProperty(LIMIT).isSet()) {
        find = find.limit(context.getProperty(LIMIT).evaluateAttributeExpressions(input).asInteger());
    }

    MongoCursor<Document> cursor = find.iterator();

    FlowFile output = input != null ? session.create(input) : session.create();
    final FlowFile inputPtr = input;
    try {
        final Map<String, String> attributes = getAttributes(context, input, query, mongoCollection);
        try (OutputStream out = session.write(output)) {
            Map<String, String> attrs = inputPtr != null ? inputPtr.getAttributes()
                    : new HashMap<String, String>() {
                        {
                            put("schema.name", schemaName);
                        }
                    };
            RecordSchema schema = writerFactory.getSchema(attrs, null);
            RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out, attrs);
            long count = 0L;
            writer.beginRecordSet();
            while (cursor.hasNext()) {
                Document next = cursor.next();
                if (next.get("_id") instanceof ObjectId) {
                    next.put("_id", next.get("_id").toString());
                }
                Record record = new MapRecord(schema, next);
                writer.write(record);
                count++;
            }
            writer.finishRecordSet();
            writer.close();
            out.close();
            attributes.put("record.count", String.valueOf(count));
        } catch (SchemaNotFoundException e) {
            throw new RuntimeException(e);
        }

        output = session.putAllAttributes(output, attributes);

        session.getProvenanceReporter().fetch(output, getURI(context));
        session.transfer(output, REL_SUCCESS);
        if (input != null) {
            session.transfer(input, REL_ORIGINAL);
        }
    } catch (Exception ex) {
        getLogger().error("Error writing record set from Mongo query.", ex);
        session.remove(output);
        if (input != null) {
            session.transfer(input, REL_FAILURE);
        }
    }
}

From source file: org.apache.nifi.processors.mongodb.gridfs.DeleteGridFS.java

License: Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }

    final String deleteQuery = getQuery(context, input);
    final String queryAttribute = context.getProperty(QUERY_ATTRIBUTE).isSet()
            ? context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue()
            : null;
    GridFSBucket bucket = getBucket(input, context);

    try {
        Document query = Document.parse(deleteQuery);
        MongoCursor cursor = bucket.find(query).iterator();
        if (cursor.hasNext()) {
            GridFSFile file = (GridFSFile) cursor.next();
            bucket.delete(file.getObjectId());

            if (!StringUtils.isEmpty(queryAttribute)) {
                input = session.putAttribute(input, queryAttribute, deleteQuery);
            }

            session.transfer(input, REL_SUCCESS);
        } else {
            getLogger().error(String.format("Query %s did not delete anything in %s", deleteQuery,
                    bucket.getBucketName()));
            session.transfer(input, REL_FAILURE);
        }

        cursor.close();
    } catch (Exception ex) {
        getLogger().error(String.format("Error deleting using query: %s", deleteQuery), ex);
        session.transfer(input, REL_FAILURE);
    }
}

From source file: org.apache.nifi.processors.mongodb.gridfs.FetchGridFS.java

License: Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }

    final String operatingMode = context.getProperty(OPERATION_MODE).getValue();
    final Map<String, String> originalAttributes = input.getAttributes();

    String queryStr;
    try {
        queryStr = getQuery(session, context, input);
        if (StringUtils.isEmpty(queryStr)) {
            getLogger().error("No query could be found or built from the supplied input.");
            session.transfer(input, REL_FAILURE);
            return;
        }
    } catch (IOException ex) {
        getLogger().error("No query could be found from supplied input", ex);
        session.transfer(input, REL_FAILURE);
        return;
    }

    Document query = Document.parse(queryStr);

    try {
        final GridFSBucket bucket = getBucket(input, context);
        final String queryPtr = queryStr;
        final FlowFile parent = operatingMode.equals(MODE_ONE_COMMIT.getValue()) ? input : null;

        MongoCursor it = bucket.find(query).iterator();
        if (operatingMode.equals(MODE_MANY_COMMITS.getValue())) {
            session.transfer(input, REL_ORIGINAL);
            input = null;
        }

        while (it.hasNext()) {
            GridFSFile gridFSFile = (GridFSFile) it.next();
            handleFile(bucket, session, context, parent, gridFSFile, queryPtr);

            if (operatingMode.equals(MODE_MANY_COMMITS.getValue())) {
                session.commit();
            }
        }

        if (input != null) {
            session.transfer(input, REL_ORIGINAL);
        }
    } catch (Exception ex) {
        getLogger().error("An error occurred wile trying to run the query.", ex);
        if (input != null && operatingMode.equals(MODE_ONE_COMMIT.getValue())) {
            session.transfer(input, REL_FAILURE);
        } else if (input != null && operatingMode.equals(MODE_MANY_COMMITS.getValue())) {
            final String queryPtr = queryStr;
            FlowFile cloned = session.create();
            cloned = session.putAllAttributes(cloned, originalAttributes);
            cloned = session.write(cloned, out -> out.write(queryPtr.getBytes()));
            session.transfer(cloned, REL_FAILURE);
        }
    }
}

From source file: org.apache.nifi.processors.mongodb.gridfs.GridFSITTestBase.java

License: Apache License

public boolean fileHasProperties(String name, String bucketName, Map<String, String> attrs) {
    GridFSBucket bucket = GridFSBuckets.create(client.getDatabase(DB), bucketName);
    MongoCursor it = bucket.find(Document.parse(String.format("{ \"filename\": \"%s\" }", name))).iterator();
    boolean retVal = false;

    if (it.hasNext()) {
        GridFSFile file = (GridFSFile) it.next();
        Document metadata = file.getMetadata();
        if (metadata != null && metadata.size() == attrs.size()) {
            retVal = true;
            for (Map.Entry<String, Object> entry : metadata.entrySet()) {
                Object val = attrs.get(entry.getKey());
                if (val == null || !entry.getValue().equals(val)) {
                    retVal = false;
                    break;
                }
            }
        }
    }

    it.close();

    return retVal;
}

From source file: org.apache.nifi.processors.mongodb.RunMongoAggregation.java

License: Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = null;
    if (context.hasIncomingConnection()) {
        flowFile = session.get();

        if (flowFile == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final String query = context.getProperty(QUERY).evaluateAttributeExpressions(flowFile).getValue();
    final Boolean allowDiskUse = context.getProperty(ALLOW_DISK_USE).asBoolean();
    final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(flowFile)
            .getValue();
    final Integer batchSize = context.getProperty(BATCH_SIZE).asInteger();
    final Integer resultsPerFlowfile = context.getProperty(RESULTS_PER_FLOWFILE).asInteger();
    final String jsonTypeSetting = context.getProperty(JSON_TYPE).getValue();
    final String dateFormat = context.getProperty(DATE_FORMAT).evaluateAttributeExpressions(flowFile)
            .getValue();

    configureMapper(jsonTypeSetting, dateFormat);

    Map<String, String> attrs = new HashMap<>();
    if (queryAttr != null && queryAttr.trim().length() > 0) {
        attrs.put(queryAttr, query);
    }

    MongoCursor<Document> iter = null;

    try {
        MongoCollection<Document> collection = getCollection(context, flowFile);
        List<Bson> aggQuery = buildAggregationQuery(query);
        AggregateIterable<Document> it = collection.aggregate(aggQuery).allowDiskUse(allowDiskUse);
        ;
        it.batchSize(batchSize != null ? batchSize : 1);

        iter = it.iterator();
        List<Document> batch = new ArrayList<>();

        while (iter.hasNext()) {
            batch.add(iter.next());
            if (batch.size() == resultsPerFlowfile) {
                writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
                batch = new ArrayList<>();
            }
        }

        if (batch.size() > 0) {
            writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
        }

        if (flowFile != null) {
            session.transfer(flowFile, REL_ORIGINAL);
        }
    } catch (Exception e) {
        getLogger().error("Error running MongoDB aggregation query.", e);
        if (flowFile != null) {
            session.transfer(flowFile, REL_FAILURE);
        }
    } finally {
        if (iter != null) {
            iter.close();
        }
    }
}

From source file: org.apache.rya.indexing.geotemporal.mongo.MongoEventStorage.java

License: Apache License

@Override
public Collection<Event> search(final Optional<RyaURI> subject,
        final Optional<Collection<IndexingExpr>> geoFilters,
        final Optional<Collection<IndexingExpr>> temporalFilters) throws EventStorageException {
    requireNonNull(subject);

    try {
        final Collection<IndexingExpr> geos = (geoFilters.isPresent() ? geoFilters.get() : new ArrayList<>());
        final Collection<IndexingExpr> tempos = (temporalFilters.isPresent() ? temporalFilters.get()
                : new ArrayList<>());
        final DBObject filterObj = queryAdapter.getFilterQuery(geos, tempos);

        final BasicDBObjectBuilder builder = BasicDBObjectBuilder.start(filterObj.toMap());
        if (subject.isPresent()) {
            builder.append(EventDocumentConverter.SUBJECT, subject.get().getData());
        }
        final MongoCursor<Document> results = mongo.getDatabase(ryaInstanceName).getCollection(COLLECTION_NAME)
                .find(BsonDocument.parse(builder.get().toString())).iterator();

        final List<Event> events = new ArrayList<>();
        while (results.hasNext()) {
            events.add(EVENT_CONVERTER.fromDocument(results.next()));
        }
        return events;
    } catch (final MongoException | DocumentConverterException | GeoTemporalIndexException e) {
        throw new EventStorageException("Could not get the Event.", e);
    }
}

From source file: org.axonframework.mongo.eventhandling.saga.repository.MongoSagaStore.java

License: Apache License

@Override
public Set<String> findSagas(Class<?> sagaType, AssociationValue associationValue) {
    final BasicDBObject value = associationValueQuery(sagaType, associationValue);

    MongoCursor<Document> dbCursor = mongoTemplate.sagaCollection().find(value)
            .projection(include("sagaIdentifier")).iterator();
    Set<String> found = new TreeSet<>();
    while (dbCursor.hasNext()) {
        found.add((String) dbCursor.next().get("sagaIdentifier"));
    }
    return found;
}

From source file: org.bananaforscale.cormac.dao.AbstractDataService.java

License: Apache License

/**
 * Returns all unique database names in a MongoDB data source.
 *
 * @return a {@link Set} of database names
 */
protected Set<String> getDatabaseNames() {
    final Set<String> dbSet = new HashSet<>();
    final MongoCursor<String> cursor = mongoClient.listDatabaseNames().iterator();
    while (cursor.hasNext()) {
        dbSet.add(cursor.next());
    }
    return dbSet;
}

From source file: org.bananaforscale.cormac.dao.AbstractDataService.java

License: Apache License

/**
 * Retrieves the names of the collections in a database.
 *
 * @param databaseName the name of the database
 * @return a {@link Set} of collection names
 */
protected Set<String> getCollectionNames(final String databaseName) {
    final MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
    final Set<String> collectionSet = new HashSet<>();
    final MongoCursor<String> cursor = mongoDatabase.listCollectionNames().iterator();
    while (cursor.hasNext()) {
        collectionSet.add(cursor.next());
    }
    return collectionSet;
}

From source file: org.cleaner.mongo.FindDuplicatesByMd5.java

License: Open Source License

private void writeOutDuplicates(AggregateIterable<Document> aggregate) {
    MongoCursor<Document> iterator = aggregate.iterator();
    while (iterator.hasNext()) {
        duplicateStrategy.handleDuplicates(iterator.next(), gridFS);
    }
}