Example usage for com.mongodb DBCursor close

List of usage examples for com.mongodb DBCursor close

Introduction

On this page you can find example usages of com.mongodb DBCursor.close.

Prototype

@Override
    public void close() 

Source Link

Usage

From source file:org.anyframe.logmanager.bundle.core.LogCollectionManager.java

License:Apache License

/**
 * @throws Exception/*ww w  . j av  a2s .com*/
 */
public static void startManager() throws Exception {
    BasicDBObject query = new BasicDBObject();
    query.put("agentId", agentId);
    query.put("status", LogManagerConstant.APP_STATUS_ACTIVE);
    DBCursor appCursor = logApplication.find(query);

    logger.info("Application count is {}", appCursor.count());
    if (appCursor.count() > 0) {

        if (timer != null)
            timer.cancel();
        timer = new Timer();

        Iterator<DBObject> i = appCursor.iterator();
        while (i.hasNext()) {
            String appName = i.next().get("appName").toString();
            logger.info("Application name is {}", appName);

            DBCursor logCollectionCursor = logCollection.find(new BasicDBObject("appName", appName)
                    .append("agentId", agentId).append("setLogCollectionActive", true));

            while (logCollectionCursor.hasNext()) {
                setTimerTask(logCollectionCursor.next(), appName);
            }
            logCollectionCursor.close();
        }
    }
    appCursor.close();
    logger.info("HarvestManager is started.");
    updateAgentInfo(LogManagerConstant.AGENT_STATUS_ACTIVE);
}

From source file:org.apache.camel.component.gridfs.GridFsConsumer.java

License:Apache License

/**
 * Polling loop of the GridFS consumer. While the consumer is started, it
 * repeatedly queries the endpoint's files collection for matching entries and
 * hands each file to the processor as a new exchange. Depending on the
 * endpoint's {@code QueryStrategy} the query filters on upload timestamp, a
 * per-file attribute, or both; timestamp-persisting strategies store the
 * last-seen timestamp in a side collection so polling survives restarts.
 */
@Override
public void run() {
    DBCursor c = null;
    // lower bound for the uploadDate filter when a timestamp-based strategy is active
    java.util.Date fromDate = null;

    QueryStrategy s = endpoint.getQueryStrategy();
    // every strategy except FileAttribute filters on uploadDate
    boolean usesTimestamp = (s != QueryStrategy.FileAttribute);
    // these strategies store the last processed timestamp in a side collection
    boolean persistsTimestamp = (s == QueryStrategy.PersistentTimestamp
            || s == QueryStrategy.PersistentTimestampAndFileAttribute);
    // these strategies mark files via a file attribute while/after processing
    boolean usesAttribute = (s == QueryStrategy.FileAttribute || s == QueryStrategy.TimeStampAndFileAttribute
            || s == QueryStrategy.PersistentTimestampAndFileAttribute);

    DBCollection ptsCollection = null;
    DBObject persistentTimestamp = null;
    if (persistsTimestamp) {
        ptsCollection = endpoint.getDB().getCollection(endpoint.getPersistentTSCollection());
        // ensure standard indexes as long as collections are small
        try {
            if (ptsCollection.count() < 1000) {
                ptsCollection.createIndex(new BasicDBObject("id", 1));
            }
        } catch (MongoException e) {
            // best-effort index creation; failure is intentionally ignored. TODO: log it
        }
        // load (or lazily create) the persisted timestamp document for this endpoint
        persistentTimestamp = ptsCollection.findOne(new BasicDBObject("id", endpoint.getPersistentTSObject()));
        if (persistentTimestamp == null) {
            persistentTimestamp = new BasicDBObject("id", endpoint.getPersistentTSObject());
            fromDate = new java.util.Date();
            persistentTimestamp.put("timestamp", fromDate);
            ptsCollection.save(persistentTimestamp);
        }
        fromDate = (java.util.Date) persistentTimestamp.get("timestamp");
    } else if (usesTimestamp) {
        // non-persistent timestamp strategies start polling from "now"
        fromDate = new java.util.Date();
    }
    try {
        Thread.sleep(endpoint.getInitialDelay());
        while (isStarted()) {
            // (re)issue the query when there is no cursor yet or the server cursor is exhausted
            if (c == null || c.getCursorId() == 0) {
                if (c != null) {
                    c.close();
                }
                String queryString = endpoint.getQuery();
                DBObject query;
                if (queryString == null) {
                    query = new BasicDBObject();
                } else {
                    query = (DBObject) JSON.parse(queryString);
                }
                if (usesTimestamp) {
                    // only files uploaded after the last seen timestamp
                    query.put("uploadDate", new BasicDBObject("$gt", fromDate));
                }
                if (usesAttribute) {
                    // only files whose marker attribute has not been set yet
                    query.put(endpoint.getFileAttributeName(), null);
                }
                c = endpoint.getFilesCollection().find(query);
            }
            boolean dateModified = false;
            while (c.hasNext() && isStarted()) {
                GridFSDBFile file = (GridFSDBFile) c.next();
                GridFSDBFile forig = file;
                if (usesAttribute) {
                    // atomically claim the file by setting its marker attribute to
                    // "processing"; findAndModify returns null if another consumer won.
                    // NOTE(review): the guard queries "camel-processed" while the marker
                    // written is endpoint.getFileAttributeName() — confirm this mismatch
                    // is intended.
                    file.put(endpoint.getFileAttributeName(), "processing");
                    DBObject q = BasicDBObjectBuilder.start("_id", file.getId()).append("camel-processed", null)
                            .get();
                    forig = (GridFSDBFile) endpoint.getFilesCollection().findAndModify(q, null, null, false,
                            file, true, false);
                }
                if (forig != null) {
                    // re-fetch through GridFS so the content stream is available
                    file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));

                    Exchange exchange = endpoint.createExchange();
                    exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA,
                            JSON.serialize(file.getMetaData()));
                    exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
                    exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
                    exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
                    exchange.getIn().setBody(file.getInputStream(), InputStream.class);
                    try {
                        getProcessor().process(exchange);
                        if (usesAttribute) {
                            // mark the file as fully processed
                            forig.put(endpoint.getFileAttributeName(), "done");
                            endpoint.getFilesCollection().save(forig);
                        }
                        if (usesTimestamp) {
                            // advance the timestamp watermark past this file
                            if (file.getUploadDate().compareTo(fromDate) > 0) {
                                fromDate = file.getUploadDate();
                                dateModified = true;
                            }
                        }
                    } catch (Exception e) {
                        // TODO: route to an exception handler instead of printing
                        e.printStackTrace();
                    }
                }
            }
            if (persistsTimestamp && dateModified) {
                // flush the advanced watermark to the side collection
                persistentTimestamp.put("timestamp", fromDate);
                ptsCollection.save(persistentTimestamp);
            }
            Thread.sleep(endpoint.getDelay());
        }
    } catch (Throwable e1) {
        // TODO: proper logging; note that InterruptedException on shutdown lands here too
        e1.printStackTrace();
    }
    if (c != null) {
        c.close();
    }
}

From source file:org.apache.camel.component.mongodb.MongoDbProducer.java

License:Apache License

/**
 * Executes a findAll operation: runs the query taken from the message body (or
 * an empty query when the body is null), applies an optional field filter plus
 * sort/batchSize/skip/limit option headers, and places the result array and
 * result-size headers on the response message.
 *
 * @param exchange the exchange carrying the optional query body and option headers
 * @throws Exception if the Mongo query fails
 */
protected void doFindAll(Exchange exchange) throws Exception {
    DBCollection dbCol = calculateCollection(exchange);
    // do not use getMandatoryBody, because if the body is empty we want to retrieve all objects in the collection
    DBObject query = null;
    // do not run around looking for a type converter unless there is a need for it
    if (exchange.getIn().getBody() != null) {
        query = exchange.getIn().getBody(DBObject.class);
    }
    DBObject fieldFilter = exchange.getIn().getHeader(MongoDbConstants.FIELDS_FILTER, DBObject.class);

    // get the batch size and number to skip
    Integer batchSize = exchange.getIn().getHeader(MongoDbConstants.BATCH_SIZE, Integer.class);
    Integer numToSkip = exchange.getIn().getHeader(MongoDbConstants.NUM_TO_SKIP, Integer.class);
    Integer limit = exchange.getIn().getHeader(MongoDbConstants.LIMIT, Integer.class);
    DBObject sortBy = exchange.getIn().getHeader(MongoDbConstants.SORT_BY, DBObject.class);
    DBCursor ret = null;
    try {
        if (query == null && fieldFilter == null) {
            ret = dbCol.find(new BasicDBObject());
        } else if (fieldFilter == null) {
            ret = dbCol.find(query);
        } else {
            ret = dbCol.find(query, fieldFilter);
        }

        if (sortBy != null) {
            ret.sort(sortBy);
        }

        if (batchSize != null) {
            ret.batchSize(batchSize.intValue());
        }

        if (numToSkip != null) {
            ret.skip(numToSkip.intValue());
        }

        if (limit != null) {
            ret.limit(limit.intValue());
        }

        Message resultMessage = prepareResponseMessage(exchange, MongoDbOperation.findAll);
        resultMessage.setBody(ret.toArray());
        resultMessage.setHeader(MongoDbConstants.RESULT_TOTAL_SIZE, ret.count());
        resultMessage.setHeader(MongoDbConstants.RESULT_PAGE_SIZE, ret.size());

    } finally {
        // no catch block needed: a bare catch-and-rethrow was redundant, and
        // this finally alone guarantees the cursor is closed on every path
        if (ret != null) {
            ret.close();
        }
    }

}

From source file:org.apache.felix.useradmin.mongodb.MongoDBStore.java

License:Apache License

/**
 * Returns all roles from the backing collection, optionally restricted to
 * those whose properties match the given OSGi filter expression.
 *
 * @param filterValue an OSGi filter string, or {@code null} for no filtering
 * @return the matching roles (possibly an empty array)
 * @throws InvalidSyntaxException if the filter string cannot be parsed
 * @throws MongoException if the database query fails
 */
@Override
public Role[] getRoles(String filterValue) throws InvalidSyntaxException, MongoException {
    // Parse the filter up front so a syntax error surfaces before any DB access.
    Filter filter = (filterValue == null) ? null : FrameworkUtil.createFilter(filterValue);

    List<Role> matched = new ArrayList<Role>();
    DBCursor cursor = getCollection().find();
    try {
        while (cursor.hasNext()) {
            // Deserialization of each document individually is not the most
            // elegant approach, but it keeps the logic straightforward.
            Role candidate = m_helper.deserialize(cursor.next());
            boolean accepted = (filter == null) || filter.match(candidate.getProperties());
            if (accepted) {
                matched.add(candidate);
            }
        }
    } finally {
        // always release the server-side cursor
        cursor.close();
    }

    return matched.toArray(new Role[matched.size()]);
}

From source file:org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore.java

License:Apache License

/**
 * Runs a range query over documents whose ids lie strictly between
 * {@code fromKey} and {@code toKey}, optionally restricted by an indexed
 * property, and returns at most {@code limit} converted documents sorted by id.
 * Results for NODES child queries are opportunistically fed into the nodes
 * cache; query statistics are always recorded.
 */
@SuppressWarnings("unchecked")
@Nonnull
<T extends Document> List<T> queryInternal(Collection<T> collection, String fromKey, String toKey,
        String indexedProperty, long startValue, int limit, long maxQueryTime) {
    log("query", fromKey, toKey, indexedProperty, startValue, limit);
    DBCollection dbCollection = getDBCollection(collection);
    // exclusive range query on the document id
    QueryBuilder queryBuilder = QueryBuilder.start(Document.ID);
    queryBuilder.greaterThan(fromKey);
    queryBuilder.lessThan(toKey);

    // default index hint: the _id index
    DBObject hint = new BasicDBObject(NodeDocument.ID, 1);

    if (indexedProperty != null) {
        if (NodeDocument.DELETED_ONCE.equals(indexedProperty)) {
            // DELETED_ONCE is a boolean flag; only querying for "true" (1) is supported
            if (startValue != 1) {
                throw new DocumentStoreException("unsupported value for property " + NodeDocument.DELETED_ONCE);
            }
            queryBuilder.and(indexedProperty);
            queryBuilder.is(true);
        } else {
            queryBuilder.and(indexedProperty);
            queryBuilder.greaterThanEquals(startValue);

            // prefer the modified-time index when the start value makes it usable
            if (NodeDocument.MODIFIED_IN_SECS.equals(indexedProperty) && canUseModifiedTimeIdx(startValue)) {
                hint = new BasicDBObject(NodeDocument.MODIFIED_IN_SECS, -1);
            }
        }
    }
    DBObject query = queryBuilder.get();
    String parentId = Utils.getParentIdFromLowerLimit(fromKey);
    // NOTE(review): lockTime is never updated in this method; stats always receive -1
    long lockTime = -1;
    final Stopwatch watch = startWatch();

    boolean isSlaveOk = false;
    int resultSize = 0;
    // tracks concurrent cache changes while reading the children of a single parent
    CacheChangesTracker cacheChangesTracker = null;
    if (parentId != null && collection == Collection.NODES) {
        cacheChangesTracker = nodesCache.registerTracker(fromKey, toKey);
    }
    try {
        DBCursor cursor = dbCollection.find(query).sort(BY_ID_ASC);
        if (!disableIndexHint && !hasModifiedIdCompoundIndex) {
            cursor.hint(hint);
        }
        if (maxQueryTime > 0) {
            // OAK-2614: set maxTime if maxQueryTimeMS > 0
            cursor.maxTime(maxQueryTime, TimeUnit.MILLISECONDS);
        }
        ReadPreference readPreference = getMongoReadPreference(collection, parentId, null,
                getDefaultReadPreference(collection));

        if (readPreference.isSlaveOk()) {
            isSlaveOk = true;
            LOG.trace("Routing call to secondary for fetching children from [{}] to [{}]", fromKey, toKey);
        }

        cursor.setReadPreference(readPreference);

        List<T> list;
        try {
            list = new ArrayList<T>();
            // stop at the limit even if the cursor has more results
            for (int i = 0; i < limit && cursor.hasNext(); i++) {
                DBObject o = cursor.next();
                T doc = convertFromDBObject(collection, o);
                list.add(doc);
            }
            resultSize = list.size();
        } finally {
            cursor.close();
        }

        if (cacheChangesTracker != null) {
            // populate the nodes cache with documents that did not change concurrently
            nodesCache.putNonConflictingDocs(cacheChangesTracker, (List<NodeDocument>) list);
        }

        return list;
    } finally {
        if (cacheChangesTracker != null) {
            cacheChangesTracker.close();
        }
        // record timing and outcome regardless of success or failure
        stats.doneQuery(watch.elapsed(TimeUnit.NANOSECONDS), collection, fromKey, toKey,
                indexedProperty != null, resultSize, lockTime, isSlaveOk);
    }
}

From source file:org.apache.jackrabbit.oak.plugins.document.mongo.MongoVersionGCSupport.java

License:Apache License

/**
 * Debug-logs the ids of the split documents that the given query selects for
 * deletion during garbage collection.
 *
 * @param query the query selecting the split documents about to be deleted
 */
private void logSplitDocIdsTobeDeleted(DBObject query) {
    // Skip the extra find() round-trip entirely when debug logging is disabled.
    if (!LOG.isDebugEnabled()) {
        return;
    }
    // Fetch only the id; the document bodies are not needed.
    final BasicDBObject keys = new BasicDBObject(Document.ID, 1);
    List<String> ids;
    DBCursor cursor = getNodeCollection().find(query, keys)
            .setReadPreference(store.getConfiguredReadPreference(NODES));
    try {
        ids = ImmutableList.copyOf(Iterables.transform(cursor, new Function<DBObject, String>() {
            @Override
            public String apply(DBObject input) {
                return (String) input.get(Document.ID);
            }
        }));
    } finally {
        // always release the server-side cursor
        cursor.close();
    }
    StringBuilder sb = new StringBuilder("Split documents with following ids were deleted as part of GC \n");
    Joiner.on(StandardSystemProperty.LINE_SEPARATOR.value()).appendTo(sb, ids);
    LOG.debug(sb.toString());
}

From source file:org.apache.karaf.jaas.modules.mongo.internal.DefaultUserDetailService.java

License:Apache License

/**
 * Loads a single user's details (credentials, configured extra attributes and
 * group memberships) from MongoDB.
 *
 * @param username the login name to look up
 * @return the populated {@code UserInfo}, or {@code null} if no such user exists
 * @throws Exception if the database cannot be reached or queried
 */
@Override
public UserInfo getUserInfo(String username) throws Exception {

    DB db = getDB();

    DBCollection users = db.getCollection(configuration.getUserCollectionName());

    // populate user
    DBObject userQuery = new BasicDBObject("username", username);

    // project only the fields we need; suppress _id
    BasicDBObjectBuilder userProjectionBuilder = BasicDBObjectBuilder.start().add("_id", 0).add("username", 1)
            .add("passwordHash", 1);

    // also add all custom user fields
    for (String prop : configuration.getAdditionalAttributes()) {
        userProjectionBuilder.add(prop, 1);
    }

    DBObject user = users.findOne(userQuery, userProjectionBuilder.get());
    // if nothing comes back just return empty handed
    if (user == null) {
        return null;
    }

    UserInfo userInfo = new UserInfo().withName((String) user.get("username"))
            .withPassword((String) user.get("passwordHash"));

    for (String prop : configuration.getAdditionalAttributes()) {

        // only add if property is actually present in the database
        if (user.containsField(prop)) {
            Object val = user.get(prop);
            userInfo.addProperty(prop, val != null ? val.toString() : "");
        }

    }

    // populate groups: every group document whose "members" field matches the username
    DBCollection groups = db.getCollection(configuration.getGroupCollectionName());

    DBObject groupQuery = new BasicDBObject("members", username);

    DBCursor gc = groups.find(groupQuery,
            BasicDBObjectBuilder.start().append("_id", 0).append("name", 1).get());
    try {
        while (gc.hasNext()) {
            DBObject group = gc.next();
            userInfo.addGroup((String) group.get("name"));
        }
    } finally {
        // close even if iteration throws, to avoid leaking the server-side cursor
        gc.close();
    }

    return userInfo;

}

From source file:org.apache.karaf.jaas.modules.mongo.internal.DefaultUserDetailService.java

License:Apache License

/**
 * Returns the login names of every user in the configured user collection.
 *
 * @return the usernames (possibly empty, never {@code null})
 * @throws Exception if the database cannot be reached or queried
 */
public java.util.List<String> getUserNames() throws Exception {

    List<String> result = new LinkedList<String>();

    DBCollection users = getDB().getCollection(configuration.getUserCollectionName());

    // project only the username field; suppress _id
    DBObject userProjection = new BasicDBObjectBuilder().add("_id", 0).add("username", 1).get();

    DBCursor uc = users.find(null, userProjection);
    try {
        while (uc.hasNext()) {
            // each document is a user record holding just its username
            DBObject user = uc.next();
            result.add((String) user.get("username"));
        }
    } finally {
        // close even if iteration throws, to avoid leaking the server-side cursor
        uc.close();
    }

    return result;
}

From source file:org.apache.metamodel.mongodb.mongo2.MongoDbDataContext.java

License:Apache License

/**
 * Performs an analysis of an available collection in a Mongo {@link DB}
 * instance and tries to detect the table structure based on the first 1000
 * documents in the collection.
 *
 * @param db
 *            the mongo DB
 * @param collectionName
 *            the name of the collection
 * @return a table definition for mongo db.
 */
public static SimpleTableDef detectTable(DB db, String collectionName) {
    final DBCollection collection = db.getCollection(collectionName);
    final DBCursor cursor = collection.find().limit(1000);

    // column name -> set of distinct value classes observed for that column
    final SortedMap<String, Set<Class<?>>> columnsAndTypes = new TreeMap<String, Set<Class<?>>>();
    try {
        while (cursor.hasNext()) {
            DBObject object = cursor.next();
            Set<String> keysInObject = object.keySet();
            for (String key : keysInObject) {
                Set<Class<?>> types = columnsAndTypes.get(key);
                if (types == null) {
                    types = new HashSet<Class<?>>();
                    columnsAndTypes.put(key, types);
                }
                Object value = object.get(key);
                if (value != null) {
                    types.add(value.getClass());
                }
            }
        }
    } finally {
        // close even if decoding a document throws, to avoid leaking the cursor
        cursor.close();
    }

    final String[] columnNames = new String[columnsAndTypes.size()];
    final ColumnType[] columnTypes = new ColumnType[columnsAndTypes.size()];

    int i = 0;
    for (Entry<String, Set<Class<?>>> columnAndTypes : columnsAndTypes.entrySet()) {
        final String columnName = columnAndTypes.getKey();
        final Set<Class<?>> columnTypeSet = columnAndTypes.getValue();
        // a column with a single consistent value class keeps that type;
        // mixed-type columns fall back to Object
        final Class<?> columnType;
        if (columnTypeSet.size() == 1) {
            columnType = columnTypeSet.iterator().next();
        } else {
            columnType = Object.class;
        }
        columnNames[i] = columnName;
        if (columnType == ObjectId.class) {
            columnTypes[i] = ColumnType.ROWID;
        } else {
            columnTypes[i] = ColumnTypeImpl.convertColumnType(columnType);
        }
        i++;
    }

    return new SimpleTableDef(collectionName, columnNames, columnTypes);
}

From source file:org.apache.rya.indexing.mongodb.AbstractMongoIndexer.java

License:Apache License

/**
 * Wraps a query over the indexer's collection in a {@code CloseableIteration}
 * that lazily converts each returned document into a {@link Statement}.
 *
 * @param dbo the query document to execute
 * @return an iteration over the matching statements; {@code remove()} is unsupported,
 *         and {@code close()} releases the underlying cursor
 */
private CloseableIteration<Statement, QueryEvaluationException> closableIterationFromCursor(
        final DBObject dbo) {
    final DBCursor results = collection.find(dbo);
    return new CloseableIteration<Statement, QueryEvaluationException>() {
        @Override
        public boolean hasNext() {
            return results.hasNext();
        }

        @Override
        public Statement next() throws QueryEvaluationException {
            // deserialize the raw document, then convert it into an RDF statement
            final DBObject document = results.next();
            return RyaToRdfConversions.convertStatement(storageStrategy.deserializeDBObject(document));
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("Remove not implemented");
        }

        @Override
        public void close() throws QueryEvaluationException {
            results.close();
        }
    };
}