Example usage for com.mongodb DBCursor size

List of usage examples for com.mongodb DBCursor size

Introduction

In this page you can find the example usage for com.mongodb DBCursor size.

Prototype

public int size() 

Source Link

Document

Counts the number of objects matching the query this does take limit/skip into consideration

Usage

From source file:org.graylog2.streams.StreamRuleServiceImpl.java

License:Open Source License

@Override
public Map<String, Long> streamRuleCountByStream() {
    final DBCursor streamIds = collection(StreamImpl.class).find(new BasicDBObject(),
            new BasicDBObject("_id", 1));

    final Map<String, Long> streamRules = new HashMap<>(streamIds.size());
    for (DBObject keys : streamIds) {
        final ObjectId streamId = (ObjectId) keys.get("_id");
        streamRules.put(streamId.toHexString(), streamRuleCount(streamId));
    }//w w  w  .j  ava 2s  . c om

    return streamRules;
}

From source file:org.jberet.repository.MongoRepository.java

License:Open Source License

@Override
public List<Long> getRunningExecutions(final String jobName) {
    //find all job instance ids belonging to the jobName
    DBObject keys = new BasicDBObject(TableColumns.JOBINSTANCEID, 1);
    DBCursor cursor = db.getCollection(TableColumns.JOB_INSTANCE)
            .find(new BasicDBObject(TableColumns.JOBNAME, jobName), keys);

    if (cursor.size() == 0) {
        throw BatchMessages.MESSAGES.noSuchJobException(jobName);
    }/*ww  w  . ja  v  a2  s.  com*/

    //add matching job instance ids to the "jobinstanceid in" list
    BasicDBList basicDBList = new BasicDBList();
    while (cursor.hasNext()) {
        final DBObject next = cursor.next();
        basicDBList.add(next.get(TableColumns.JOBINSTANCEID));
    }
    final DBObject inJobInstanceIdsClause = new BasicDBObject("$in", basicDBList);
    final DBObject query = new BasicDBObject(TableColumns.JOBINSTANCEID, inJobInstanceIdsClause);

    //create "batchstatus in" list
    basicDBList = new BasicDBList();
    basicDBList.add(BatchStatus.STARTED.name());
    basicDBList.add(BatchStatus.STARTING.name());
    final DBObject inBatchStatusClause = new BasicDBObject("$in", basicDBList);

    //combine batchstatus in clause into jobinstanceid in clause
    query.put(TableColumns.BATCHSTATUS, inBatchStatusClause);
    keys = new BasicDBObject(TableColumns.JOBEXECUTIONID, 1);
    cursor = db.getCollection(TableColumns.JOB_EXECUTION).find(query, keys);

    final List<Long> result = new ArrayList<Long>();
    while (cursor.hasNext()) {
        final DBObject next = cursor.next();
        result.add((Long) next.get(TableColumns.JOBEXECUTIONID));
    }

    return result;
}

From source file:org.mandar.analysis.recsys2014.recsysMain.java

License:Open Source License

public void run() {
    // We first need to configure the data access.
    LenskitConfiguration dataConfig = this.configureDAO(DBSettings.TRAINING_COLLECTION);
    // Now we create the LensKit configuration...
    LenskitConfiguration config = new LenskitConfiguration();
    if (algo.equals("svd")) {
        config = this.configureSVDRecommender(numFeatures, numIterations, regularizationParam,
                stoppingCondition, threshold);
    } else if (algo.equals("ii")) {
        config = this.configureIIRecommender(numNeighbours, similarityModel);
    } else if (algo.equals("uu")) {
        config = this.configureUURecommender(numNeighbours);
    } else if (algo.equals("so")) {
        config = this.configureSORecommender(damping);
    } else if (algo.equals("tfidf")) {
        config = this.configureTFIDFRecommender();
    }//ww  w . j  a va2s  .  co  m

    // There are more parameters, roles, and components that can be set. See the
    // JavaDoc for each recommender algorithm for more information.
    // Now that we have a factory, build a recommender from the configuration
    // and data source. This will compute the similarity matrix and return a recommender
    // that uses it.
    LenskitRecommender rec = null;
    try {
        LenskitRecommenderEngine engine = LenskitRecommenderEngine.newBuilder().addConfiguration(config)
                .addConfiguration(dataConfig).build();
        rec = engine.createRecommender(dataConfig);

    } catch (RecommenderBuildException e) {
        e.printStackTrace();
        System.exit(1);
    }
    // we want to recommend items
    if ("training".equals(this.goal)) {
        ItemRecommender irec = rec.getItemRecommender();
        assert irec != null; // not null because we configured one
        // for users
        try {
            MongoClient mongoClient = new MongoClient(DBSettings.DBHOST);
            DB db = mongoClient.getDB(DBSettings.DATABASE);
            DBCollection collection = db.getCollection(DBSettings.MOVIES_COLLECTION);
            for (long user : users) {
                // get 10 recommendation for the user
                List<ScoredId> recs = irec.recommend(user, 10);
                System.out.format("Recommendations for %d:\n", user);
                for (ScoredId item : recs) {
                    DBObject obj = collection.findOne(new BasicDBObject(DBSettings.FIELDS.movie, item.getId()));
                    String recTitle = obj.get("title").toString();
                    String recDirector = obj.get("director").toString();
                    String recRel = obj.get("release_date").toString();
                    String recStars = obj.get("stars").toString();

                    System.out.format("\tID:%d, %s, %s Directed By: %s Starring: %s\n", item.getId(), recTitle,
                            recRel, recDirector, recStars);
                }
            }
            mongoClient.close();
        } catch (UnknownHostException u) {
            u.printStackTrace();
        }
    } else if ("eval".equals(this.goal)) {
        //ItemScorer iscorer = rec.getItemScorer();
        RatingPredictor rat = rec.getRatingPredictor();
        File outFile = new File("data/participant_solution_" + algo + ".dat");
        String line = "";
        //String cvsSplitBy = ",";
        long eng = 0;
        int count = 0;
        try {
            long lines = Utils.countLines("data/test_solution.dat");
            //outFile.delete();
            BufferedWriter brout = new BufferedWriter((new FileWriter(outFile, false)));
            //BufferedReader br = new BufferedReader(new FileReader(csvData));
            long progress = 0;
            //br.readLine();
            System.out.println(
                    "Reading from Test Set and writing result " + "data/participant_solution_" + algo + ".dat");

            MongoClient mongoClient = new MongoClient(DBSettings.DBHOST);
            DB db = mongoClient.getDB(DBSettings.DATABASE);
            DBCollection collection = db.getCollection(DBSettings.TEST_COLLECTION_EMPTY);
            DBCursor cur = collection.find();

            ArrayList<DBObject> arr = new ArrayList<DBObject>(cur.size());

            System.out.println("Making ObjectArrayList out of test collection result");
            while (cur.hasNext()) {
                DBObject buff = cur.next();
                eng = (long) Math.abs(rat.predict(Long.parseLong(buff.get("uID").toString()),
                        Long.parseLong(buff.get("movieID").toString())));
                buff.put("engagement", eng);
                arr.add(buff);
                count++;
            }

            cur.close();
            //Now sort this by uID (desc), engagement (desc) and tweetID (desc)
            System.out.println("Sorting ObjectArrayList");
            Collections.sort(arr, new MongoComparator());
            for (int i = 0; i < arr.size(); i++) {
                brout.write(arr.get(i).get("uID") + "," + arr.get(i).get("tweetID") + ","
                        + arr.get(i).get("engagement"));
                brout.newLine();
                progress++;
                if ((progress * 100 / lines) % 10 >= 0 && (progress * 100 / lines) % 10 <= 1) {
                    System.out.println("File write Progress: " + (progress * 100 / lines) + " %");
                }
            }
            brout.close();

            ProcessBuilder pbr = new ProcessBuilder("java", "-jar",
                    "rscevaluator-0.1-jar-with-dependencies.jar", "data/test_solution.dat",
                    "data/participant_solution_" + algo + ".dat");
            Process p = pbr.start();

            BufferedReader is = new BufferedReader(new InputStreamReader(p.getInputStream()));
            double resultbuff = 0.0d;
            while ((line = is.readLine()) != null) {
                if (line.contains("nDCG@10:")) {
                    resultbuff = Double.parseDouble(line.substring(9));
                }
                System.out.println(line);
            }
            System.out.println("Writing evaluation results to MongoDB");
            this.writeAlgoTestResults(resultbuff, db);
            mongoClient.close();
            p.waitFor();

        } catch (FileNotFoundException f) {
            f.printStackTrace();
        } catch (IOException f) {
            f.printStackTrace();
        } catch (InterruptedException i) {
            i.printStackTrace();
        } catch (NullPointerException n) {
            n.printStackTrace();
        }
    }
}

From source file:org.mephi.griffin.actorcloud.storage.StorageActor.java

License:Apache License

@Override
public void onReceive(Object message) {
    logger.entering("StorageActor", "onReceive");
    if (message instanceof Get) {
        logger.logp(Level.FINER, "StorageActor", "onReceive",
                "StorageActor <- Get from " + getSender().path().name() + ": " + message);
        Get request = (Get) message;/* w  w  w . ja  v a  2s. c o m*/
        DBCursor cursor;
        StorageResult msg;
        try {
            if (request.getQuery() != null)
                cursor = db.getCollection(request.getCollection()).find(request.getQuery());
            else
                cursor = db.getCollection(request.getCollection()).find();
            if (request.getSort() != null)
                cursor = cursor.sort(request.getSort());
            Entity[] result = new Entity[cursor.size()];
            int i = 0;
            while (cursor.hasNext()) {
                BasicDBObject tmp = (BasicDBObject) cursor.next();
                result[i++] = new Entity(tmp);
            }
            msg = new StorageResult(StorageResult.GET, request.getId(), result);
        } catch (MongoException me) {
            logger.throwing("StorageActor", "onReceive", me);
            msg = new StorageResult(StorageResult.GET, request.getId(), me.getMessage());
        }
        logger.logp(Level.FINER, "StorageActor", "onReceive",
                "StorageResult -> " + getSender().path().name() + ": " + msg);
        getSender().tell(msg, getSelf());
    } else if (message instanceof Insert) {
        logger.logp(Level.FINER, "StorageActor", "onReceive",
                "StorageActor <- Insert from " + getSender().path().name() + ": " + message);
        Insert request = (Insert) message;
        StorageResult msg;
        try {
            WriteResult result = db.getCollection(request.getCollection()).insert(request.getDocs());
            msg = new StorageResult(StorageResult.PUT, request.getId(), result.getN());
        } catch (MongoException me) {
            logger.throwing("StorageActor", "onReceive", me);
            msg = new StorageResult(StorageResult.PUT, request.getId(), me.getMessage());
        }
        logger.logp(Level.FINER, "StorageActor", "onReceive",
                "StorageResult -> " + getSender().path().name() + ": " + msg);
        getSender().tell(msg, getSelf());
    } else if (message instanceof Update) {
        logger.logp(Level.FINER, "StorageActor", "onReceive",
                "StorageActor <- Update from " + getSender().path().name() + ": " + message);
        Update request = (Update) message;
        StorageResult msg;
        try {
            WriteResult result = db.getCollection(request.getCollection()).update(request.getQuery(),
                    new BasicDBObject("$set", request.getFields()));
            msg = new StorageResult(StorageResult.UPDATE, request.getId(), result.getN());
        } catch (MongoException me) {
            logger.throwing("StorageActor", "onReceive", me);
            msg = new StorageResult(StorageResult.UPDATE, request.getId(), me.getMessage());
        }
        logger.logp(Level.FINER, "StorageActor", "onReceive",
                "StorageResult -> " + getSender().path().name() + ": " + msg);
        getSender().tell(msg, getSelf());
    } else if (message instanceof Remove) {
        logger.logp(Level.FINER, "StorageActor", "onReceive",
                "StorageActor <- Remove from " + getSender().path().name() + ": " + message);
        Remove request = (Remove) message;
        StorageResult msg;
        try {
            WriteResult result = db.getCollection(request.getCollection()).remove(request.getQuery());
            msg = new StorageResult(StorageResult.REMOVE, request.getId(), result.getN());
        } catch (MongoException me) {
            logger.throwing("StorageActor", "onReceive", me);
            msg = new StorageResult(StorageResult.REMOVE, request.getId(), me.getMessage());
        }
        logger.logp(Level.FINER, "StorageActor", "onReceive",
                "StorageResult -> " + getSender().path().name() + ": " + msg);
        getSender().tell(msg, getSelf());
    } else
        unhandled(message);
    logger.exiting("StorageActor", "onReceive");
}

From source file:org.mongodb.workshop.api.ComicsService.java

License:Apache License

@GET
@Path("/")
@ApiOperation(value = "Return Comics using pagination")
public DBObject all(@DefaultValue("1") @QueryParam("page") int page) {
    long count = comicsCollection.count();
    int skip = ITEMS_PER_PAGE * (page - 1);
    DBCursor cursor = comicsCollection.find().skip(skip).limit(ITEMS_PER_PAGE);
    List<DBObject> items = cursor.toArray(cursor.size());
    ResultDBObject result = new ResultDBObject(count, ITEMS_PER_PAGE, page, items);
    return result;
}

From source file:org.mongodb.workshop.api.ComicsService.java

License:Apache License

@GET
@Path("/search")
@ApiOperation(value = "Full text search on comics")
public DBObject search(@DefaultValue("1") @QueryParam("page") int page, @QueryParam("keyword") String keyword) {

    DBObject query = QueryBuilder.start().text(keyword).get();

    DBObject proj = BasicDBObjectBuilder.start().append("title", 1).append("description", 1).push("score")
            .append("$meta", "textScore").get();

    DBObject sort = BasicDBObjectBuilder.start().push("score").append("$meta", "textScore").get();

    long count = comicsCollection.count(query);
    int skip = ITEMS_PER_PAGE * (page - 1);

    DBCursor cursor = comicsCollection.find(query, proj).sort(sort).skip(skip).limit(ITEMS_PER_PAGE);

    List<DBObject> items = cursor.toArray(cursor.size());

    ResultDBObject result = new ResultDBObject(count, ITEMS_PER_PAGE, page, items);

    return result;
}

From source file:org.mongodb.workshop.api.CreatorsService.java

License:Apache License

@GET
@Path("/")
@ApiOperation(value = "Return Creators using pagination")
public DBObject all(@DefaultValue("1") @QueryParam("page") int page) {
    long count = creatorsCollection.count();
    int skip = ITEMS_PER_PAGE * (page - 1);
    DBCursor cursor = creatorsCollection.find().skip(skip).limit(ITEMS_PER_PAGE);
    List<DBObject> items = cursor.toArray(cursor.size());
    ResultDBObject result = new ResultDBObject(count, ITEMS_PER_PAGE, page, items);
    return result;
}

From source file:org.mongodb.workshop.api.CreatorsService.java

License:Apache License

@GET
@Path("/search")
@ApiOperation(value = "Full text search on Creators")
public DBObject search(@DefaultValue("1") @QueryParam("page") int page,
        @QueryParam("comicsName") String comicsName) {

    DBObject query = QueryBuilder.start().put("comics.items.name")
            .regex(java.util.regex.Pattern.compile(comicsName, Pattern.CASE_INSENSITIVE)).get();

    long count = creatorsCollection.count(query);
    int skip = ITEMS_PER_PAGE * (page - 1);
    DBCursor cursor = creatorsCollection.find(query).skip(skip).limit(ITEMS_PER_PAGE);
    List<DBObject> items = cursor.toArray(cursor.size());
    ResultDBObject result = new ResultDBObject(count, ITEMS_PER_PAGE, page, items);
    return result;
}

From source file:org.ndsc.mimicIO.db.DBManager.java

License:Apache License

/**
 * Reads all virtual networks from database and spawn an OVXNetworkManager
 * for each./* w  w w  . jav a  2s .co  m*/
 */
@SuppressWarnings("unchecked")
private void readOVXNetworks() {
    PrintStream ps = System.err;
    System.setErr(null);
    try {
        // Get a cursor over all virtual networks
        DBCollection coll = this.collections.get(DBManager.DB_VNET);
        DBCursor cursor = coll.find();
        log.info("Loading {} virtual networks from database", cursor.size());
        while (cursor.hasNext()) {
            OVXNetworkManager mngr = null;
            Map<String, Object> vnet = cursor.next().toMap();
            try {
                // Create vnet manager for each virtual network
                mngr = new OVXNetworkManager(vnet);
                OVXNetwork.reserveTenantId(mngr.getTenantId());
                // Accessing DB_KEY field through a class derived from the
                // abstract OVXSwitch
                List<Map<String, Object>> switches = (List<Map<String, Object>>) vnet.get(Switch.DB_KEY);
                List<Map<String, Object>> links = (List<Map<String, Object>>) vnet.get(Link.DB_KEY);
                List<Map<String, Object>> ports = (List<Map<String, Object>>) vnet.get(Port.DB_KEY);
                List<Map<String, Object>> routes = (List<Map<String, Object>>) vnet.get(SwitchRoute.DB_KEY);
                this.readOVXSwitches(switches, mngr);
                this.readOVXLinks(links, mngr);
                this.readOVXPorts(ports, mngr);
                this.readOVXRoutes(routes, mngr);
                DBManager.log.info("Virtual network {} waiting for {} switches, {} links and {} ports",
                        mngr.getTenantId(), mngr.getSwitchCount(), mngr.getLinkCount(), mngr.getPortCount());
            } catch (IndexOutOfBoundException | DuplicateIndexException e) {
                DBManager.log.error("Failed to load virtual network {}: {}", mngr.getTenantId(),
                        e.getMessage());
            }
        }
    } catch (Exception e) {
        log.error("Failed to load virtual networks from db: {}", e.getMessage());
    } finally {
        System.setErr(ps);
    }
}

From source file:org.opencb.cellbase.mongodb.db.ExonMongoDBAdaptor.java

License:Apache License

private List<Gene> executeQuery(DBObject query, List<String> excludeFields) {
    List<Gene> result = null;

    DBCursor cursor = null;
    if (excludeFields != null && excludeFields.size() > 0) {
        BasicDBObject returnFields = new BasicDBObject("_id", 0);
        for (String field : excludeFields) {
            returnFields.put(field, 0);/* ww  w.  ja v  a2 s. co  m*/
        }
        cursor = mongoDBCollection.find(query, returnFields);
    } else {
        cursor = mongoDBCollection.find(query);
    }

    try {
        if (cursor != null) {
            result = new ArrayList<Gene>(cursor.size());
            //                Gson jsonObjectMapper = new Gson();
            Gene gene = null;
            while (cursor.hasNext()) {
                //                    gene = (Gene) jsonObjectMapper.fromJson(cursor.next().toString(), Gene.class);
                result.add(gene);
            }
        }
    } finally {
        cursor.close();
    }
    return result;
}