Example usage for com.mongodb DBCursor toArray

List of usage examples for com.mongodb DBCursor toArray

Introduction

This page lists example usages of com.mongodb.DBCursor.toArray().

Prototype

public List<DBObject> toArray() 

Document

Converts this cursor to an array.
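
A minimal sketch of the typical call pattern (the collection name and the "active" field below are hypothetical and only illustrate the call): toArray() drains the remaining results into memory, so it is best reserved for result sets known to be small, and the cursor should still be closed afterwards.

// Minimal sketch, not taken from the examples below: "usersCollection" is an
// assumed DBCollection and "active" an assumed field name.
DBCursor cursor = usersCollection.find(new BasicDBObject("active", true));
try {
    List<DBObject> documents = cursor.toArray(); // loads every remaining result into a List
    for (DBObject document : documents) {
        System.out.println(document);
    }
} finally {
    cursor.close();
}

Several of the examples below follow this pattern; note that a few of them never close the cursor explicitly.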

Usage

From source file:course.BlogPostDAOImpl.java

License:Apache License

public List<DBObject> findByDateDescending(int limit) {
    List<DBObject> posts = null;
    DBCursor cursor = postsCollection.find().sort(new BasicDBObject().append("date", -1)).limit(limit);
    try {
        posts = cursor.toArray();
    } catch (Exception e) {
        System.out.print(e.getLocalizedMessage());
    } finally {
        cursor.close();
    }
    return posts;
}

From source file:course.week3.BlogPostDAO.java

License:Apache License

public List<DBObject> findByDateDescending(int limit) {

    DBCursor objects = postsCollection.find().sort(new BasicDBObject("date", -1)).limit(limit); // -1 sorts newest first
    List<DBObject> posts = objects.toArray();

    // XXX HW 3.2,  Work Here
    // Return a list of DBObjects, each one a post from the posts collection

    return posts;
}

From source file:dbscan.DBScanReducer.java

License:Apache License

/**
 * The reduce function receives as input an 'array' of clusters that share the same key.
 * Its job is to aggregate these clusters and analyze their neighborhoods in order
 * to merge all points into a single cluster.
 * This method can be called many times, virtually every time the map function emits
 * a new cluster whose key equals that of another cluster.
 *
 * @param eventKey     the key of the input clusters
 * @param eventValues  the iterable of clusters (each element is a BSONWritable)
 * @param eventContext the context in which map-reduce works
 */
@Override
public void reduce(final Text eventKey, final Iterable<BSONWritable> eventValues, final Context eventContext)
        throws IOException, InterruptedException {

    //System.out.println("Reducing clusters with key : " + eventKey + "...");

    // get the iterator
    Iterator<BSONWritable> iterator = eventValues.iterator();

    // alloc *new* cluster
    BSONWritable newCluster = new BSONWritable();

    int numPoints = 0;
    int k = 0;
    float avgLat = 0;
    float avgLon = 0;
    int numPointsAnalyzed = 0;

    // start loop for analyze every cluster
    while (iterator.hasNext()) {

        BSONObject aCluster = iterator.next();

        // at the first to loop, initialize the *new* cluster
        if (k == 0) {
            newCluster.put("loc", aCluster.get("loc"));
            newCluster.put("createdAt", aCluster.get("createdAt"));
            newCluster.put("hashtag", aCluster.get("hashtag"));
            newCluster.put("isEvent", aCluster.get("isEvent"));
        }

        // add points to *new* cluster
        numPoints += (Integer) aCluster.get("numPoints");

        // put all neighbor points to a ConcurrentHashMap
        Map<ObjectId, BSONObject> tmp = (Map<ObjectId, BSONObject>) aCluster.get("neighborPoints");
        Map<ObjectId, BSONObject> neighborPoints = new ConcurrentHashMap<ObjectId, BSONObject>();
        neighborPoints.putAll(tmp);

        // start loop for neighbor points         
        int i = 0;
        for (Iterator<Map.Entry<ObjectId, BSONObject>> iteratorNeighborPoints = neighborPoints.entrySet()
                .iterator(); iteratorNeighborPoints.hasNext();) {

            Map.Entry<ObjectId, BSONObject> p = iteratorNeighborPoints.next();

            // needs to re-query MongoDB because the point now could be visited
            // by, for example, a map thread concurrent to this reduce thread
            BSONObject point = collection.findOne(new BasicDBObject("_id", p.getValue().get("_id")));
            boolean pointModified = false;

            if (point != null) {
                if ((Boolean) point.get("visited") == false) {

                    // mark as visited
                    point.put("visited", true);
                    pointModified = true;

                    // find near points within 'radius' km (one degree is roughly 111.12 km)
                    BasicDBObject findNearPoints = new BasicDBObject();
                    findNearPoints.put("loc", new BasicDBObject("$within", new BasicDBObject("$center",
                            new Object[] { point.get("loc"), Double.valueOf(radius / 111.12) })));
                    findNearPoints.put("hashtag", point.get("hashtag"));
                    DBCursor nearPoints = collection.find(findNearPoints);

                    if (nearPoints.size() >= minPointsToCreateCluster) {
                        // increase performance by adding only points unvisited OR unclusterized
                        // two query BUT much less points to loop
                        findNearPoints.put("$or", new BasicDBObject[] { new BasicDBObject("visited", false),
                                new BasicDBObject("clusterized", false) });
                        nearPoints = collection.find(findNearPoints);

                        toMap(neighborPoints, nearPoints.toArray());
                    }

                    // refer to null to free a bit of memory
                    findNearPoints = null;
                    nearPoints = null;

                } // end if visited == false

                // add point to cluster
                if ((Boolean) point.get("clusterized") == false) {
                    // add the point to cluster
                    point.put("clusterized", true);
                    pointModified = true;
                    numPoints++;
                }

                // update new point in MongoDB
                if (pointModified)
                    collection.findAndModify(new BasicDBObject("_id", point.get("_id")),
                            new BasicDBObject(point.toMap()));

                // update average location
                if (((BasicBSONObject) point.get("loc")).get("lat") instanceof Double)
                    avgLat += ((Double) ((BasicBSONObject) point.get("loc")).get("lat")).floatValue();
                else
                    avgLat += ((Integer) ((BasicBSONObject) point.get("loc")).get("lat")).floatValue();
                if (((BasicBSONObject) point.get("loc")).get("lon") instanceof Double)
                    avgLon += ((Double) ((BasicBSONObject) point.get("loc")).get("lon")).floatValue();
                else
                    avgLon += ((Integer) ((BasicBSONObject) point.get("loc")).get("lon")).floatValue();

                point = null;
                i++;
                numPointsAnalyzed++;
            }
        } // end loop for neighbor points
        k++;

        aCluster = null;
        neighborPoints = null;
        System.gc();

    } // end loop for clusters

    if (numPointsAnalyzed > 0) {
        // update average location of new cluster with the weighted average
        // of points analyzed
        avgLat = avgLat / (float) numPointsAnalyzed;
        avgLon = avgLon / (float) numPointsAnalyzed;

        // if the location of analyzed points is significantly different from
        // the old cluster location, then that cluster is not an event!
        BSONObject loc = (BSONObject) newCluster.get("loc");
        LatLng oldLatLon = new LatLng((Double) loc.get("lat"), (Double) loc.get("lon"));
        LatLng newLatLon = new LatLng(avgLat, avgLon);
        double distance = oldLatLon.distance(newLatLon);

        if (distance < MAX_DISTANCE_OFFSET_NEW_CLUSTER_LOCATION)
            // mark as event
            newCluster.put("isEvent", true);
        else
            // mark as no-event
            newCluster.put("isEvent", false);

        // update new position (only if is valid)
        if (avgLat >= -90.0f && avgLat <= 90.0f && avgLon >= -180.0f && avgLon <= 180.0f) {
            DecimalFormat df = new DecimalFormat("##.######");
            Map<String, Float> newLoc = new TreeMap<String, Float>();
            newLoc.put("lat", Float.parseFloat(df.format(avgLat)));
            newLoc.put("lon", Float.parseFloat(df.format(avgLon)));
            newCluster.put("loc", newLoc);
        }

    }

    // update new cluster object
    newCluster.put("numPoints", numPoints);
    newCluster.put("neighborPoints", new HashMap<ObjectId, BSONObject>());

    // write to context if and only if the new cluster has enough points
    if (numPoints > 30)
        eventContext.write(eventKey, newCluster);

    newCluster = null;

    // IN CASE OF MEMORY PROBLEMS: force garbage collector
    // it may not be elegant and is often not recommended,
    // but it works
    System.gc();

}

From source file:edu.sjsu.cohort6.esp.dao.mongodb.CourseDAO.java

License:Open Source License

@Override
public synchronized List<Course> fetch(String query) {
    List<Course> courses = new ArrayList<>();
    DBObject dbObjQuery;
    DBCursor cursor;
    if (query != null) {
        dbObjQuery = (DBObject) JSON.parse(query);
        cursor = this.getCollection().find(dbObjQuery);
    } else {
        cursor = this.getCollection().find();
    }

    List<DBObject> dbObjects = cursor.toArray();
    for (DBObject dbObject : dbObjects) {
        Course course = morphia.fromDBObject(Course.class, dbObject);
        courses.add(course);
    }
    return courses;

}

From source file:edu.sjsu.cohort6.esp.dao.mongodb.StudentDAO.java

License:Open Source License

@Override
public synchronized List<Student> fetch(String query) {
    List<Student> students = new ArrayList<>();
    DBObject dbObjQuery;
    DBCursor cursor;
    if (query != null) {
        dbObjQuery = (DBObject) JSON.parse(query);
        cursor = this.getCollection().find(dbObjQuery);
    } else {
        cursor = this.getCollection().find();
    }

    List<DBObject> dbObjects = cursor.toArray();
    for (DBObject dbObject : dbObjects) {
        Student student = morphia.fromDBObject(Student.class, dbObject);
        students.add(student);
    }
    return students;
}

From source file:edu.sjsu.cohort6.esp.dao.mongodb.UserDAO.java

License:Open Source License

@Override
public List<User> fetch(String query) {
    List<User> users = new ArrayList<>();
    DBObject dbObjQuery;
    DBCursor cursor;
    if (query != null) {
        dbObjQuery = (DBObject) JSON.parse(query);
        cursor = this.getCollection().find(dbObjQuery);
    } else {
        cursor = this.getCollection().find();
    }

    List<DBObject> dbObjects = cursor.toArray();
    for (DBObject dbObject : dbObjects) {
        User user = morphia.fromDBObject(User.class, dbObject);
        users.add(user);
    }
    return users;
}

From source file:edu.sjsu.cohort6.openstack.db.mongodb.QuotaDAO.java

License:Open Source License

@Override
public List<Quota> fetch(String query) throws DBException {
    List<Quota> quotas = new ArrayList<>();
    DBObject dbObjQuery;
    DBCursor cursor;
    if (query != null) {
        dbObjQuery = (DBObject) JSON.parse(query);
        cursor = this.getCollection().find(dbObjQuery);
    } else {
        cursor = this.getCollection().find();
    }

    List<DBObject> dbObjects = cursor.toArray();
    for (DBObject dbObject : dbObjects) {
        Quota quota = morphia.fromDBObject(Quota.class, dbObject);
        quotas.add(quota);
    }
    return quotas;
}

From source file:edu.sjsu.cohort6.openstack.db.mongodb.ServiceDAO.java

License:Open Source License

@Override
public List<Service> fetch(String query) throws DBException {
    List<Service> services = new ArrayList<>();
    DBObject dbObjQuery;
    DBCursor cursor;
    if (query != null) {
        dbObjQuery = (DBObject) JSON.parse(query);
        cursor = this.getCollection().find(dbObjQuery);
    } else {
        cursor = this.getCollection().find();
    }

    List<DBObject> dbObjects = cursor.toArray();
    for (DBObject dbObject : dbObjects) {
        Service service = morphia.fromDBObject(Service.class, dbObject);
        services.add(service);
    }
    return services;

}

From source file:edu.sjsu.cohort6.openstack.db.mongodb.TaskDAO.java

License:Open Source License

@Override
public List<Task> fetch(String query) throws DBException {
    List<Task> tasks = new ArrayList<>();
    DBObject dbObjQuery;
    DBCursor cursor;
    if (query != null) {
        dbObjQuery = (DBObject) JSON.parse(query);
        cursor = this.getCollection().find(dbObjQuery);
    } else {
        cursor = this.getCollection().find();
    }

    List<DBObject> dbObjects = cursor.toArray();
    for (DBObject dbObject : dbObjects) {
        Task task = morphia.fromDBObject(Task.class, dbObject);
        tasks.add(task);
    }
    return tasks;
}

From source file:edu.slu.filter.ClientInfoRecorder.java

@Override
protected String doIntercept(ActionInvocation ai) throws Exception {
    HttpServletRequest request = ServletActionContext.getRequest();
    String requestIP = request.getRemoteAddr();
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
    //check if the domain name and ip is in database
    BasicDBObject query = new BasicDBObject();
    query.append("ip", requestIP);
    DB db = MongoDBUtil.getDb();
    DBCollection coll = db.getCollection(Constant.COLLECTION_ACCEPTEDSERVER);
    DBCursor cursor = coll.find(query);
    List<DBObject> ls_results = cursor.toArray();
    if (ls_results.size() > 0) {
        //System.out.println("[Not Modifying Data Request]: ip ========== " + requestIP + "@" + sdf.format(new Date()) + " +++++ From Registered Server");
    } else {
        //System.out.println("[Not Modifying Data Request]: ip ========== " + requestIP + "@" + sdf.format(new Date()) + " +++++ Not From Registered Server");
    }
    return ai.invoke();
}