Example usage for com.mongodb BasicDBObject getLong

List of usage examples for com.mongodb BasicDBObject getLong

Introduction

On this page you can find example usage for com.mongodb BasicDBObject getLong.

Prototype

public long getLong(final String key) 

Document

Returns the value of a field as a long.
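
As a minimal, self-contained sketch of the call (the database and collection names, the field names, and the document shape are illustrative assumptions, not taken from any of the sources below): getLong(key) narrows the stored numeric value to a long, while the two-argument overload inherited from BasicBSONObject, getLong(key, def), falls back to a default when the field is absent; the single-argument form would fail (a NullPointerException in the legacy driver) in that case.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;

public class GetLongExample {
    public static void main(String[] args) throws Exception {
        MongoClient mongoClient = new MongoClient();           // assumes a mongod on localhost
        DB db = mongoClient.getDB("test");                     // hypothetical database name
        DBCollection events = db.getCollection("events");      // hypothetical collection name

        BasicDBObject doc = (BasicDBObject) events.findOne();  // documents decode as BasicDBObject
        if (doc != null) {
            // Read a numeric field as a long; fails if "timestamp" is missing
            long timestamp = doc.getLong("timestamp");

            // Overload with a default; returns -1 when "retries" is missing
            long retries = doc.getLong("retries", -1L);

            System.out.println(timestamp + " / " + retries);
        }
        mongoClient.close();
    }
}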

Usage

From source file: fr.eolya.crawler.queue.mongodb.MongoDBSourceItemsQueue.java

License: Apache License

/** 
 * Read queue state
 * 
 * starttime -> timestamp (log value) of the current crawl start
 * 
 * for an item of the collection :
 *       timestamp < starttime   => not in queue
 *       timestamp > starttime   => in queue
 *       timestamp = starttime   => done
 * 
 * @return last start time
 */
private Long readState() {

    // read start time
    BasicDBObject docsearch = new BasicDBObject();
    docsearch.put("_id", new ObjectId(stateId));
    DBCursor cur = coll.getColl().find(docsearch);
    if (cur.count() > 1)
        return null;
    if (cur.count() == 0)
        return null;

    BasicDBObject doc = (BasicDBObject) cur.next();
    startTime = doc.getLong("starttime");

    // read sizes
    if (startTime == 0 || rescan || startDepth > 0) {
        // TODO v4 : in fact startTime never = 0 !!!
        if (!rescan) {
            // previous crawl terminated fine

            if (startDepth > 0) {
                //String queryTimeStamp =   "{\"" + timestampFieldName + "\": {\"$ne\": " + String.valueOf(startTime) + "}}";      
                //String queryMode = "{\"crawl_mode\":a}";         
                //String query = "{\"$and\": [" + queryTimeStamp + ", " + queryMode + "]}";
                String query = "{\"depth\":" + String.valueOf(startDepth) + "}";
                size = count(query);
            } else {
                size = 0;
            }
        } else {
            // get queue size : timestamp != starttime => in queue
            //String query = "{\"" + timestampFieldName + "\": {\"$ne\": " + String.valueOf(startTime) + "}}";      

            String queryTimeStamp = "{\"" + timestampFieldName + "\": {\"$ne\": " + String.valueOf(startTime)
                    + "}}";
            String queryMode = "{\"crawl_mode\":\"a\"}";
            String query200 = "{\"crawl_status\":200}";
            String query = "{\"$and\": [" + queryTimeStamp + ", " + queryMode + ", " + query200 + "]}";

            size = count(query);
        }
        doneCount = 0;
    } else {
        // previous crawl was not terminated

        // get queue size : timestamp > starttime => in queue
        String query = "{\"" + timestampFieldName + "\": {\"$gt\": " + String.valueOf(startTime) + "}}";
        //docsearch = MongoDBHelper.JSON2BasicDBObject(query);
        //cur = coll.getColl().find(docsearch);
        //size = cur.size();
        size = count(query);

        // get done count : timestamp = starttime => done
        query = "{\"" + timestampFieldName + "\": " + String.valueOf(startTime) + "}";
        //docsearch = MongoDBHelper.JSON2BasicDBObject(query);
        //cur = coll.getColl().find(docsearch);
        //doneCount = cur.size();
        doneCount = count(query);
    }
    return startTime;
}
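
The queue-state convention described in the Javadoc above (timestamp > starttime means still in queue, timestamp = starttime means done) could also be expressed with BasicDBObject queries rather than the JSON strings passed to count(). The following is a hedged sketch, not part of the original class; the collection and field names ("items", "timestamp", "starttime") are illustrative stand-ins for its configurable fields.

static void printQueueState(DBCollection items, BasicDBObject stateDoc) {
    // Timestamp of the current crawl start, read with getLong as in readState() above
    long startTime = stateDoc.getLong("starttime");

    // timestamp > starttime  => still in queue
    long inQueue = items.count(new BasicDBObject("timestamp", new BasicDBObject("$gt", startTime)));

    // timestamp = starttime  => already processed during this crawl
    long done = items.count(new BasicDBObject("timestamp", startTime));

    System.out.println("in queue: " + inQueue + ", done: " + done);
}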

From source file: fr.eolya.crawler.queue.mongodb.MongoDBSourceItemsQueue.java

License: Apache License

/** 
 * Push a new item
 * 
 * @return success or not
 */
public boolean push(Map<String, Object> item) throws QueueIncoherenceException, QueueInvalidDataException {

    boolean ret = true;

    BasicDBObject doc = new BasicDBObject(item);

    String keyValue = doc.getString(uniqueKeyFieldName);
    String depth = doc.getString(depthFieldName);
    String sourceId = doc.getString(sourceIdFieldName);

    if (sourceId == null || keyValue == null || depth == null)
        throw new QueueInvalidDataException("Missing fields in json");
    if (Integer.parseInt(sourceId) != this.sourceId)
        throw new QueueInvalidDataException("Invalid source id in json");

    String referer = doc.getString(refererFieldName);

    // Get existing item in queue
    String currentDepth = null;
    String currentReferers = null;
    long currentTimestamp = 0;
    BasicDBObject docsearch = new BasicDBObject();
    docsearch.put(sourceIdFieldName, Integer.parseInt(sourceId));
    docsearch.put(hashFieldName, keyValue.hashCode());

    synchronized (collMonitor) {
        BasicDBObject curDoc = null;
        DBCursor cur = coll.getColl().find(docsearch);
        if (cur.count() > 0) {
            while (cur.hasNext() && curDoc == null) {
                curDoc = (BasicDBObject) cur.next();
                // Guard against hash collisions: compare against the candidate read from the cursor
                if (!keyValue.equals(curDoc.getString(uniqueKeyFieldName))) {
                    curDoc = null;
                }
            }
            if (curDoc != null) {
                currentDepth = curDoc.getString(depthFieldName);
                currentReferers = curDoc.getString(referersFieldName);
                currentTimestamp = curDoc.getLong(timestampFieldName);

                /*
                 * Remember : for an item of the collection :
                 *       timestamp < starttime   => not in queue
                 *       timestamp > starttime   => in queue
                 *       timestamp = starttime   => done
                 */
                if ((Long.parseLong(depth) >= Long.parseLong(currentDepth)) && (currentTimestamp >= startTime))
                    return false;
            }
        }

        // build new doc
        doc.put(hashFieldName, keyValue.hashCode());
        doc.put(timestampFieldName, new Date().getTime());

        if (referer != null) {
            if (currentReferers == null) {
                currentReferers = referer;
            } else {
                currentReferers += "/n" + referer;
            }
        }
        if (currentReferers != null) {
            doc.put(referersFieldName, currentReferers);
        }
        if (curDoc != null) {

            doc.put("content_type", curDoc.get("content_type"));
            doc.put("crawl_last_time", curDoc.get("crawl_last_time"));
            doc.put("condget_last_modified", curDoc.get("condget_last_modified"));
            doc.put("condget_etag", curDoc.get("condget_etag"));

            coll.update(curDoc, doc);
            // TODO : decrease done size in some case ???
        } else {
            doc.put(createdFieldName, new Date().getTime());
            coll.add(doc);
        }
        size++;
        return ret;
    }
}

From source file: japura.Tribes.Tribe.java

License: BSD License

public Block[] getEmeralds() {
    if (emeraldCache != null)
        return emeraldCache;
    Tribes.log("rebuilding emerald table for " + name);
    BasicDBObject query = new BasicDBObject();
    query.put("tribe", name);
    DBCursor cursor = Tribes.getEmeraldTable().find(query);
    int size = cursor.count();
    Block[] blocks = new Block[size];
    double x;
    double y;
    double z;
    World world;
    Location loc;
    BasicDBObject current;
    for (int i = 0; i < size; i++) {
        if (!cursor.hasNext())
            break;
        current = ((BasicDBObject) cursor.next());
        x = current.getLong("X");
        y = current.getLong("Y");
        z = current.getLong("Z");
        world = Bukkit.getWorld(current.getString("world"));
        loc = new Location(world, x, y, z);
        blocks[i] = loc.getBlock();
    }

    emeraldCache = blocks;
    return blocks;
}

From source file: japura.Tribes.Tribe.java

License: BSD License

public Block[] getDiamonds() {

    if (diamondCache != null)
        return diamondCache;
    Tribes.log("rebuilding diamond table for " + name);
    BasicDBObject query = new BasicDBObject();
    query.put("tribe", name);
    DBCursor cursor = Tribes.getDiamondTable().find(query);
    int size = cursor.count();
    Block[] blocks = new Block[size];
    double x;
    double y;
    double z;
    World world;
    Location loc;
    BasicDBObject current;
    for (int i = 0; i < size; i++) {
        current = ((BasicDBObject) cursor.next());
        x = current.getLong("X");
        y = current.getLong("Y");
        z = current.getLong("Z");
        world = Bukkit.getWorld(current.getString("world"));
        loc = new Location(world, x, y, z);
        blocks[i] = loc.getBlock();
    }

    diamondCache = blocks;
    return blocks;
}

From source file: mini_mirc_server.miniIRCHandler.java

public String GetMessage(String username) {
    String ret = "";
    try {
        MongoClient mongoClient = new MongoClient();
        DB db = mongoClient.getDB("mirc");
        DBCollection coll = db.getCollection("inbox");
        BasicDBObject query = new BasicDBObject("target", username);
        JSONObject obj = new JSONObject();
        JSONArray arr = new JSONArray();
        DBCursor cursor = coll.find(query);

        try {
            while (cursor.hasNext()) {
                BasicDBObject temp = (BasicDBObject) cursor.next();
                JSONObject sav = new JSONObject();
                sav.put("target", temp.getString("target"));
                sav.put("username", temp.getString("username"));
                sav.put("channel", temp.getString("channel"));
                sav.put("message", temp.getString("message"));
                sav.put("timestamp", temp.getLong("timestamp"));
                arr.add(sav);
                coll.remove(temp);
            }
            obj.put("msg", arr);
            ret = obj.toJSONString();
        } finally {
            cursor.close();
        }
    } catch (UnknownHostException ex) {
        Logger.getLogger(miniIRCHandler.class.getName()).log(Level.SEVERE, null, ex);
    }
    UpdateLastActive(username);
    return ret;
}

From source file: org.apache.chemistry.opencmis.mongodb.MongodbUtils.java

License: Apache License

public BasicDBObject addNode(BasicDBObject node, BasicDBObject parent) {
    // Update all affected right and left values which are greater than or equal
    // to the parent's right value - we are incrementing to 'make room' for the
    // new node
    db.getCollection(COLLECTION_CONTENT).update(
            new BasicDBObject().append("right", new BasicDBObject().append("$gte", parent.getLong("right"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("right", 2)), false, true);

    db.getCollection(COLLECTION_CONTENT).update(
            new BasicDBObject().append("left", new BasicDBObject().append("$gte", parent.getLong("right"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("left", 2)), false, true);

    // Finally insert the node into the created space in the tree, under the parent
    node.append("left", parent.getLong("right")).append("right", parent.getLong("right") + 1).append("level",
            parent.getLong("level") + 1);

    WriteResult result = db.getCollection(COLLECTION_CONTENT).insert(node);
    if (result.getN() != 1) {
        throw new MongoException("Error while inserting the node into the database.");
    } else {
        return node.append("_id", result.getUpsertedId());
    }
}
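
The left/right bookkeeping above follows the nested set model: making room before inserting keeps every subtree describable by a single numeric range. As a hedged illustration of why that matters (not part of the original class; the lookup by "name" is a hypothetical example), all descendants of a node can then be fetched with one range query on those values:

// Minimal sketch: fetch every descendant of a node in a nested-set tree.
// Assumes documents carry "left"/"right" fields maintained as in addNode() above.
BasicDBObject parent = (BasicDBObject) db.getCollection(COLLECTION_CONTENT)
        .findOne(new BasicDBObject("name", "someFolder"));   // hypothetical lookup key

DBCursor descendants = db.getCollection(COLLECTION_CONTENT).find(
        new BasicDBObject()
                .append("left", new BasicDBObject("$gt", parent.getLong("left")))
                .append("right", new BasicDBObject("$lt", parent.getLong("right"))));
while (descendants.hasNext()) {
    System.out.println(descendants.next().get("name"));
}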

From source file: org.apache.chemistry.opencmis.mongodb.MongodbUtils.java

License: Apache License

public String getPathToNode(BasicDBObject node, DBCollection collection) {
    StringBuilder path = new StringBuilder();
    // Walk every node whose "left" value is smaller than this node's, ordered by "left",
    // and append its name to the path.
    DBCursor ancestors = collection
            .find(new BasicDBObject().append("left", new BasicDBObject().append("$lt", node.getLong("left"))))
            .sort(new BasicDBObject().append("left", 1));
    while (ancestors.hasNext()) {
        DBObject ancestor = ancestors.next();
        path.append(PATH_SEPARATOR).append(ancestor.get("name").toString());
    }
    return path.toString();
}

From source file: org.apache.chemistry.opencmis.mongodb.MongodbUtils.java

License: Apache License

public void removeNode(BasicDBObject node) {
    // Update all affected right and left values which are greater than
    // the removed node's right value - we are decrementing to close the gap
    // left by the removal of the node
    db.getCollection(COLLECTION_CONTENT).update(
            new BasicDBObject().append("right", new BasicDBObject().append("$gt", node.getLong("right"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("right", -2)), false, true);

    db.getCollection(COLLECTION_CONTENT).update(
            new BasicDBObject().append("left", new BasicDBObject().append("$gt", node.getLong("right"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("left", -2)), false, true);

    // Finally remove the node 

    WriteResult result = db.getCollection(COLLECTION_CONTENT).remove(node);
    if (result.getN() != 1) {
        throw new MongoException("Error while removing the node from the database.");
    }
}

From source file: org.apache.chemistry.opencmis.mongodb.MongodbUtils.java

License: Apache License

public void moveNode(BasicDBObject node, BasicDBObject newParent, DBCollection collection) {

    // Get the left and right values
    Long originalLeft = node.getLong("left");
    Long originalRight = node.getLong("right");
    Long subtreeWidth = originalRight - originalLeft;

    // Compute the new left and right values for the nodeToMove
    Long newLeft = newParent.getLong("right");
    Long newRight = newParent.getLong("right") + subtreeWidth;

    // Make space for the new subtree under the new parent
    collection.update(
            new BasicDBObject().append("right", new BasicDBObject().append("$gte", newParent.get("right"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("right", subtreeWidth + 1)), false,
            true);

    collection.update(
            new BasicDBObject().append("left", new BasicDBObject().append("$gte", newParent.get("right"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("left", subtreeWidth + 1)), false,
            true);

    // Re-fetch the node to move, since the left and right values may have changed
    node = (BasicDBObject) collection.findOne(new BasicDBObject().append("_id", node.get("_id")));

    Long difference = node.getLong("left") - newLeft;
    // Move the old subtree into a new location
    collection.update(
            new BasicDBObject().append("left", new BasicDBObject().append("$gte", node.getLong("left")))
                    .append("right", new BasicDBObject().append("$lte", node.getLong("right"))),
            new BasicDBObject().append("$inc",
                    new BasicDBObject().append("left", 0 - difference).append("right", 0 - difference)),
            false, true);

    // Remove empty space from the parent
    //db.test.update({left:nodeToMove.left-1, right:nodeToMove.right+1}, {right:nodeToMove.left});
    collection.update(new BasicDBObject().append("right", new BasicDBObject().append("$gte", node.get("left"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("right", 0 - subtreeWidth - 1)),
            false, true);
    collection.update(new BasicDBObject().append("left", new BasicDBObject().append("$gte", node.get("left"))),
            new BasicDBObject().append("$inc", new BasicDBObject().append("left", 0 - subtreeWidth - 1)), false,
            true);
}

From source file: org.apache.chemistry.opencmis.mongodb.MongodbUtils.java

License: Apache License

public BasicDBList getNodeAncestors(BasicDBObject node, DBCollection collection) {
    DBCursor ancestors = collection
            .find(new BasicDBObject().append("left", new BasicDBObject().append("$lt", node.getLong("left"))))
            .sort(new BasicDBObject().append("left", 1));
    BasicDBList ancestorsList = new BasicDBList();
    while (ancestors.hasNext()) {
        ancestorsList.add(ancestors.next());
    }
    return ancestorsList;
}