List of usage examples for com.mongodb DBCursor size
public int size()
From source file:fr.eolya.crawler.queue.mongodb.MongoDBSourceItemsQueue.java
License:Apache License
/**
 * Counts the documents matching the given JSON query string.
 *
 * @param query JSON query, converted to a BasicDBObject filter
 * @return number of matching documents
 */
private long count(String query) {
    // The original allocated an empty BasicDBObject that was immediately
    // overwritten (dead store), and opened a DBCursor only to call size().
    BasicDBObject docsearch = MongoDBHelper.JSON2BasicDBObject(query);
    // DBCursor.size() issues the same server-side count command that
    // DBCollection.count(query) does; count() avoids the cursor allocation.
    return coll.getColl().count(docsearch);
}
From source file:fr.eolya.crawler.queue.mongodb.MongoDBSourceQueue.java
License:Apache License
/**
 * Returns the number of queued source documents matching the configured
 * filter flags.
 *
 * @return number of matching documents
 */
public long size() {
    String query = getQuery(test, interactiveOnly, suspiciousOnly, accountId, sourceId, engineId);
    BasicDBObject docsearch = MongoDBHelper.JSON2BasicDBObject(query);
    // The original declared "DBCursor cur = null" (dead init) and opened a
    // cursor solely to call size(); a server-side count is equivalent and
    // never materializes a cursor that would otherwise need closing.
    return coll.getColl().count(docsearch);
}
From source file:gov.wa.wsdot.cms.App.java
License:Open Source License
// NOTE(review): this snippet was collapsed by the scraping tool. Several "//"
// line comments now swallow the code that follows them on the same physical
// line (e.g. "// Now traverse ... if (node.hasChildNodes()) {"), and at least
// one line break falls inside a "//" comment, so this text is NOT compilable
// as-is; it must be re-wrapped against the original file before reuse.
// NOTE(review): the method reads several members not visible in this excerpt
// (postingCollection, channelCollection, nodeGuidHashMap, postingsHashMap) --
// presumably static fields of gov.wa.wsdot.cms.App; confirm in the full source.
// NOTE(review): the Javadoc lists @param postingCollection and @param collection,
// but neither appears in the signature -- stale documentation to be fixed upstream.
// NOTE(review): postingCursor.size() is called after the cursor has already been
// iterated; DBCursor.size() issues an extra count query, so a boolean "found"
// flag set inside the while loop would avoid a server round trip. Neither
// postingCursor nor channelCursor is closed -- cursor leak; TODO confirm and fix
// in the original file.
/** * Recursive call to traverse Node datatypes. * //from ww w  . j a  v a2s  .co m * @param node node we are processing * @param pageGuid unique id of the page * @param resourcesHashMap * @param controlsHashMap * @param genericPropertiesHashMap * @param postingCollection * @param collection */ private static void traverseNodes(Node node, String pageGuid, HashMap<String, HashMap<String, String>> controlsHashMap, HashMap<String, ResourceItem> resourcesHashMap, HashMap<String, String> genericPropertiesHashMap) { NamedNodeMap attributes; NamedNodeMap blobAttributes; String placeholderDefinition; ResourceItem resourceItem; String nodeGuid; String resourceBlobId; // Now traverse the rest of the tree in depth-first order. if (node.hasChildNodes()) { // Get the children in a list. NodeList nl = node.getChildNodes(); // How many nodes? int size = nl.getLength(); for (int i = 0; i < size; i++) { if (nl.item(i).getNodeType() == Node.ELEMENT_NODE) { /** * Controls */ if (nl.item(i).getNodeName().equalsIgnoreCase("Control")) { placeholderDefinition = nl.item(i).getAttributes().getNamedItem("Name").getNodeValue(); Element propertyElement = (Element) nl.item(i); int numProperties = propertyElement.getElementsByTagName("Property").getLength(); HashMap<String, String> property = new HashMap<String, String>(); for (int j = 0; j < numProperties; j++) { attributes = propertyElement.getElementsByTagName("Property").item(j).getAttributes(); if (attributes.getNamedItem("Value") != null) { property.put(attributes.getNamedItem("InternalIDResource").getNodeValue(), attributes.getNamedItem("Value").getNodeValue()); } else { String nameAttribute = attributes.getNamedItem("Name").getNodeValue(); if (nameAttribute.matches("ControlProp(\\d+)")) { property.put(attributes.getNamedItem("InternalIDResource").getNodeValue(), attributes.getNamedItem("Name").getNodeValue()); } } } if (!property.isEmpty()) { controlsHashMap.put(placeholderDefinition, property); } /** * Resources */ } else if 
(nl.item(i).getNodeName().equalsIgnoreCase("Resources")) { Element propertyElement = (Element) nl.item(i); int numResources = propertyElement.getElementsByTagName("Resource").getLength(); for (int j = 0; j < numResources; j++) { resourceItem = new ResourceItem(); attributes = propertyElement.getElementsByTagName("Resource").item(j).getAttributes(); if (attributes.getNamedItem("IsLink").getNodeValue().equals("1")) { if (attributes.getNamedItem("URL") != null) { // // <Resource URL="http://www..." ResourceBlobId="0" IsLink="1" Name="NewResource2" InternalID="2"/> // resourceItem.setIsLink(attributes.getNamedItem("IsLink").getNodeValue()); resourceItem.setUrl(attributes.getNamedItem("URL").getNodeValue()); } else { // // <Resource ResourceBlobId="0" NodeGuid="{1027105F-...}" IsLink="1" Name="NewResource3" InternalID="3"/> // // NodeGuid references Posting GUID attribute in Cha.xml file which references the Page GUID in the Fol.xml file // via the Shortcut attribute. How do I resolve this with an SDO export? HashMap? MongoDB? // // NodeGUID references the CMS database Node table and NodeGUID column. In that table there // is a FollowGUID column which points to the Node row which has the Name of the page. // Build the channel guids from channel containers already in the database. 
String nodeGUID = attributes.getNamedItem("NodeGuid").getNodeValue(); BasicDBObject query = new BasicDBObject("NodeGUID", nodeGUID); DBCursor postingCursor = postingCollection.find(query); String guid = ""; String postingName = "default"; String channelName = ""; String location = ""; String rootRelativeURL = ""; while (postingCursor.hasNext()) { DBObject dbObject = postingCursor.next(); guid = dbObject.get("ParentGUID").toString(); postingName = dbObject.get("Name").toString(); } if (postingCursor.size() == 0) { guid = nodeGUID; } query = new BasicDBObject("guid", guid); DBCursor channelCursor = channelCollection.find(query); while (channelCursor.hasNext()) { DBObject dbObject = channelCursor.next(); channelName = dbObject.get("name").toString(); location = dbObject.get("location").toString(); } if (channelName.isEmpty() && location.isEmpty()) { rootRelativeURL = ""; } else if (location.isEmpty()) { rootRelativeURL = "/" + channelName + "/" + postingName + ".htm"; } else { rootRelativeURL = "/" + location.replace(";", "/") + "/" + channelName + "/" + postingName + ".htm"; } resourceItem.setIsLink(attributes.getNamedItem("IsLink").getNodeValue()); resourceItem.setUrl(rootRelativeURL); } resourcesHashMap.put(attributes.getNamedItem("InternalID").getNodeValue(), resourceItem); } else { Node blobInfo = propertyElement.getElementsByTagName("Resource").item(j); Element blobInfoElement = (Element) blobInfo; if (blobInfoElement.hasChildNodes()) { // // <Resource ResourceBlobId="24041" NodeGuid="{7CE36259-...}" IsLink="0" Name="btn_video_smaller1" InternalID="1146"> // <BlobInfo FileName="resF7F9C8ACD9... 
.jpg" Size="1578" Guid="{F7F9C8AC-...}" FileExtension="jpg" InternalID="24041"/> // </Resource> // blobAttributes = blobInfoElement.getElementsByTagName("BlobInfo").item(0) .getAttributes(); resourceItem.setIsLink(attributes.getNamedItem("IsLink").getNodeValue()); resourceItem.setName(attributes.getNamedItem("Name").getNodeValue()); resourceItem .setFilename(blobAttributes.getNamedItem("FileName").getNodeValue()); resourceItem.setFileExtension( blobAttributes.getNamedItem("FileExtension").getNodeValue()); // Store unique reference to resource item keyed by NodeGuid. // If NodeGuid is not already in the HashMap then store a reference to it. if (attributes.getNamedItem("NodeGuid") != null) { nodeGuid = attributes.getNamedItem("NodeGuid").getNodeValue(); if (nodeGuidHashMap.get(nodeGuid) == null) { nodeGuidHashMap.put(nodeGuid, resourceItem); } // // <Resource ResourceBlobId="104084" IsLink="0" Name="SR510toSR512WEB" InternalID="3"> // <BlobInfo FileName="resDD0CDE054B....pdf" Size="257303" Guid="{DD0CDE05-...}" FileExtension="pdf" InternalID="104084" /> // </Resource> // // If the resource is stored locally in the page there is no NodeGuid to reference. // Use the ResourceBlobId as a fallback. } else { resourceBlobId = attributes.getNamedItem("ResourceBlobId").getNodeValue(); if (nodeGuidHashMap.get(resourceBlobId) == null) { nodeGuidHashMap.put(resourceBlobId, resourceItem); } } } else { // // <Resource ResourceBlobId="24041" NodeGuid="{7CE36259-...}" IsLink="0" Name="btn_video_smaller94" InternalID="1147"/> // // Duplicate ResourceBlobId referencing existing link to resource on the page. // Store unique reference to resource item keyed on NodeGuid. 
if (attributes.getNamedItem("NodeGuid") != null) { nodeGuid = attributes.getNamedItem("NodeGuid").getNodeValue(); resourceItem.setIsLink(nodeGuidHashMap.get(nodeGuid).getIsLink()); resourceItem.setName(nodeGuidHashMap.get(nodeGuid).getName()); resourceItem.setFilename(nodeGuidHashMap.get(nodeGuid).getFilename()); resourceItem .setFileExtension(nodeGuidHashMap.get(nodeGuid).getFileExtension()); // // <Resource ResourceBlobId="104084" IsLink="0" Name="SR510toSR512WEB1" InternalID="6" /> // // No NodeGuid. Use ResourceBlobId. } else { resourceBlobId = attributes.getNamedItem("ResourceBlobId").getNodeValue(); resourceItem.setIsLink(nodeGuidHashMap.get(resourceBlobId).getIsLink()); resourceItem.setName(nodeGuidHashMap.get(resourceBlobId).getName()); resourceItem.setFilename(nodeGuidHashMap.get(resourceBlobId).getFilename()); resourceItem.setFileExtension( nodeGuidHashMap.get(resourceBlobId).getFileExtension()); } } resourcesHashMap.put(attributes.getNamedItem("InternalID").getNodeValue(), resourceItem); } } /** * GenericProperties */ } else if (nl.item(i).getNodeName().equalsIgnoreCase("GenericProperties")) { Element propertyElement = (Element) nl.item(i); int numProperties = propertyElement.getElementsByTagName("Property").getLength(); for (int j = 0; j < numProperties; j++) { attributes = propertyElement.getElementsByTagName("Property").item(j).getAttributes(); // Posting summary. Used in News Items and varioius RSS feeds for mobile apps and GovDelivery content. 
if (attributes.getNamedItem("Name").getNodeValue().equalsIgnoreCase("_Description")) { if (attributes.getNamedItem("ValueLong") != null) { postingsHashMap.get(pageGuid) .setDescription(attributes.getNamedItem("ValueLong").getNodeValue()); } else if (attributes.getNamedItem("Value") != null) { postingsHashMap.get(pageGuid) .setDescription(attributes.getNamedItem("Value").getNodeValue()); ; } else { postingsHashMap.get(pageGuid).setDescription(""); } // Otherwise, store everything else for processing in the individual template model. } else { if (attributes.getNamedItem("Value") != null) { genericPropertiesHashMap.put(attributes.getNamedItem("Name").getNodeValue(), attributes.getNamedItem("Value").getNodeValue()); } else { genericPropertiesHashMap.put(attributes.getNamedItem("Name").getNodeValue(), ""); } } } } // Recursive call to traverse nodes. traverseNodes(nl.item(i), pageGuid, controlsHashMap, resourcesHashMap, genericPropertiesHashMap); } } } }
From source file:jahspotify.storage.statistics.MongoDBHistoricalStorage.java
License:Apache License
/**
 * Looks up the stored play history for a single track.
 *
 * @param trackLink link whose URI identifies the track
 * @return the deserialized TrackHistory, or null when there is not exactly
 *         one matching history document (zero or multiple matches)
 */
@Override
public TrackHistory getHistory(final Link trackLink) {
    final DBCollection tracks = _db.getCollection("history");
    final DBObject query = new BasicDBObject("id", trackLink.getUri());
    final DBCursor dbObjects = tracks.find(query);
    try {
        // size() counts every match; only a unique hit is deserialized,
        // preserving the original "exactly one" contract.
        if (dbObjects.size() == 1) {
            return _gson.fromJson(JSON.serialize(dbObjects.next()), TrackHistory.class);
        }
        return null;
    } finally {
        // The original never closed the cursor; release it on every path.
        dbObjects.close();
    }
}
From source file:jahspotify.storage.statistics.MongoDBHistoricalStorage.java
License:Apache License
// NOTE(review): the returned AbstractCollection wraps a live DBCursor, so
// iterator() is effectively single-use (a DBCursor cannot be re-iterated) and
// the cursor is never explicitly closed here -- presumably MongoDBHistoryCursor
// takes ownership; confirm that it closes the cursor when exhausted.
// NOTE(review): size() delegates to DBCursor.size(), which issues a count
// query on every call; callers invoking size() repeatedly pay a server round
// trip each time. Caching the value would change freshness semantics -- TODO
// confirm intent before changing.
// NOTE(review): the historyCriterias varargs parameter is accepted but never
// read in this excerpt -- filtering appears unimplemented or handled elsewhere.
@Override public Collection<TrackHistory> getHistory(final int index, final int count, final HistoryCriteria... historyCriterias) { final DBCollection tracks = _db.getCollection("history"); final DBCursor dbObjects = tracks.find(); dbObjects.skip(index);/*from w ww .  j a  v  a  2s .c o  m*/ dbObjects.limit(count); final BasicDBObject orderBy = new BasicDBObject(); orderBy.put("startTime", -1); dbObjects.sort(orderBy); return new AbstractCollection<TrackHistory>() { @Override public Iterator<TrackHistory> iterator() { return new MongoDBHistoryCursor(dbObjects); } @Override public int size() { return dbObjects.size(); } }; }
From source file:logica.MovieSessionBean.java
public List<Movie> getMovies() { List<Movie> movies = new ArrayList(); DBCursor cur = movieColl.find(); System.out.println("getMovies: Found" + cur.size() + "movie(s)"); for (DBObject dbo : cur.toArray()) { movies.add(Movie.fromDBObject(dbo)); }// w w w .j a v a 2 s .co m return movies; }
From source file:lucenetools.DocIndexerMongo.java
License:Apache License
// NOTE(review): this snippet was collapsed by the scraping tool; re-wrap
// against the original file before reuse.
// NOTE(review): cursor.size() issues a count query just to detect an empty
// result; checking cursor.hasNext() before the loop would avoid the extra
// round trip -- TODO confirm against the original.
// NOTE(review): the "noLimit" emulation works by repeatedly incrementing
// opts.limit inside the loop, mutating the shared Options object as a side
// effect; opts.limit is also incremented when a tweet fails the filter so the
// loop keeps scanning. Fragile but intentional -- document in the original.
// NOTE(review): throwing UnsupportedOperationException to jump to its own
// catch block is exception-as-control-flow; a plain if/else print would be
// clearer.
/** * Indexes the given Mongo collection using the given writer. Each tweet * will be indexed as a separate document. * * @param writer writer to use for indexing documents * @param opts options object * @param analyzer analyzer specified from config file * @throws MongoTimeoutException if the mongo instance cannot be connected to * @throws IOException if there is an issue with the document * @throws NoSuchMethodException if method not found * @throws IllegalAccessException cannot access class * @throws IllegalArgumentException wrong set of arguments * @throws InvocationTargetException instantiated class has a problem */// w  w  w  .  ja v  a 2  s  . c  o  m static void indexDocs(IndexWriter writer, Options opts, Analyzer analyzer) throws MongoTimeoutException, IOException, NoSuchMethodException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { DBCursor cursor = getCursor(opts); boolean noLimit = false; int origLimit = opts.limit; if (-1 == opts.limit) { noLimit = true; opts.limit = 1; } ArrayList<String> include = getKeywords(opts.include); ArrayList<String> exclude = getKeywords(opts.exclude); String tweet, tweetLower, tweetIndex; try { if (cursor.size() == 0) { throw new UnsupportedOperationException(); } else { for (int i = 0; i < opts.limit; i++) { if (cursor.hasNext()) { // read the next tweet from mongo DBObject tweetJSON = cursor.next(); tweet = tweetJSON.get("text").toString(); tweetLower = tweet.toLowerCase(); if (eval(tweetLower, include, exclude)) { // the tweet index is the MongoID associated with that tweet tweetIndex = tweetJSON.get("_id").toString(); // identify the db path to retrieve the tweet String path = getPath(opts) + "_" + tweetIndex; // use the analyzer's package name to find out if there is a custom // implementation of the Utils class with the addDocument function String packageName = opts.analyzerName.split("\\.")[0]; try { Class<?> cla = Class.forName(packageName + ".Utils"); Method method = 
cla.getMethod("addDocument", String.class, String.class, IndexWriter.class); method.invoke(null, tweetJSON.toString(), path, writer); // use the default Utils implementaiton } catch (ClassNotFoundException | NoSuchMethodException cnfe) { Utils.addDocument(tweetJSON.toString(), path, writer); } ++docsIndexed; if (0 == (docsIndexed % 1000)) { System.out.print("Docs analyzed and indexed: " + docsIndexed + "\r"); System.out.flush(); } // increment limit to emulate no limit being used if (noLimit) { ++opts.limit; } } else { //ensure that the limit is reached ++opts.limit; } } else { if (origLimit != -1) System.out.println("Maximum available documents is " + (i + 1) + "; unable to reach desired limit of " + origLimit + "."); break; } } } if (origLimit != -1) System.out.printf("Searched %d tweets to find %d that met the filter requirements.\n", opts.limit - 1, origLimit); } catch (MongoTimeoutException mte) { System.out.println( "Timed out while waiting to connect." + "\nEnsure that the host/port/database/collection " + "configuration in your YAML file is correct."); } catch (UnsupportedOperationException uoe) { System.out.println("\nWARNING: Cursor returned with 0 documents - " + "ensure that the database/collection \n" + "configuration in your YAML file is correct."); } finally { cursor.close(); } }
From source file:lucenetools.DocIndexerMongo.java
License:Apache License
// NOTE(review): same control-flow pattern as indexDocs in this file: the
// shared Options object is mutated (opts.limit incremented) inside the loop to
// emulate "no limit" and to keep scanning past filtered-out tweets, and an
// UnsupportedOperationException is thrown purely to reach its own catch block
// for the empty-cursor message. cursor.size() costs an extra count query;
// hasNext() would detect emptiness without it -- TODO confirm in the original.
/** * Show analysis for tweets//from w w  w  . j a  v a  2 s  .co m * * @param n number of tweets to show the analysis for * @param opts options object * @param analyzer analyzer specified from config file * @throws IOException if there is an issue with the document */ public static void showTweetAnalysis(int n, Options opts, Analyzer analyzer) throws IOException { DBCursor cursor = getCursor(opts); boolean noLimit = false; if (-1 == opts.limit) { noLimit = true; opts.limit = 1; } ArrayList<String> include = getKeywords(opts.include); ArrayList<String> exclude = getKeywords(opts.exclude); System.out.println("Analysis results for Mongo client" + ", " + n + " tweets max:\n"); int count = 0; String tweet, tweetLower; try { if (cursor.size() == 0) { throw new UnsupportedOperationException(); } else { for (int i = 0; i < opts.limit; i++) { if (cursor.hasNext()) { // read the next tweet from mongo DBObject tweetJSON = cursor.next(); tweet = tweetJSON.get("text").toString(); tweetLower = tweet.toLowerCase(); //check for keywords if (eval(tweetLower, include, exclude)) { System.out.println(tweet); DebugAnalyzer.showAnalysisFromStream(new StringReader(tweet), analyzer); System.out.println(); ++count; if (count >= n) { System.out.printf( "Searched %d tweets to find %d that met the filter requirements.\n", i + 1, n); break; } if (noLimit) ++opts.limit; } else { ++opts.limit; } } } } } catch (MongoTimeoutException mte) { System.out.println( "Timed out while waiting to connect." + "\nEnsure that the host/port/database/collection " + "configuration in your YAML file is correct."); } catch (UnsupportedOperationException uoe) { System.out.println("\nWARNING: Cursor returned with 0 documents - " + "ensure that the database/collection \n" + "configuration in your YAML file is correct."); } finally { cursor.close(); } }
From source file:me.carbou.mathieu.tictactoe.db.DBCollection.java
License:Apache License
// NOTE(review): cursor.size() issues a separate count query just to obtain the
// spliterator size estimate; if the collection changes between that count and
// iteration, the SIZED characteristic will be wrong -- consider
// Long.MAX_VALUE with no SIZED flag, or accept the race. TODO confirm.
// NOTE(review): the cursor is closed only when the stream is fully drained
// (tryAdvance returning false); a caller that short-circuits (limit, findFirst)
// or abandons the stream leaks the cursor. Registering stream.onClose(cursor::close)
// would cover that -- confirm against callers before changing.
// NOTE(review): addWhere/preFind/postFind and DB.NO_LIMIT are project helpers
// not visible in this excerpt; semantics assumed from names only.
public Stream<Map<String, Object>> find(Map where, Map fields, Map sort, Function<Map, Map> transform, int limit, int skip) { final DBCursor cursor = getCollection().find(new BasicDBObject(addWhere(where)), new BasicDBObject(preFind(fields))); if (!sort.isEmpty()) cursor.sort(new BasicDBObject(sort)); if (skip > 0) cursor.skip(skip);/*w w w.j a  v a2  s  .  co m*/ if (limit > DB.NO_LIMIT) cursor.limit(limit); int est = cursor.size(); Spliterator<Map<String, Object>> spliterator = new Spliterators.AbstractSpliterator<Map<String, Object>>( est, NONNULL | ORDERED | SIZED | IMMUTABLE) { @Override public boolean tryAdvance(Consumer<? super Map<String, Object>> action) { if (cursor.hasNext()) { action.accept(postFind(where, cursor.next(), transform)); return true; } else { cursor.close(); return false; } } }; return StreamSupport.stream(spliterator, false); }
From source file:mongodb.findPagging.java
public static void pageResults(DBCollection collection, Integer skip) { BasicDBObject query = new BasicDBObject("first", "w"); DBCursor cursor = collection.find(query); cursor.sort(new BasicDBObject("word", 1)); cursor.limit(10);//from ww w.j av a 2 s . c o m cursor.skip(skip); System.out.println("Page " + new Integer(skip + 1).toString() + " to " + new Integer(skip + cursor.size()).toString() + ":"); findPagging.displayCursor(cursor); if (cursor.size() == 10) { findPagging.pageResults(collection, skip + 10); } }