List of usage examples for com.mongodb DBCursor close
@Override public void close()
From source file:org.swissbib.docproc.flink.plugins.DSV11ContentEnrichment.java
License:Open Source License
/**
 * Looks up additional DSV11 enrichment values for the given MARC tag / value pair.
 * <p>
 * Queries the {@code searchCollection} on the field {@code "match" + currentTag},
 * NFC-normalizes every value found under {@code responseField}, concatenates them
 * with the {@code "##xx##"} separator and removes duplicates.
 *
 * @param currentTag the MARC tag being processed (used to build the match field name)
 * @param tagValue   the field value to look up
 * @return the concatenated, de-duplicated DSV11 values, or an empty string when
 *         the plugin is not in production mode, failed to initialize, or no match exists
 */
public String getAdditionalDSV11Values(String currentTag, String tagValue) {
    String toReturn = "";
    if (!inProductionMode)
        return toReturn;
    if (!initialized) {
        initDefaultValues();
        writeLog("late initialized");
    }
    String searchField = "match" + currentTag;
    if (!errorInitializing) {
        // Separator used between concatenated values; its length is needed to
        // strip the trailing occurrence (previously a magic number 6).
        final String separator = "##xx##";
        StringBuilder concatReferences = new StringBuilder();
        BasicDBObject query = new BasicDBObject(searchField, tagValue);
        DBCursor cursor = searchCollection.find(query);
        boolean append = false;
        try {
            while (cursor.hasNext()) {
                DBObject dbObject = cursor.next();
                BasicDBObject gndFields = (BasicDBObject) dbObject.get(responseField);
                for (Map.Entry<String, Object> entry : gndFields.entrySet()) {
                    BasicDBList dbList = (BasicDBList) entry.getValue();
                    for (Object rawValue : dbList) {
                        append = true;
                        // strip non-sorting markers, then normalize to NFC composed form
                        String value = noSort.matcher((String) rawValue).replaceAll("");
                        String composedValue = Normalizer.normalize(value, Normalizer.Form.NFC);
                        concatReferences.append(composedValue).append(separator);
                    }
                }
            }
            toReturn = concatReferences.toString();
            if (append) {
                // drop the trailing separator appended by the last value
                toReturn = toReturn.substring(0, toReturn.length() - separator.length());
                dsv11Processing.info("requested key and value: " + currentTag + " / " + tagValue);
                dsv11Processing.info("added values from DSV11: " + toReturn);
            }
        } catch (Exception excep) {
            // log with the full stack trace instead of printStackTrace() to stderr
            dsv11ProcessingError.debug("Error while looking for DSV11 values: " + excep.getMessage(), excep);
        } finally {
            cursor.close();
        }
        toReturn = duplicateDetection.removeDuplicatesFromMultiValuedField(toReturn);
        dsv11Processing.debug("getReferencesConcatenated: for tag: " + currentTag + " / tagValue " + tagValue
                + "\n" + toReturn);
    }
    return toReturn;
}
From source file:org.swissbib.docproc.flink.plugins.GNDContentEnrichment.java
License:Open Source License
public String getReferencesConcatenated(String gndID) { String toReturn = ""; if (!inProductionMode) return toReturn; if (!initialized) { initDefaultValues();//from ww w. j a va 2s . c om writeLog("late initialized"); } if (!errorInitializing) { StringBuilder concatReferences = new StringBuilder(); BasicDBObject query = null; DBCursor cursor = null; try { query = new BasicDBObject(searchField, gndID); cursor = searchCollection.find(query); boolean append = false; while (cursor.hasNext()) { DBObject dbObject = cursor.next(); BasicDBObject gndFields = (BasicDBObject) dbObject.get(responseField); Set<Map.Entry<String, Object>> keyValues = gndFields.entrySet(); Iterator<Map.Entry<String, Object>> it = keyValues.iterator(); while (it.hasNext()) { Map.Entry<String, Object> entry = it.next(); String key = entry.getKey(); if (simpleTagsToUse.contains(key)) { BasicDBList dbList = (BasicDBList) entry.getValue(); Iterator<Object> gndValues = dbList.iterator(); while (gndValues.hasNext()) { append = true; String value = (String) gndValues.next(); String composedValue = Normalizer.normalize(value, Normalizer.Form.NFC); //System.out.println(composedValue); concatReferences.append(composedValue).append("##xx##"); } } } if (responseFieldMACS != null) { BasicDBObject macsField = (BasicDBObject) dbObject.get(responseFieldMACS); Set<Map.Entry<String, Object>> keyValuesMacs = macsField.entrySet(); Iterator<Map.Entry<String, Object>> itMacs = keyValuesMacs.iterator(); while (itMacs.hasNext()) { Map.Entry<String, Object> entry = itMacs.next(); String key = entry.getKey(); if (simpleTagsToUseForMACS.contains(key)) { BasicDBList dbList = (BasicDBList) entry.getValue(); Iterator<Object> macsValues = dbList.iterator(); StringBuilder macsReferences = new StringBuilder(); boolean appendMACS = false; while (macsValues.hasNext()) { append = true; appendMACS = true; String value = (String) macsValues.next(); String composedValue = Normalizer.normalize(value, Normalizer.Form.NFC); 
//System.out.println(composedValue); //only for logging macsReferences.append(composedValue).append("##xx##"); concatReferences.append(composedValue).append("##xx##"); } if (appendMACS) { String macsValuesForLogging = macsReferences.toString(); macsValuesForLogging = macsValuesForLogging.substring(0, macsValuesForLogging.length() - 6); macsValuesForLogging = duplicateDetection .removeDuplicatesFromMultiValuedField(macsValuesForLogging); macsProcessing.info("additional MACS values for GND " + gndID + " : " + macsValuesForLogging); } } } } } toReturn = concatReferences.toString(); if (append) { toReturn = toReturn.substring(0, toReturn.length() - 6); } } catch (Exception excep) { excep.printStackTrace(); } finally { if (cursor != null) { cursor.close(); } } //to suppress duplicates makes sense because we collect values from GND and MACS and merge them together which might produce duplicates toReturn = duplicateDetection.removeDuplicatesFromMultiValuedField(toReturn); gndProcessing.debug("getReferencesConcatenated: gndID: " + gndID + " / references: " + toReturn); } return toReturn; //old way using the SRU interface of DNB for each request // StringBuilder concatReferences = new StringBuilder(); // // //1. 
get the id we can use to fetch GND record // String tgndid = gndID; // writeLog("tgnid: " + tgndid); // Matcher matcher = idPattern.matcher(tgndid); // boolean append = false; // if (matcher.find()) { // // try { // // tgndid = matcher.group(2); // writeLog("matched number: " + tgndid); // String url = MessageFormat.format(urlToSource,tgndid); // // HttpURLConnection connection = getHTTPConnection (url); // InputStream is = (InputStream) connection.getContent(); // MarcXmlReader marcReader = new MarcXmlReader(is); // // while (marcReader.hasNext()) { // Record record = marcReader.next(); // // Iterator iter = record.getDataFields().iterator(); // // while (iter.hasNext()) { // DataField df = (DataField)iter.next(); // // for (GNDTagValues tags: tagsToUse) { // if(df.getTag().equalsIgnoreCase(tags.tagValue)) { // append = true; // concatReferences.append(df.getSubfield('a').getData()).append("##xx##"); // } // } // // } // // } // // // if (null != is) { // is.close(); // } // // }catch (IOException ioEx) { // ioEx.printStackTrace(); // gndProcessingError.error("getReferencesConcatenated","IOException ",ioEx); // //ioEx.printStackTrace(); // // } catch (Exception exc) { // exc.printStackTrace(); // gndProcessingError.error("getReferencesConcatenated","Exception ",exc); // } catch (Throwable thr) { // thr.printStackTrace(); // gndProcessingError.error("getReferencesConcatenated","Throwable ",thr); // } // // // // } // // // String toReturn = concatReferences.toString(); // if (append) { // toReturn = toReturn.substring(0,toReturn.length()-6); // gndProcessing.info("getReferencesConcatenated", "gndID: " + gndID + " / references: " + toReturn); // } // // writeLog("toReturn: " + toReturn); // return toReturn; }
From source file:org.vertx.java.busmods.persistor.MongoPersistor.java
License:Apache License
private void sendBatch(Message<JsonObject> message, final DBCursor cursor, final int max) { int count = 0; JsonArray results = new JsonArray(); while (cursor.hasNext() && count < max) { DBObject obj = cursor.next();//from w ww. j a va 2 s. c o m String s = obj.toString(); JsonObject m = new JsonObject(s); results.add(m); count++; } if (cursor.hasNext()) { JsonObject reply = createBatchMessage("more-exist", results); // Set a timeout, if the user doesn't reply within 10 secs, close the cursor final long timerID = vertx.setTimer(10000, new Handler<Long>() { public void handle(Long timerID) { container.getLogger().warn("Closing DB cursor on timeout"); try { cursor.close(); } catch (Exception ignore) { } } }); message.reply(reply, new Handler<Message<JsonObject>>() { public void handle(Message msg) { vertx.cancelTimer(timerID); // Get the next batch sendBatch(msg, cursor, max); } }); } else { JsonObject reply = createBatchMessage("ok", results); message.reply(reply); cursor.close(); } }
From source file:org.vertx.mods.MongoPersistor.java
License:Apache License
private void sendBatch(Message<JsonObject> message, final DBCursor cursor, final int max) { int count = 0; JsonArray results = new JsonArray(); while (cursor.hasNext() && count < max) { DBObject obj = cursor.next();/*from w w w . j a va 2s. c o m*/ String s = obj.toString(); JsonObject m = new JsonObject(s); results.add(m); count++; } if (cursor.hasNext()) { JsonObject reply = createBatchMessage("more-exist", results); // Set a timeout, if the user doesn't reply within 10 secs, close the cursor final long timerID = vertx.setTimer(10000, new Handler<Long>() { public void handle(Long timerID) { container.getLogger().warn("Closing DB cursor on timeout"); try { cursor.close(); } catch (Exception ignore) { } } }); message.reply(reply, new Handler<Message<JsonObject>>() { public void handle(Message<JsonObject> msg) { vertx.cancelTimer(timerID); // Get the next batch sendBatch(msg, cursor, max); } }); } else { JsonObject reply = createBatchMessage("ok", results); message.reply(reply); cursor.close(); } }
From source file:org.vertx.osgi.mod.mongo.MongoPersistor.java
License:Apache License
private void sendBatch(Message<JsonObject> message, final DBCursor cursor, final int max) { int count = 0; JsonArray results = new JsonArray(); while (cursor.hasNext() && count < max) { DBObject obj = cursor.next();/*ww w . j av a2 s.c om*/ String s = obj.toString(); JsonObject m = new JsonObject(s); results.add(m); count++; } if (cursor.hasNext()) { JsonObject reply = createBatchMessage("more-exist", results); // Set a timeout, if the user doesn't reply within 10 secs, close the cursor final long timerID = vertx.setTimer(10000, new Handler<Long>() { public void handle(Long timerID) { container.getLogger().warn("Closing DB cursor on timeout"); try { cursor.close(); } catch (Exception ignore) { } } }); message.reply(reply, new Handler<Message<JsonObject>>() { @SuppressWarnings({ "rawtypes", "unchecked" }) public void handle(Message msg) { vertx.cancelTimer(timerID); // Get the next batch sendBatch(msg, cursor, max); } }); } else { JsonObject reply = createBatchMessage("ok", results); message.reply(reply); cursor.close(); } }
From source file:org.wikidata.couchbase.MongoPersistHandler.java
License:Open Source License
@Override public List<DBObject> load(int start, int limit) { DBCursor cursor = getCollection().find(); cursor.skip(start);// www. jav a 2 s . c om cursor.limit(limit); List<DBObject> result = new LinkedList<DBObject>(); try { while (cursor.hasNext()) { result.add(cursor.next()); } } finally { cursor.close(); } return result; }
From source file:org.wrml.contrib.runtime.service.mongo.MongoService.java
License:Apache License
@Override public Set<Model> search(final SearchCriteria searchCriteria) throws UnsupportedOperationException { // Identify the mongo collection to query. final Dimensions resultDimensions = searchCriteria.getResultDimensions(); final URI schemaUri = resultDimensions.getSchemaUri(); final String collectionName = convertToCollectionName(schemaUri); if (!_Mongo.collectionExists(collectionName)) { LOG.debug(getConfiguration().getName() + " - Collection does not exist. Name:\n" + collectionName); return null; }// w w w . j a v a 2s . co m final DBCollection mongoCollection = _Mongo.getCollection(collectionName); if (mongoCollection == null) { // Should not happen LOG.error(getConfiguration().getName() + " - Collection should exist. Name:\n" + collectionName); return null; } // Build the mongo query object. final DBObject mongoQuery = createMongoQuery(searchCriteria); if (mongoQuery == null) { LOG.warn(getConfiguration().getName() + " - Query could not be created for: " + searchCriteria); return null; } // Build the mongo projection (fields to return). DBObject mongoKeys = null; final Set<String> projectionSlotNames = searchCriteria.getProjectionSlotNames(); if (projectionSlotNames != null && !projectionSlotNames.isEmpty()) { for (final String projectionSlotName : projectionSlotNames) { mongoKeys.put(projectionSlotName, 1); } } // Query mongo final DBCursor cursor = mongoCollection.find(mongoQuery, mongoKeys); final int resultLimit = searchCriteria.getResultLimit(); if (resultLimit > 0) { cursor.limit(resultLimit); } // TODO: Support skipping to an offset //cursor.skip(offset); // Build model results final Set<Model> resultSet = new LinkedHashSet<>(); try { while (cursor.hasNext()) { final DBObject mongoObject = cursor.next(); final Model model; try { model = convertToModel(mongoObject, null, resultDimensions); // Note: Context will set URI value in Document models. 
} catch (ModelReadingException e) { LOG.error(e.getMessage(), e); continue; } resultSet.add(model); } } finally { cursor.close(); } return resultSet; }
From source file:parlare.application.server.model.Database.java
private String doClientMongo() { String print = ""; System.out.println("User:" + user + " Source:" + source + " Password:" + password); try {// w w w. jav a2s . c o m // connect to the local database server MongoClient mongoClient = new MongoClient(new ServerAddress(server), Arrays.asList(MongoCredential.createMongoCRCredential(user, source, password.toCharArray())), new MongoClientOptions.Builder().build()); // get handle to "mydb" DB db = mongoClient.getDB("html5apps"); // Authenticate - optional // boolean auth = db.authenticate("foo", "bar"); // get a list of the collections in this database and print them out Set<String> collectionNames = db.getCollectionNames(); for (String s : collectionNames) { System.out.println(s); } // get a collection object to work with DBCollection testCollection = db.getCollection("testCollection"); // drop all the data in it testCollection.drop(); // make a document and insert it BasicDBObject doc = new BasicDBObject("name", "MongoDB").append("type", "database").append("count", 1) .append("info", new BasicDBObject("x", 203).append("y", 102)); testCollection.insert(doc); // get it (since it's the only one in there since we dropped the rest earlier on) DBObject myDoc = testCollection.findOne(); System.out.println(myDoc); // now, lets add lots of little documents to the collection so we can explore queries and cursors for (int i = 0; i < 100; i++) { testCollection.insert(new BasicDBObject().append("i", i)); } System.out.println("total # of documents after inserting 100 small ones (should be 101) " + testCollection.getCount()); // lets get all the documents in the collection and print them out DBCursor cursor = testCollection.find(); try { while (cursor.hasNext()) { System.out.println(cursor.next()); } } finally { cursor.close(); } // now use a query to get 1 document out BasicDBObject query = new BasicDBObject("i", 71); cursor = testCollection.find(query); try { while (cursor.hasNext()) { System.out.println(cursor.next()); } } finally 
{ cursor.close(); } // now use a range query to get a larger subset query = new BasicDBObject("i", new BasicDBObject("$gt", 50)); // i.e. find all where i > 50 cursor = testCollection.find(query); try { while (cursor.hasNext()) { System.out.println("Cursor: " + cursor.next()); } } finally { cursor.close(); } // range query with multiple constraints query = new BasicDBObject("i", new BasicDBObject("$gt", 20).append("$lte", 30)); // i.e. 20 < i <= 30 cursor = testCollection.find(query); try { while (cursor.hasNext()) { System.out.println(cursor.next()); } } finally { cursor.close(); } // create an index on the "i" field testCollection.createIndex(new BasicDBObject("i", 1)); // create index on "i", ascending // list the indexes on the collection List<DBObject> list = testCollection.getIndexInfo(); for (DBObject o : list) { System.out.println(o); } // See if the last operation had an error System.out.println("Last error : " + db.getLastError()); // see if any previous operation had an error System.out.println("Previous error : " + db.getPreviousError()); // force an error db.forceError(); // See if the last operation had an error System.out.println("Last error : " + db.getLastError()); db.resetError(); // release resources mongoClient.close(); } catch (UnknownHostException ex) { Logger.getLogger(Database.class.getName()).log(Level.SEVERE, null, ex); } return print; }
From source file:poke.server.storage.jdbc.DatabaseStorage.java
License:Apache License
@Override public Request findImageDetails(Request request) { // TODO Auto-generated method stub Request.Builder bldr = Request.newBuilder(request); Payload.Builder pb = bldr.getBodyBuilder(); Header.Builder hdb = bldr.getHeaderBuilder(); PhotoHeader.Builder phb = hdb.getPhotoHeaderBuilder(); PhotoPayload.Builder photob = pb.getPhotoPayloadBuilder(); // Routing.Builder DBCollection dbColl = db.getCollection("ImageRepository"); BasicDBObject ref = new BasicDBObject(); ref.put("uuid", request.getBody().getPhotoPayload().getUuid()); DBCursor dbc = dbColl.find(ref); try {/*from ww w . j a v a 2 s .c o m*/ if (dbc.hasNext()) { DBObject bdc = dbc.next(); logger.debug("Rows fetched" + bdc.get("uuid").toString()); photob.setData(ByteString.copyFromUtf8((bdc.get("data").toString()))); photob.setUuid(bdc.get("uuid").toString()); photob.setName(bdc.get("name").toString()); //phb.setLastModified(Long.parseLong(bdc.get("modifiedAt").toString())); phb.setResponseFlag(ResponseFlag.success); //phb.setLastModified(-1); return bldr.build(); } else { // Set forward node as leader of next cluster. String clusterNodes = new String(); if (request.getHeader().getPhotoHeader().hasEntryNode()) { clusterNodes = request.getHeader().getPhotoHeader().getEntryNode(); clusterNodes += "," + DbConstants.CLUSTER_ID; } else { clusterNodes = DbConstants.CLUSTER_ID; } phb.setEntryNode(clusterNodes); } } finally { dbc.close(); } phb.setResponseFlag(ResponseFlag.failure); //phb.setLastModified(-1); return bldr.build(); }