List of usage examples for com.mongodb DBCursor count
public int count()
From source file:com.cyslab.craftvm.rest.mongo.WriteServlet.java
License:GNU General Public License
@Override protected void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException { log.trace("doPost()"); if (!can_write(req)) { res.sendError(SC_UNAUTHORIZED);/* www . j a va 2 s . c o m*/ return; } InputStream is = req.getInputStream(); String db_name = req.getParameter("dbname"); String col_name = req.getParameter("colname"); if (db_name == null || col_name == null) { String names[] = req2mongonames(req); if (names != null) { db_name = names[0]; col_name = names[1]; } if (db_name == null || col_name == null) { error(res, SC_BAD_REQUEST, Status.get("param name missing")); return; } } boolean upsert = Boolean.parseBoolean(req.getParameter("upsert")); boolean multi = Boolean.parseBoolean(req.getParameter("multi")); DB db = mongo.getDB(db_name); // mongo auth String user = req.getParameter("user"); String passwd = req.getParameter("passwd"); if (user != null && passwd != null && (!db.isAuthenticated())) { boolean auth = db.authenticate(user, passwd.toCharArray()); if (!auth) { res.sendError(SC_UNAUTHORIZED); return; } } DBCollection col = db.getCollection(col_name); BufferedReader r = null; DBObject q = null, o = null; try { r = new BufferedReader(new InputStreamReader(is)); String q_s = r.readLine(); if (q_s == null) { error(res, SC_BAD_REQUEST, Status.get("no data")); return; } String o_s = r.readLine(); if (o_s == null) { error(res, SC_BAD_REQUEST, Status.get("obj to update missing")); return; } try { q = (DBObject) JSON.parse(q_s); o = (DBObject) JSON.parse(o_s); } catch (JSONParseException e) { error(res, SC_BAD_REQUEST, Status.get("can not parse data")); return; } } finally { if (r != null) r.close(); } // // search if (do_search) { String fn = col.getFullName(); DBCursor c = col.find(q); int cnt = c.count(); if (!multi) c.limit(1); long l = multi ? 
cnt : 1; String toupdate[] = new String[(int) l]; int n = 0; boolean insert = false; if (upsert && !multi && cnt == 0) insert = true; while (c.hasNext()) { DBObject _o = c.next(); ObjectId oid = (ObjectId) _o.get("_id"); String id = oid.toStringMongod(); toupdate[n++] = id; } c.close(); List<String> flds = Config.search_index_fields.get(fn); boolean commit = false; Document doc = null; Search _writer = search.get_writer(); if (flds != null && flds.size() > 0) { doc = new Document(); try { for (String fld : flds) { String val = (String) o.get(fld); if (val == null) continue; _writer.add_searchable_s(doc, fld, val); commit = true; } if (commit) _writer.commit(doc); } catch (ClassCastException e) { error(res, SC_BAD_REQUEST, Status.get("searchable fields must be type String")); return; } catch (CorruptIndexException e) { error(res, SC_BAD_REQUEST, Status.get("Search corrupt index" + e)); return; } } if (commit && insert) log.warn("upsert with search not implemented yet"); else _writer.update(toupdate, doc); } WriteResult wr = col.update(q, o, upsert, multi, write_concern); // return operation status if (do_return) { out_str(req, wr.toString()); if (wr.getError() == null) { res.setStatus(SC_BAD_REQUEST); return; } } res.setStatus(SC_CREATED); }
From source file:com.dilmus.dilshad.scabi.deprecated.DBackFileOld.java
License:Open Source License
/**
 * Stamps the application-level Put* metadata onto an already-uploaded GridFS
 * file, then demotes/removes any previous versions of the same file name via
 * handlePreviousVersions().
 *
 * @param fileName    logical file name recorded as PutFileName
 * @param fileID      GridFS _id of the freshly uploaded file
 * @param type        value recorded as PutType
 * @param contentType value recorded as PutContentType
 * @return number of documents modified by the metadata update (1 on success)
 * @throws DScabiException when the file id matches zero or multiple
 *                         documents, the stored uploadDate is missing, or the
 *                         metadata update does not modify exactly one document
 */
public int updateMetaData(String fileName, ObjectId fileID, String type, String contentType)
        throws IOException, DScabiException, ParseException {
    int n = 0;
    String uploadDate = null;
    Date datefromDB = null;

    // Locate the single fs.files document for this upload by its _id.
    BasicDBObject documentWhere = new BasicDBObject();
    documentWhere.put("_id", fileID);
    DBCursor cursorExist = m_table.find(documentWhere);
    n = cursorExist.count();
    if (1 == n) {
        log.debug("updateMetaData() Inside 1 == n");
        while (cursorExist.hasNext()) {
            DBObject ob = cursorExist.next();
            log.debug("updateMetaData() result from ob {}", ob.toString());
            // uploadDate is written by GridFS itself and is read here as a Date.
            //datefromDB = (String) ((BasicBSONObject) ob).getString("uploadDate");
            datefromDB = ((BasicBSONObject) ob).getDate("uploadDate");
            if (null == datefromDB) {
                throw new DScabiException("updateMetaData() Unable to get uploadDate for file : " + fileName
                        + " fileID : " + fileID.toHexString(), "DBF.UMD.1");
            }
            log.debug("datefromDB : {}", datefromDB);
        }
    } else if (0 == n) {
        log.debug("updateMetaData() No matches for file : " + fileName + " fileID : " + fileID.toHexString());
        throw new DScabiException(
                "updateMetaData() No matches for file : " + fileName + " fileID : " + fileID.toHexString(),
                "DBF.UMD.2");
    } else {
        log.debug("updateMetaData() Multiple matches for file : " + fileName + " fileID : "
                + fileID.toHexString());
        throw new DScabiException("updateMetaData() Multiple matches for file : " + fileName + " fileID : "
                + fileID.toHexString(), "DBF.UMD.3");
    }

    Date date = new Date();
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS");
    // NOTE(review): "ISO" is not a valid TimeZone id; TimeZone.getTimeZone()
    // silently falls back to GMT here. Confirm GMT timestamps are intended.
    dateFormat.setTimeZone(TimeZone.getTimeZone("ISO"));
    String putClientDateTime = dateFormat.format(date);
    // To parse from string : Date date2 = dateFormat.parse(putDateTime);
    // Uses java.time java 8 : ZonedDateTime now = ZonedDateTime.now( ZoneOffset.UTC );
    String millisTime = "" + System.currentTimeMillis();
    String nanoTime = "" + System.nanoTime();
    /* If datefromDB is String
    SimpleDateFormat dateFormatFromDB = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
    dateFormatFromDB.setTimeZone(TimeZone.getTimeZone("ISO"));
    CharSequence cs1 = "T";
    CharSequence cs2 = "Z";
    String s1 = datefromDB.replace(cs1, " ");
    String s2 = s1.replace(cs2, "");
    Date date2 = dateFormatFromDB.parse(s2);
    uploadDate = dateFormat.format(date2);
    */
    uploadDate = dateFormat.format(datefromDB);
    log.debug("uploadDate : {}", uploadDate);

    // Attach the application metadata via $set so GridFS's own keys survive.
    // PutStatus=Completed + PutLatestNumber=1 marks this as the current version.
    BasicDBObject documentUpdate = new BasicDBObject();
    documentUpdate.append("PutFileName", fileName);
    documentUpdate.append("PutServerFileID", fileID.toHexString());
    documentUpdate.append("PutServerUploadDateTime", uploadDate);
    documentUpdate.append("PutType", type);
    documentUpdate.append("PutContentType", contentType);
    documentUpdate.append("PutClientDateTime", putClientDateTime);
    documentUpdate.append("PutClientDateTimeInMillis", millisTime);
    documentUpdate.append("PutClientDateTimeInNano", nanoTime);
    documentUpdate.append("PutStatus", "Completed");
    documentUpdate.append("PutLatestNumber", "1");
    BasicDBObject updateObj = new BasicDBObject();
    updateObj.put("$set", documentUpdate);
    WriteResult result = m_table.update(documentWhere, updateObj);
    if (1 != result.getN())
        throw new DScabiException(
                "Update meta data failed for file : " + fileName + " fileID : " + fileID.toHexString(),
                "DBF.UMD.4");
    // Demote or delete older versions now that this upload is marked latest.
    handlePreviousVersions(fileName, fileID.toHexString(), uploadDate);
    return result.getN();
}
From source file:com.dilmus.dilshad.scabi.deprecated.DBackFileOld.java
License:Open Source License
/**
 * After a new version of {@code fileName} has been marked Completed/latest,
 * walks all other Completed entries for the same name and either demotes them
 * (PutLatestNumber 1 -> 2) or deletes them from GridFS.
 *
 * Keep-rules per entry: the current upload itself is kept; any entry with a
 * newer PutServerUploadDateTime that is still marked latest is kept; older
 * entries still marked latest are demoted to "2"; everything else is deleted.
 *
 * @param fileName                    logical file name
 * @param strFileID                   hex id of the current (just-uploaded) file
 * @param strPutServerUploadDateTime  upload timestamp of the current file,
 *                                    formatted yyyyMMddHHmmssSSS (numerically
 *                                    comparable)
 * @return always 0
 * @throws DScabiException on inconsistent metadata
 */
private int handlePreviousVersions(String fileName, String strFileID, String strPutServerUploadDateTime)
        throws IOException, DScabiException {
    int m = 0;
    int n = 0;
    // It is better to call this only after meta data is updated for currently uploaded file
    // This will skip checking for given input strFileID, file ID of currently uploaded file
    removeFilesIncompleteMetaData(fileName, strFileID);

    // Sanity check: exactly one entry must be the freshly-completed latest version.
    BasicDBObject documentFind = new BasicDBObject();
    documentFind.put("PutFileName", fileName);
    documentFind.append("PutServerFileID", strFileID);
    documentFind.append("PutStatus", "Completed");
    documentFind.append("PutLatestNumber", "1");
    DBCursor cursor = m_table.find(documentFind);
    m = cursor.count();
    if (1 == m) {
        log.debug("handlePreviousVersions() Inside 1 == n");
    } else if (0 == m) {
        log.debug("handlePreviousVersions() No matches for file : " + fileName + " strFileID : " + strFileID);
        throw new DScabiException(
                "handlePreviousVersions() No matches for file : " + fileName + " strFileID : " + strFileID,
                "DBF.HPV.1");
    } else {
        log.debug("handlePreviousVersions() Multiple matches for file : " + fileName + " strFileID : "
                + strFileID);
        throw new DScabiException("handlePreviousVersions() Multiple matches for file : " + fileName
                + " strFileID : " + strFileID, "DBF.HPV.2");
    }

    // Now fetch ALL completed entries for this name (all versions, including ours).
    BasicDBObject documentQuery = new BasicDBObject();
    documentQuery.put("PutFileName", fileName);
    documentQuery.append("PutStatus", "Completed");
    DBCursor cursorExist = m_table.find(documentQuery);
    n = cursorExist.count();
    if (1 == n) {
        log.debug(
                "handlePreviousVersions() Information only : Inside 1 == n. Only one file / current file is found. No previous versions for file : "
                        + fileName + " with PutStatus=Completed");
        return 0;
    } else if (0 == n) {
        log.debug("handlePreviousVersions() No matches for file : " + fileName + " with PutStatus=Completed");
        throw new DScabiException(
                "handlePreviousVersions()() No matches for file : " + fileName + " with PutStatus=Completed",
                "DBF.HPV.3");
    } else {
        // Timestamps are yyyyMMddHHmmssSSS strings, so numeric comparison orders them.
        long lf1 = Long.parseLong(strPutServerUploadDateTime);
        while (cursorExist.hasNext()) {
            DBObject ob = cursorExist.next();
            log.debug("handlePreviousVersions() result from ob {}", ob.toString());
            String fid = (String) ((BasicBSONObject) ob).getString("PutServerFileID");
            if (null == fid) {
                throw new DScabiException("PutServerFileID is missing for one version of file : " + fileName,
                        "DBF.HPV.4");
            }
            /* Don't use. It should be based on date-time and not on file ID
            if (f.equals(strFileID)) {
                // proceed with other versions
                continue;
            }
            */
            String f = (String) ((BasicBSONObject) ob).getString("PutServerUploadDateTime");
            if (null == f) {
                throw new DScabiException("PutServerUploadDateTime is missing for one version of file : "
                        + fileName + " file ID : " + fid, "DBF.HPV.5");
            }
            String f2 = (String) ((BasicBSONObject) ob).getString("PutLatestNumber");
            if (null == f2) {
                throw new DScabiException("PutLatestNumber is missing for one version of file : " + fileName
                        + " file ID : " + fid, "DBF.HPV.6");
            }
            // Skip the entry that IS the current upload (same timestamp, marked latest).
            if (f.equals(strPutServerUploadDateTime) && f2.equals("1")) {
                // proceed with other versions
                continue;
            }
            long lf2 = Long.parseLong(f);
            // Skip entries newer than the current upload that are still marked latest.
            if (lf1 < lf2 && f2.equals("1")) {
                // proceed with other versions
                continue;
            }
            if (f2.equals("1")) {
                // all file entries here have PutServerUploadDateTime < strPutServerUploadDateTime
                // there can be multiple previous versions with PutLatestNumber=1
                // Demote this previous-latest entry to PutLatestNumber=2.
                BasicDBObject documentWhere = new BasicDBObject();
                documentWhere.put("PutServerFileID", fid);
                BasicDBObject documentUpdate = new BasicDBObject();
                documentUpdate.append("PutLatestNumber", "2");
                BasicDBObject updateObj = new BasicDBObject();
                updateObj.put("$set", documentUpdate);
                // there should be only one entry for file ID fid
                WriteResult result = m_table.update(documentWhere, updateObj);
                if (result.getN() <= 0)
                    throw new DScabiException("Update meta data to PutLatestNumber=2 failed for file : "
                            + fileName + " file ID : " + fid, "DBF.HPV.7");
            } else {
                // remove all other versions
                m_gridFSBucket.delete(new ObjectId(fid));
            }
        }
    }
    return 0;
}
From source file:com.dilmus.dilshad.scabi.deprecated.DBackFileOld.java
License:Open Source License
private int removeFilesIncompleteMetaData(String fileName, String strFileID) { int n = 0;//from www.j a va 2 s .c o m Set<String> stMetaKeys = new HashSet<String>(); stMetaKeys.add("PutFileName"); stMetaKeys.add("PutServerFileID"); stMetaKeys.add("PutServerUploadDateTime"); stMetaKeys.add("PutType"); stMetaKeys.add("PutContentType"); stMetaKeys.add("PutClientDateTime"); stMetaKeys.add("PutClientDateTimeInMillis"); stMetaKeys.add("PutClientDateTimeInNano"); stMetaKeys.add("PutStatus"); stMetaKeys.add("PutLatestNumber"); BasicDBObject documentQuery = new BasicDBObject(); // "filename" is MongoDB/GridFS specific meta data name inside fs.files collection for each file documentQuery.put("filename", fileName); DBCursor cursorExist = m_table.find(documentQuery); n = cursorExist.count(); if (0 == n) { log.debug("removeFilesIncompleteMetaData() Information only : No file found for file : " + fileName); return 0; } else { while (cursorExist.hasNext()) { DBObject ob = cursorExist.next(); log.debug("removeFilesIncompleteMetaData() result from ob {}", ob.toString()); // "_id" is MongoDB/GridFS specific meta data name inside fs.files collection for each file ObjectId oid = ((BasicBSONObject) ob).getObjectId("_id"); if (null == oid) { // what's the use in throwing exception here? throw new DScabiException("_id is missing for file : " + fileName, "DBF.RFI.1"); // let it continue to cleanup as much as possible continue; } if (oid.toHexString().equals(strFileID)) { log.debug( "removeFilesIncompleteMetaData() Information only : skipping given input file ID : {}", strFileID); continue; } Set<String> st = ob.keySet(); if (st.containsAll(stMetaKeys)) { continue; } else { // remove file m_gridFSBucket.delete(oid); } } } return 0; }
From source file:com.dilmus.dilshad.scabi.deprecated.DBackFileOld.java
License:Open Source License
public int removeAllFilesIncompleteMetaData() { int n = 0;//from w w w. j a va 2 s.c om Set<String> stMetaKeys = new HashSet<String>(); stMetaKeys.add("PutFileName"); stMetaKeys.add("PutServerFileID"); stMetaKeys.add("PutServerUploadDateTime"); stMetaKeys.add("PutType"); stMetaKeys.add("PutContentType"); stMetaKeys.add("PutClientDateTime"); stMetaKeys.add("PutClientDateTimeInMillis"); stMetaKeys.add("PutClientDateTimeInNano"); stMetaKeys.add("PutStatus"); stMetaKeys.add("PutLatestNumber"); DBCursor cursorExist = m_table.find(); n = cursorExist.count(); if (0 == n) { log.debug("removeAllFilesIncompleteMetaData() Information only : No file found"); return 0; } else { while (cursorExist.hasNext()) { DBObject ob = cursorExist.next(); log.debug("removeAllFilesIncompleteMetaData() result from ob {}", ob.toString()); // "_id" is MongoDB/GridFS specific meta data name inside fs.files collection for each file ObjectId oid = ((BasicBSONObject) ob).getObjectId("_id"); if (null == oid) { // what's the use in throwing exception here? throw new DScabiException("_id is missing for file : " + fileName, "DBF.RAF.1"); // let it continue to cleanup as much as possible continue; } Set<String> st = ob.keySet(); if (st.containsAll(stMetaKeys)) { continue; } else { // remove file m_gridFSBucket.delete(oid); } } } return 0; }
From source file:com.dilmus.dilshad.scabi.deprecated.DBackFileOld.java
License:Open Source License
public String getLatestFileID(String fileName) throws DScabiException { // This call to removeFilesIncompleteMetaData() is needed because if the last file upload failed (network issue, etc.) // that incomplete file entry will cause getLatestFileID() to throw exception. // So good complete files already in DB will not be served. // The "" as file id below is just to enable method removeFilesIncompleteMetaData() to cleanup all incomplete files with this fileName // Don't call this as if a put is in progress for the same fileName, it will get deleted!! // // // removeFilesIncompleteMetaData(fileName, ""); String latestFileID = null;/*from w w w . j a va2 s .c o m*/ long latestServerDateTime = 0; int n = 0; // take only those file entries for fileName with complete meta-data BasicDBObject documentQuery = new BasicDBObject(); documentQuery.put("PutFileName", fileName); documentQuery.append("PutStatus", "Completed"); DBCursor cursorExist = m_table.find(documentQuery); n = cursorExist.count(); if (1 == n) { while (cursorExist.hasNext()) { DBObject ob = cursorExist.next(); log.debug("handlePreviousVersions() result from ob {}", ob.toString()); String fid = (String) ((BasicBSONObject) ob).getString("PutServerFileID"); if (null == fid) { throw new DScabiException("PutServerFileID is missing for file : " + fileName, "DBF.GLF.1"); } return fid; } } else if (0 == n) { log.debug("getLatestFileID() No matches for file : " + fileName + " with PutStatus=Completed"); throw new DScabiException( "getLatestFileID() No matches for file : " + fileName + " with PutStatus=Completed", "DBF.GLF.2"); } else { while (cursorExist.hasNext()) { DBObject ob = cursorExist.next(); log.debug("getLatestFileID() result from ob {}", ob.toString()); // Analysis needed : can we just continue with next file entry instead of throwing exception? 
String fid = (String) ((BasicBSONObject) ob).getString("PutServerFileID"); if (null == fid) { throw new DScabiException("PutServerFileID is missing for one version of file : " + fileName, "DBF.GLF.3"); } String f = (String) ((BasicBSONObject) ob).getString("PutServerUploadDateTime"); if (null == f) { throw new DScabiException("PutServerUploadDateTime is missing for one version of file : " + fileName + " file ID : " + fid, "DBF.GLF.4"); } String f2 = (String) ((BasicBSONObject) ob).getString("PutLatestNumber"); if (null == f2) { throw new DScabiException("PutLatestNumber is missing for one version of file : " + fileName + " file ID : " + fid, "DBF.GLF.5"); } long lf2 = Long.parseLong(f); if (latestServerDateTime < lf2 && f2.equals("1")) { // proceed with other versions latestServerDateTime = lf2; latestFileID = fid; } } } return latestFileID; }
From source file:com.dilmus.dilshad.scabi.deprecated.DBackFileOld.java
License:Open Source License
public boolean isValidMetaData(String fileName, String strFileID) throws IOException, DScabiException { int n = 0;//from w w w .j a v a2s.co m Set<String> stMetaKeys = new HashSet<String>(); stMetaKeys.add("PutFileName"); stMetaKeys.add("PutServerFileID"); stMetaKeys.add("PutServerUploadDateTime"); stMetaKeys.add("PutType"); stMetaKeys.add("PutContentType"); stMetaKeys.add("PutClientDateTime"); stMetaKeys.add("PutClientDateTimeInMillis"); stMetaKeys.add("PutClientDateTimeInNano"); stMetaKeys.add("PutStatus"); stMetaKeys.add("PutLatestNumber"); BasicDBObject documentQuery = new BasicDBObject(); ObjectId fileID = new ObjectId(strFileID); // "_id" is MongoDB/GridFS specific meta data name inside fs.files collection for each file documentQuery.put("_id", fileID); DBCursor cursorExist = m_table.find(documentQuery); n = cursorExist.count(); if (1 == n) { log.debug("isValidMetaData() Inside 1 == n"); while (cursorExist.hasNext()) { DBObject ob = cursorExist.next(); log.debug("isValidMetaData() result from ob {}", ob.toString()); Set<String> st = ob.keySet(); if (st.containsAll(stMetaKeys)) { return true; } else { return false; } } } else if (0 == n) { log.debug("isValidMetaData() No matches for file : " + fileName + " fileID : " + fileID.toHexString()); throw new DScabiException( "isValidMetaData() No matches for file : " + fileName + " fileID : " + fileID.toHexString(), "DBF.IVM.1"); //return false; } else { log.debug("isValidMetaData() Multiple matches for file : " + fileName + " fileID : " + fileID.toHexString()); throw new DScabiException("isValidMetaData() Multiple matches for file : " + fileName + " fileID : " + fileID.toHexString(), "DBF.IVM.2"); //return false; } return false; }
From source file:com.dilmus.dilshad.scabi.deprecated.DTableOld.java
License:Open Source License
/**
 * Inserts {@code jsonRow} as a new document, but only when no existing
 * document matches {@code jsonCheck} (a duplicate guard). When the table's
 * field-name schema is known, jsonRow's keys must match it exactly and
 * jsonCheck's keys must be a subset of it.
 *
 * @param jsonRow   JSON object holding the full row to insert
 * @param jsonCheck JSON object whose key/value pairs identify a duplicate
 * @return {@code WriteResult.getN()} of the insert
 * @throws DScabiException on schema mismatch, missing values, a failed
 *                         insert, or when a matching row already exists
 */
public int insertRow(String jsonRow, String jsonCheck) throws DScabiException, IOException {
    log.debug("insertRow() firstTime is {}", m_firstTime);
    ArrayList<String> fieldList = fieldNamesUsingFindOne(); // fieldNames();
    DMJson djson = new DMJson(jsonRow);
    Set<String> st = djson.keySet();
    BasicDBObject document = new BasicDBObject();
    int n = 0;
    WriteResult result = null;
    DMJson djsonCheck = new DMJson(jsonCheck);
    Set<String> stCheck = djsonCheck.keySet();
    BasicDBObject documentCheck = new BasicDBObject();

    // Schema validation: only when a field-name list exists for this table.
    if (false == isEmpty(fieldList)) {
        if (false == fieldList.containsAll(st)) {
            throw new DScabiException(
                    "One or more field name in jsonRow doesn't exist in fieldNames list. jsonRow : " + jsonRow
                            + " Field Names list : " + fieldList,
                    "DBT.IRW.1");
        }
        if (false == fieldList.containsAll(stCheck)) {
            throw new DScabiException(
                    "One or more field name in jsonCheck doesn't exist in fieldNames list. jsonCheck : "
                            + jsonCheck + " Field Names list : " + fieldList,
                    "DBT.IRW.2");
        }
        if (false == st.containsAll(fieldList)) {
            throw new DScabiException(
                    "One or more field name in fieldNames doesn't exist in jsonRow key set. jsonRow : " + jsonRow
                            + " Field Names list : " + fieldList,
                    "DBT.IRW.3");
        }
        if (fieldList.size() != st.size()) {
            throw new DScabiException("Fields count doesn't match. fieldNames : " + fieldList.toString()
                    + " with jsonRow : " + jsonRow, "DBT.IRW.4");
        }
    }

    if (false == isEmpty(fieldList)) {
        // Schema-checked path: build insert and duplicate-check documents.
        for (String fieldName : st) {
            // create a document to store key and value
            String f = djson.getString(fieldName);
            if (null == f) {
                throw new DScabiException("Field name " + fieldName + " doesn't exist in jsonRow : " + jsonRow
                        + " Field Names list : " + fieldList, "DBT.IRW.5");
            }
            document.put(fieldName, f);
        }
        for (String keyCheck : stCheck) {
            // create a document to store key and value
            String f2 = djsonCheck.getString(keyCheck);
            if (null == f2) {
                throw new DScabiException(
                        "Field name " + keyCheck + " doesn't exist in jsonCheck : " + jsonCheck, "DBT.IRW.6");
            }
            documentCheck.put(keyCheck, f2);
        }
        // Insert only when no document already matches the duplicate check.
        DBCursor cursorExist = m_table.find(documentCheck);
        n = cursorExist.count();
        if (0 == n) {
            log.debug("insertRow() Inside 0 == n");
            result = m_table.insert(document);
            log.debug("insertRow() result is : {}", result.getN());
            if (result.getN() < 0)
                throw new DScabiException("Insert failed for document : " + document.toString(), "DBT.IRW.7");
        } else if (1 == n) {
            throw new DScabiException("Row already exists. jsonCheck : " + jsonCheck, "DBT.IRW.8");
            // already found
        } else {
            throw new DScabiException("Row already exists, multiple matches. jsonCheck : " + jsonCheck,
                    "DBT.IRW.9");
            // already found
        }
    } else {
        // No schema known: accept jsonRow's keys as-is.
        for (String key : st) {
            // create a document to store key and value
            String f3 = djson.getString(key);
            if (null == f3) {
                throw new DScabiException("Field name " + key + " doesn't exist in jsonRow : " + jsonRow,
                        "DBT.IRW.10");
            }
            document.put(key, djson.getString(key));
        }
        for (String keyCheck : stCheck) {
            // create a document to store key and value
            String f4 = djsonCheck.getString(keyCheck);
            if (null == f4) {
                throw new DScabiException(
                        "Field name " + keyCheck + " doesn't exist in jsonCheck : " + jsonCheck, "DBT.IRW.11");
            }
            documentCheck.put(keyCheck, djsonCheck.getString(keyCheck));
        }
        // Insert only when no document already matches the duplicate check.
        DBCursor cursorExist = m_table.find(documentCheck);
        n = cursorExist.count();
        if (0 == n) {
            log.debug("insertRow() Inside 0 == n");
            result = m_table.insert(document);
            log.debug("insertRow() result is : {}", result.getN());
            if (result.getN() < 0)
                throw new DScabiException("Insert failed for document : " + document.toString(), "DBT.IRW.12");
        } else if (1 == n) {
            throw new DScabiException("Row already exists. jsonCheck : " + jsonCheck, "DBT.IRW.13");
            // already found
        } else {
            throw new DScabiException("Row already exists, multiple matches. jsonCheck : " + jsonCheck,
                    "DBT.IRW.14");
            // already found
        }
    }
    return result.getN();
}
From source file:com.glaf.wechat.mongodb.service.impl.WxMongoDBLogServiceImpl.java
License:Apache License
public List<WxLog> getWxLogsByQueryCriteria(int start, int pageSize, WxLogQuery query) { DB db = mongoTemplate.getDb();//from w w w. j a va 2 s .c o m String tableName = "wx_log" + query.getSuffix(); DBCollection coll = db.getCollection(tableName); BasicDBObject q = new BasicDBObject(); this.fillQueryCondition(q, query); DBCursor cur = coll.find(q); List<WxLog> logs = new java.util.concurrent.CopyOnWriteArrayList<WxLog>(); int limit = query.getPageSize(); if (limit <= 0) { limit = Paging.DEFAULT_PAGE_SIZE; } if (start < cur.count()) { logger.debug("start=" + start); logger.debug("limit=" + limit); List<DBObject> list = coll.find(q).skip(start).limit(limit).toArray(); for (DBObject object : list) { WxLog log = new WxLog(); log.setId((Long) object.get("id")); log.setIp((String) object.get("ip")); log.setActorId((String) object.get("actorId")); log.setContent((String) object.get("content")); log.setOperate((String) object.get("operate")); if (object.containsField("accountId")) { log.setAccountId((Long) object.get("accountId")); } if (object.containsField("openId")) { log.setOpenId((String) object.get("openId")); } if (object.containsField("flag")) { log.setFlag((Integer) object.get("flag")); } if (object.containsField("createTime")) { long ts = (Long) object.get("createTime"); log.setCreateTime(new Date(ts)); } logs.add(log); } } return logs; }
From source file:com.hangum.tadpole.mongodb.core.composite.result.MongodbResultComposite.java
License:Open Source License
/** * ? ? ./* www .j a va2 s . c o m*/ * * @param basicFields * @param basicWhere * @param basicSort */ private void find(BasicDBObject basicFields, DBObject basicWhere, BasicDBObject basicSort, int cntSkip, int cntLimit) throws Exception { if ((cntLimit - cntSkip) >= defaultMaxCount) { // " " + defaultMaxCount + " ? . Prefernece? ? ." // Search can not exceed the number 5. Set in Perference. throw new Exception(String.format(Messages.MongoDBTableEditor_0, "" + defaultMaxCount)); //$NON-NLS-2$ //$NON-NLS-1$ } DB mongoDB = MongoDBQuery.findDB(userDB); DBCollection dbCollection = MongoDBQuery.findCollection(userDB, collectionName); // ?? DBCursor dbCursor = null; try { if (cntSkip > 0 && cntLimit > 0) { dbCursor = dbCollection.find(basicWhere, basicFields).sort(basicSort).skip(cntSkip).limit(cntLimit); } else if (cntSkip == 0 && cntLimit > 0) { dbCursor = dbCollection.find(basicWhere, basicFields).sort(basicSort).limit(cntLimit); } else { dbCursor = dbCollection.find(basicWhere, basicFields).sort(basicSort); } DBObject explainDBObject = dbCursor.explain(); sbConsoleExecuteMsg.append(JSONUtil.getPretty(explainDBObject.toString())).append("\r\n"); //$NON-NLS-1$ //$NON-NLS-2$ sbConsoleErrorMsg.append(JSONUtil.getPretty(mongoDB.getLastError().toString())).append("\r\n"); //$NON-NLS-1$ //$NON-NLS-2$ mongoDB.forceError(); mongoDB.resetError(); // if(logger.isDebugEnabled()) logger.debug(sbConsoleMsg); // ?? . refreshDBView(dbCursor, dbCursor.count()); } finally { if (dbCursor != null) dbCursor.close(); } }