List of usage examples for com.mongodb BasicDBObject toString
@SuppressWarnings("deprecation") public String toString()
Returns a JSON serialization of this object
The output will look like: {"a":1, "b":["x","y","z"]}
From source file:org.opencb.cellbase.mongodb.db.MongoDBAdaptor.java
License:Apache License
public QueryResult getIntervalFrequencies(Region region, QueryOptions options) { // MONGO QUERY TO IMPLEMENT // db.variation.aggregate({$match: {$and: [{chromosome: "1"}, {start: {$gt: 251391, $lt: 2701391}}]}}, {$group: {_id: {$subtract: [{$divide: ["$start", 40000]}, {$divide: [{$mod: ["$start", 40000]}, 40000]}]}, totalCount: {$sum: 1}}}) // {//from w ww .ja v a 2 s. c o m // $match: { // $and: [{ // chromosome: "1" // }, { // start: { // $gt: 251391, // $lt: 2701391 // } // } // ] // } // }, { // $group: { // _id: { // $subtract: [{ // $divide: ["$start", 40000] // }, { // $divide: [{ // $mod: ["$start", 40000] // }, // 40000 // ] // } // ] // }, // totalCount: { // $sum: 1 // } // } // } int interval = options.getInt("interval"); BasicDBObject start = new BasicDBObject("$gt", region.getStart()); start.append("$lt", region.getEnd()); BasicDBList andArr = new BasicDBList(); andArr.add(new BasicDBObject("chromosome", region.getChromosome())); andArr.add(new BasicDBObject("start", start)); BasicDBObject match = new BasicDBObject("$match", new BasicDBObject("$and", andArr)); BasicDBList divide1 = new BasicDBList(); divide1.add("$start"); divide1.add(interval); BasicDBList divide2 = new BasicDBList(); divide2.add(new BasicDBObject("$mod", divide1)); divide2.add(interval); BasicDBList subtractList = new BasicDBList(); subtractList.add(new BasicDBObject("$divide", divide1)); subtractList.add(new BasicDBObject("$divide", divide2)); BasicDBObject substract = new BasicDBObject("$subtract", subtractList); DBObject totalCount = new BasicDBObject("$sum", 1); BasicDBObject g = new BasicDBObject("_id", substract); g.append("features_count", totalCount); BasicDBObject group = new BasicDBObject("$group", g); BasicDBObject sort = new BasicDBObject("$sort", new BasicDBObject("_id", 1)); logger.info("getIntervalFrequencies - (>_)>"); System.out.println(options.toString()); System.out.println(match.toString()); System.out.println(group.toString()); 
System.out.println(sort.toString()); QueryResult<DBObject> aggregationOutput = mongoDBCollection.aggregate(Arrays.asList(match, group, sort), options); Map<Long, DBObject> ids = new HashMap<>(); for (DBObject intervalObj : aggregationOutput.getResult()) { Long _id = Math.round((Double) intervalObj.get("_id"));//is double DBObject intervalVisited = ids.get(_id); if (intervalVisited == null) { intervalObj.put("_id", _id); intervalObj.put("start", getChunkStart(_id.intValue(), interval)); intervalObj.put("end", getChunkEnd(_id.intValue(), interval)); intervalObj.put("chromosome", region.getChromosome()); intervalObj.put("features_count", Math.log((int) intervalObj.get("features_count"))); ids.put(_id, intervalObj); } else { Double sum = (Double) intervalVisited.get("features_count") + Math.log((int) intervalObj.get("features_count")); intervalVisited.put("features_count", sum.intValue()); } } /****/ BasicDBList resultList = new BasicDBList(); int firstChunkId = getChunkId(region.getStart(), interval); int lastChunkId = getChunkId(region.getEnd(), interval); DBObject intervalObj; for (int chunkId = firstChunkId; chunkId <= lastChunkId; chunkId++) { intervalObj = ids.get((long) chunkId); if (intervalObj == null) { intervalObj = new BasicDBObject(); intervalObj.put("_id", chunkId); intervalObj.put("start", getChunkStart(chunkId, interval)); intervalObj.put("end", getChunkEnd(chunkId, interval)); intervalObj.put("chromosome", region.getChromosome()); intervalObj.put("features_count", 0); } resultList.add(intervalObj); } /****/ QueryResult queryResult = new QueryResult(); queryResult.setResult(resultList); queryResult.setId(region.toString()); queryResult.setResultType("frequencies"); return queryResult; /***************************/ // QueryBuilder builder = QueryBuilder.start("chromosome").is(region.getSequenceName()).and("end") // .greaterThan(region.getStart()).and("start").lessThan(region.getEnd()); // // int numIntervals = (region.getEnd() - region.getStart()) / 
interval + 1; // int[] intervalCount = new int[numIntervals]; // // List<Variation> variationList = executeQuery(builder.get(), Arrays.asList("id,chromosome,end,strand,type,reference,alternate,alleleString,species,assembly,source,version,transcriptVariations,xrefs,featureId,featureAlias,variantFreq,validationStatus")); // // System.out.println("Variation index"); // System.out.println("numIntervals: " + numIntervals); // for (Variation variation : variationList) { // System.out.print("gsnp start:" + variation.getStart() + " "); // if (variation.getStart() >= region.getStart() && variation.getStart() <= region.getEnd()) { // int intervalIndex = (variation.getStart() - region.getStart()) / interval; // truncate // System.out.print(intervalIndex + " "); // intervalCount[intervalIndex]++; // } // } // System.out.println("Variation index"); // // int intervalStart = region.getStart(); // int intervalEnd = intervalStart + interval - 1; // BasicDBList intervalList = new BasicDBList(); // for (int i = 0; i < numIntervals; i++) { // BasicDBObject intervalObj = new BasicDBObject(); // intervalObj.put("start", intervalStart); // intervalObj.put("end", intervalEnd); // intervalObj.put("interval", i); // intervalObj.put("value", intervalCount[i]); // intervalList.add(intervalObj); // intervalStart = intervalEnd + 1; // intervalEnd = intervalStart + interval - 1; // } // // System.out.println(region.getSequenceName()); // System.out.println(region.getStart()); // System.out.println(region.getEnd()); // return intervalList.toString(); }
From source file:org.restheart.hal.metadata.MapReduce.java
License:Open Source License
/**
 * Binds aggregation variables into the map function.
 *
 * @param aVars RequestContext.getAggregationVars()
 * @return the map function with every occurrence of $vars replaced by the
 *         JSON serialization of aVars (double quotes escaped), or the raw
 *         map function when no variables were supplied
 */
public String getResolvedMap(BasicDBObject aVars) {
    if (aVars == null || aVars.isEmpty()) {
        return map;
    }
    if (map == null) {
        return null;
    }
    // Serialize the variables to JSON and escape the double quotes so the
    // result can live inside the JavaScript source as a string literal.
    String escapedAVars = "\"" + aVars.toString().replaceAll("\"", "\\\\\\\\\"") + "\"";
    return map.replaceAll(Matcher.quoteReplacement("$") + "vars", escapedAVars);
}
From source file:org.restheart.hal.metadata.MapReduce.java
License:Open Source License
/**
 * Binds aggregation variables into the reduce function.
 *
 * @param aVars RequestContext.getAggregationVars()
 * @return the reduce function with every occurrence of $vars replaced by the
 *         JSON serialization of aVars (double quotes escaped), or the raw
 *         reduce function when no variables were supplied
 */
public String getResolvedReduce(BasicDBObject aVars) {
    if (aVars == null || aVars.isEmpty()) {
        // BUG FIX: this branch previously returned the map function
        // (copy-paste from getResolvedMap) instead of the reduce function.
        return reduce;
    }
    if (reduce == null) {
        return null;
    }
    // Serialize the variables to JSON and escape the double quotes so the
    // result can live inside the JavaScript source as a string literal.
    String escapedAVars = "\"" + aVars.toString().replaceAll("\"", "\\\\\\\\\"") + "\"";
    return reduce.replaceAll(Matcher.quoteReplacement("$") + "vars", escapedAVars);
}
From source file:org.restheart.handlers.schema.JsonMetaSchemaChecker.java
License:Open Source License
@Override public boolean check(HttpServerExchange exchange, RequestContext context, BasicDBObject contentToCheck, DBObject args) {// w w w . j av a 2 s . c o m if (contentToCheck == null) { return false; } try { schema.validate(new JSONObject(contentToCheck.toString())); } catch (ValidationException ve) { context.addWarning(ve.getMessage()); ve.getCausingExceptions().stream().map(ValidationException::getMessage).forEach(context::addWarning); return false; } return true; }
From source file:org.restheart.test.performance.LoadPutPT.java
License:Open Source License
/** * * @throws IOException/* w w w.ja v a 2 s.c om*/ */ public void put() throws Exception { BasicDBObject content = new BasicDBObject("random", Math.random()); Response resp = httpExecutor.execute(Request.Post(url).bodyString(content.toString(), halCT) .addHeader(Headers.CONTENT_TYPE_STRING, Representation.HAL_JSON_MEDIA_TYPE)); HttpResponse httpResp = resp.returnResponse(); assertNotNull(httpResp); HttpEntity entity = httpResp.getEntity(); assertNotNull(entity); StatusLine statusLine = httpResp.getStatusLine(); assertNotNull(statusLine); assertEquals("check status code", HttpStatus.SC_CREATED, statusLine.getStatusCode()); }
From source file:org.sipfoundry.sipxconfig.mongo.MongoReplicaSetManager.java
License:Open Source License
public void forceReconfig() { try {// ww w .j a v a2 s. c o m String id = "_id"; BasicDBObject config = new BasicDBObject(); config.put(id, "sipxecs"); config.put("version", 1); BasicDBList members = new BasicDBList(); BasicDBObject primary = new BasicDBObject(); primary.put(id, 0); primary.put(HOST, format("%s:%d", m_primaryFqdn, MongoSettings.SERVER_PORT)); primary.put("priority", 2); members.add(primary); config.put(MEMBERS, members); String command = format(COMMAND_FORCE_RECONFIG, config.toString()); BasicBSONObject result = MongoUtil.runCommand(m_localDb.getDb(), command); MongoUtil.checkForError(result); } catch (MongoCommandException e) { LOG.warn("Failed to force replica set reconfiguration", e); throw new UserException("&err.failed.forceReconfig"); } }
From source file:org.socialhistoryservices.security.MongoUserDetailService.java
License:Open Source License
/**
 * Creates or updates (upserts) a user document keyed by username.
 *
 * A non-null password is hashed before storage; a null password and an empty
 * authority list are backfilled from any existing document for the same
 * username, so partial updates do not wipe credentials or roles.
 *
 * @param user the user details to persist
 */
public void createUser(MongoUserDetails user) {
    if (user.getPassword() != null) {
        user.setPassword(HashPassword.encrypt(HASH, user.getPassword()));
    }
    final DBCollection coll = coll();
    BasicDBObject query = new BasicDBObject("username", user.getUsername());
    DBObject tmp = coll.findOne(query);
    if (tmp != null) {
        // Preserve the stored password when the caller supplied none.
        if (user.getPassword() == null) {
            user.setPassword((String) tmp.get("password"));
        }
        // Preserve the stored authorities when the caller supplied none.
        if (user.getAuthorities().size() == 0) {
            BasicDBList authorities = (BasicDBList) tmp.get("authorities");
            for (Object authority : authorities) {
                user.getAuthorities()
                        .add(new org.socialhistoryservices.security.MongoAuthority((String) authority));
            }
        }
    }
    BasicDBObject document = new BasicDBObject();
    document.put("username", user.getUsername());
    document.put("password", user.getPassword());
    document.put("enabled", user.isEnabled());
    document.put("accountNonExpired", user.isAccountNonExpired());
    document.put("accountNonLocked", user.isAccountNonLocked());
    document.put("credentialsNonExpired", user.isCredentialsNonExpired());
    BasicDBList authorities = new BasicDBList();
    for (GrantedAuthority authority : user.getAuthorities()) {
        authorities.add(authority.getAuthority());
    }
    document.put("authorities", authorities);
    final WriteResult result = coll.update(query, document, true, false, WriteConcern.SAFE);
    // BUG FIX: the success log previously ran even when the update reported
    // failure, and the failure path logged a throwaway Exception object
    // instead of a message.
    if (result.getN() == 0) {
        log.error("Adding the user failed: " + result.getError());
    } else {
        log.info("Persisted:\n" + document.toString());
    }
}
From source file:parser.mongodbContoller.java
public void add_page_data(pageData PageData) { try {//from ww w . java 2 s. com DB PageDataDB = MongoDbClient.getDB(PageDataDBName); if (!PageDataDB.collectionExists(PageDataCollectionName)) { DBObject DBOptions = BasicDBObjectBuilder.start().get(); PageDataDB.createCollection(PageDataCollectionName, DBOptions); } BasicDBObject PageDataDocument = new BasicDBObject(); //adds page url PageDataDocument.append("Web_Url", PageData.getPageUrl()); //adds page description if (PageData.getPageDescription() != null) { PageDataDocument.append("Page_Description", PageData.getPageDescription()); } //adds keyword frequencies if (PageData.get_keyword_frequencies_map() != null) { PageDataDocument.append("Keywords", new BasicDBObject(PageData.get_keyword_frequencies_map())); } //adds term frequencies if (PageData.get_term_frequencies_map() != null) { PageDataDocument.append("Terms", new BasicDBObject(PageData.get_term_frequencies_map())); } System.out.println("Inserting; \n " + PageDataDocument.toString()); System.out.println("Inserted " + PageDataDB.getCollection(PageDataCollectionName) .insert(PageDataDocument, WriteConcern.FSYNCED).getN() + " Documents"); System.out.println("Last error:\n" + PageDataDB.getLastError()); } catch (Exception e) { e.printStackTrace(); } }
From source file:project.ac.mongoservice.MongoDB.java
/** * Web service operation/*from w ww. j av a 2s .c o m*/ * * @param className * @param atribute * @param data * @return */ //<editor-fold defaultstate="collapsed" desc="Metodo :: find(String, String, String)"> @WebMethod(operationName = "find") public String find(@WebParam(name = "className") String className, @WebParam(name = "atribute") String atribute, @WebParam(name = "data") String data) { LinkedList<BasicDBObject> r; String found = ""; try { BasicDBObject obj = (BasicDBObject) Class.forName(className).newInstance(); DBObject fileData = chargeJson(); MongoHandler mongoHandler = new MongoHandler("service", fileData); r = (LinkedList<BasicDBObject>) mongoHandler.find(obj.getClass(), atribute, data); if (r.size() == 0) { return "Object not Found or Doesnt Exist"; } else { for (BasicDBObject ob : r) { found += ob.toString() + "&&"; } return found; } } catch (UnknownHostException ex) { return "Object Coundnt be Found"; } catch (ClassNotFoundException | InstantiationException | IllegalAccessException ex) { return "The Class Coundnt be Charged"; } }
From source file:project.ac.mongoservice.MongoDB.java
/** * Web service operation//from www. j a v a2 s. c o m * * @param className * @return */ //<editor-fold defaultstate="collapsed" desc="Metodo :: findAll(String)"> @WebMethod(operationName = "findAll") public String findAll(@WebParam(name = "className") String className) { LinkedList<BasicDBObject> rAll; String foundAll = ""; try { BasicDBObject obj = (BasicDBObject) Class.forName(className).newInstance(); DBObject fileData = chargeJson(); MongoHandler mongoHandler = new MongoHandler("service", fileData); rAll = (LinkedList<BasicDBObject>) mongoHandler.findAll(obj.getClass()); if (rAll.size() == 0) { return "Objects not Found or Dont Exist"; } else { for (BasicDBObject ob : rAll) { foundAll += ob.toString() + "&&"; } return foundAll; } } catch (UnknownHostException ex) { return "Objects Coundnt be Found"; } catch (ClassNotFoundException | InstantiationException | IllegalAccessException ex) { return "The Class Coundnt be Charged"; } }