List of usage examples for com.mongodb BasicDBObject append
@Override public BasicDBObject append(final String key, final Object val)
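append() stores the key/value pair in the object and returns the same BasicDBObject, so calls can be chained to build documents and queries fluently. A minimal, self-contained sketch of that pattern (the field names and values are hypothetical, for illustration only; legacy com.mongodb driver):

import com.mongodb.BasicDBObject;

public class AppendExample {
    public static void main(String[] args) {
        // build a document by chaining append calls; each call returns the same object
        BasicDBObject doc = new BasicDBObject()
                .append("name", "alice")
                .append("age", 30)
                .append("active", true);

        // the same pattern builds query documents, e.g. { "age" : { "$gte" : 18 } }
        BasicDBObject query = new BasicDBObject("age", new BasicDBObject("$gte", 18));

        System.out.println(doc);
        System.out.println(query);
    }
}

The examples below show the same chaining style used against real collections: building insert documents, query filters, projections, and update criteria.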
From source file:guesslocation.MongoQuery.java
public static void main(String[] args) {
    try {
        // Connect to mongodb
        MongoClient mongo = new MongoClient("localhost", 27017);

        // get database
        // if the database doesn't exist, mongodb will create it for you
        DB db = mongo.getDB("test");

        // get collections
        // if a collection doesn't exist, mongodb will create it for you
        DBCollection collection = db.getCollection("twitter");
        DBCollection Outcollection = db.getCollection("user_tw");

        DBCursor cursor;

        // ( 1 ) collection.find() --> get all documents
        cursor = collection.find();
        System.out.println("( 1 ) .find()");
        System.out.println("results --> " + cursor.count());

        try {
            BasicDBObject IDquery = new BasicDBObject();
            while (cursor.hasNext()) {
                DBObject data = cursor.next();
                Long v_user_Id = (Long) data.get("user_Id");
                if (v_user_Id == null) {
                    continue;
                }
                IDquery.append("user_Id", v_user_Id);
                DBCursor IDcursor = Outcollection.find(IDquery);
                if (!IDcursor.hasNext()) {
                    BasicDBObject basicObj = GetUserRecord(v_user_Id, data);
                    try {
                        Outcollection.insert(basicObj);
                    } catch (Exception e) {
                        System.err.println("error on insert " + v_user_Id);
                    }
                    basicObj = null;
                    Thread.sleep(100);
                    Outcollection.ensureIndex(new BasicDBObject("user_Id", 1),
                            new BasicDBObject("unique", true));
                }
                IDcursor.close();
                IDquery.clear();
            }
        } catch (InterruptedException ex) {
            Logger.getLogger(MongoQuery.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            cursor.close();
        }

        System.out.println("---------------------------------");
        System.exit(0);
    } catch (UnknownHostException ex) {
        Logger.getLogger(MongoQuery.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:homework.week3.course.BlogPostDAO.java
License:Apache License
public String addPost(String title, String body, List tags, String username) {
    System.out.println("inserting blog entry " + title + " " + body);

    String permalink = title.replaceAll("\\s", "_"); // whitespace becomes _
    permalink = permalink.replaceAll("\\W", ""); // get rid of non-alphanumeric
    permalink = permalink.toLowerCase();

    BasicDBObject post = new BasicDBObject();

    // XXX HW 3.2, Work Here
    // Remember that a valid post has the following keys:
    // author, body, permalink, tags, comments, date
    //
    // A few hints:
    // - Don't forget to create an empty list of comments
    // - for the value of the date key, today's datetime is fine.
    // - tags are already in list form that implements a suitable interface.
    // - we created the permalink for you above.

    // Build the post object and insert it
    post = post.append("title", title).append("author", username).append("tags", tags).append("body", body)
            .append("permalink", permalink).append("date", new Date()).append("comments", new BasicDBList());
    postsCollection.save(post);

    return permalink;
}
From source file:homework.week3.course.BlogPostDAO.java
License:Apache License
public void addPostComment(final String name, final String email, final String body, final String permalink) {
    // XXX HW 3.3, Work Here
    // Hints:
    // - email is optional and may come in NULL. Check for that.
    // - best solution uses an update command to the database and a suitable
    //   operator to append the comment on to any existing list of comments
    DBObject post = findByPermalink(permalink);

    BasicDBObject comment = new BasicDBObject();
    comment = comment.append("author", name).append("body", body);
    if (email != null) {
        comment = comment.append("email", email);
    }

    BasicDBList comments = (BasicDBList) post.get("comments");
    comments.add(comment);
    postsCollection.save(post);
}
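The hints above describe an alternative that pushes the comment with an update command instead of re-saving the whole post. A minimal sketch of that approach using the $push operator (assuming the same postsCollection field and that posts are keyed by "permalink", as in the example above):

BasicDBObject comment = new BasicDBObject("author", name).append("body", body);
if (email != null) {
    // email is optional and may be null
    comment.append("email", email);
}
// $push appends the comment to the post's existing "comments" array in a single update
postsCollection.update(new BasicDBObject("permalink", permalink),
        new BasicDBObject("$push", new BasicDBObject("comments", comment)));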
From source file:hsenid.webapp.Login.java
/**
 * @param user Passing a user to validate username and password
 * @return status Returns whether the user passed the validation or not
 */
public static boolean ValidateByDB(User user) {
    boolean status = false;
    DBCursor cursor = null;
    try {
        DB userdata = DBCon.getConnection();
        DBCollection user_cred = (DBCollection) userdata.getCollection("user_cred");

        BasicDBObject query = new BasicDBObject();
        query.put("Name", user.getUsername());
        query.append("Pass", user.getPassword());

        BasicDBObject fields = new BasicDBObject("Pass", 0).append("_id", 0);
        cursor = user_cred.find(query, fields);
        status = cursor.hasNext();
    } catch (Exception e) {
        error = "Something bad happened. Try again later.";
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
    return status;
}
From source file:hulop.hokoukukan.utils.MongoAdapter.java
License:Open Source License
@Override
public JSONArray getLogs(String clientId, String start, String end, String skip, String limit, String event) {
    BasicDBObject query = new BasicDBObject();
    if (clientId != null) {
        query.append("client", clientId);
    }

    BasicDBObject timeRange = new BasicDBObject();
    if (start != null) {
        timeRange.append("$gte", Long.parseLong(start));
    }
    if (end != null) {
        timeRange.append("$lt", Long.parseLong(end));
    }
    if (timeRange.size() > 0) {
        query.append("timestamp", timeRange);
    }
    if (event != null) {
        query.append("event", event);
    }
    System.out.println(query.toString());

    DBCursor cursor = logCol.find(query);
    if (skip != null) {
        cursor = cursor.skip(Integer.parseInt(skip));
    }
    if (limit != null) {
        cursor = cursor.limit(Integer.parseInt(limit));
    }

    JSONArray result = new JSONArray();
    try {
        while (cursor.hasNext()) {
            result.add(new JSONObject(cursor.next().toString()));
        }
    } catch (JSONException e) {
        e.printStackTrace();
    }
    return result;
}
From source file:in.mtap.iincube.mongoapi.MongoReader.java
License:Apache License
private DBCursor getCursor() {
    DBCollection collection = collectionFactory.get();
    assertNotNull(queryObject, "findQuery == null");

    DBCursor cursor;
    if (fields != null) {
        // build a projection document: include each requested field
        BasicDBObject selectFields = new BasicDBObject();
        for (String field : fields) {
            selectFields.append(field, 1);
        }
        cursor = collection.find(queryObject, selectFields);
    } else {
        cursor = collection.find(queryObject);
    }
    if (skip > 0)
        cursor.skip(skip);
    if (limit > 0)
        cursor.limit(limit);
    if (sortObject != null)
        cursor.sort(sortObject);
    return cursor;
}
From source file:io.github.apfelcreme.LitePortals.Bungee.Database.MongoController.java
License:Open Source License
/**
 * Disables all portals on a given world.
 *
 * @param worldName the name of the world
 * @param server    the server the sender is on
 * @return the number of portals that were disabled
 */
public int disableWorld(String worldName, Server server) {
    DBCollection collection = MongoConnector.getInstance().getCollection();
    BasicDBObject query = new BasicDBObject("world", worldName);
    query.append("server", server.getAddress().getHostName() + "." + server.getAddress().getPort());
    query.append("enabled", true);

    DBCursor dbCursor = collection.find(query);
    int i = 0;
    while (dbCursor.hasNext()) {
        i++;
        DBObject portalObject = dbCursor.next();
        portalObject.put("enabled", false);
        PortalManager.getInstance().removePortal(UUID.fromString(portalObject.get("portal_id").toString()));
        collection.update(query, portalObject);
    }
    return i;
}
From source file:io.github.apfelcreme.LitePortals.Bungee.Database.MongoController.java
License:Open Source License
/**
 * Enables all portals on a given world if they are disabled.
 *
 * @param worldName the name of the world
 * @param server    the server the sender is on
 * @return the number of portals that were enabled
 */
public int enableWorld(String worldName, Server server) {
    DBCollection collection = MongoConnector.getInstance().getCollection();
    BasicDBObject query = new BasicDBObject("world", worldName);
    query.append("server", server.getAddress().getHostName() + "." + server.getAddress().getPort());
    query.append("enabled", false);

    DBCursor dbCursor = collection.find(query);
    int i = 0;
    while (dbCursor.hasNext()) {
        i++;
        DBObject portalObject = dbCursor.next();
        portalObject.put("enabled", true);
        collection.update(query, portalObject);
        PortalManager.getInstance().getPortals().add(createPortal(portalObject));
    }
    return i;
}
From source file:io.hawkcd.services.PipelineService.java
License:Apache License
@Override
@Authorization(scope = PermissionScope.PIPELINE, type = PermissionType.NONE)
public ServiceResult getAllPipelineArtifactDTOs(String searchCriteria, Integer numberOfPipelines, Integer skip, String pipelineId) {
    ServiceResult result = null;
    List<Pipeline> pipelines = null;
    List<Pipeline> filteredPipelines = null;

    switch (super.DATABASE_TYPE) {
    case REDIS:
        result = this.getAll();
        pipelines = (List<Pipeline>) result.getEntity();

        filteredPipelines = pipelines.stream()
                .filter(p -> p.getPipelineDefinitionName().toLowerCase().contains(searchCriteria.toLowerCase()))
                .sorted((p1, p2) -> p2.getStartTime().compareTo(p1.getStartTime()))
                .collect(Collectors.toList());

        int indexOfPipeline = this.getIndexOfPipeline(filteredPipelines, pipelineId);
        if (indexOfPipeline == -1) {
            filteredPipelines = filteredPipelines.stream().limit(numberOfPipelines)
                    .collect(Collectors.toList());
        } else {
            filteredPipelines = filteredPipelines.stream().skip(indexOfPipeline + 1).limit(numberOfPipelines)
                    .collect(Collectors.toList());
        }
        break;
    case MONGODB:
        BasicDBObject query = (BasicDBObject) new QueryBuilder().start().put("pipelineDefinitionName")
                .regex(Pattern.compile(searchCriteria, Pattern.CASE_INSENSITIVE)).get();

        BasicDBObject sortingFilter = new BasicDBObject("pipelineDefinitionId", -1);
        sortingFilter.append("executionId", -1);

        // check for null before calling isEmpty() to avoid a NullPointerException
        if (pipelineId == null || pipelineId.isEmpty() || pipelineId.equals("undefined")) {
            result = this.getPipelineMongoService().QueryExecutor(query, sortingFilter, 0, numberOfPipelines);
        } else {
            result = this.getPipelineMongoService().QueryExecutor(query, sortingFilter, skip, numberOfPipelines);
        }

        filteredPipelines = (List<Pipeline>) result.getEntity();
        break;
    }

    List<PipelineDto> pipelineDtos = new ArrayList<>();
    for (Pipeline pipeline : filteredPipelines) {
        PipelineDto pipelineDto = new PipelineDto();
        boolean isScrollCall = pipelineId.length() > 0;
        pipelineDto.constructArtifactPipelineDto(pipeline, isScrollCall);
        pipelineDtos.add(pipelineDto);
    }
    result.setEntity(pipelineDtos);

    return result;
}
From source file:io.liveoak.mongo.gridfs.GridFSDirectoryResource.java
License:Open Source License
@Override
public Resource member(RequestContext ctx, String id) {
    GridFSResourcePath childPath = path().append(id);
    if (childPath.equals(ctx.resourcePath())) {
        // there are no more intermediary segments - this is the last parent,
        // here we lookup / generate the target GridFS file
        LinkedList<ResourcePath.Segment> segments = new LinkedList(ctx.resourcePath().segments());

        // skip app
        segments.removeFirst();
        // skip gridfsroot
        segments.removeFirst();

        // init meta
        boolean meta = segments.getLast().matrixParameters().containsKey("meta");

        DBCollection col = getUserspace().getFilesCollection();
        DBObject result = null;
        GridFSDBObject last = null;
        int count = 0;
        for (ResourcePath.Segment segment : segments) {
            count++;
            // first segment represents root - root file has empty string for a name, and null parent
            String name = count == 1 ? "" : segment.name();
            ObjectId parentId = count == 1 ? null : last.getId();
            result = col.findOne(new BasicDBObject("filename", name).append("parent", parentId));
            if (result == null) {
                if (ctx.requestType() == RequestType.UPDATE) {
                    // create fileInfo for current segment
                    BasicDBObject fileInfo = new BasicDBObject("filename", name).append("owner",
                            ctx.securityContext().getSubject());
                    if (last != null) {
                        fileInfo.append("parent", last.getId());
                    }
                    // insert for directories but not for files
                    // files get inserted via GridFS API in GridFSBlobResource
                    if (count < segments.size()) {
                        fileInfo.append("dir", true);
                        // autocreate missing parent directories when putting a blob
                        col.insert(fileInfo);
                    }
                    result = fileInfo;
                } else {
                    return null;
                }
            }
            last = new GridFSDBObject(result);
        }

        // finally we got to the fileInfo representing the target resource
        if (last.isTrue("dir")) {
            // if target resource represents a directory
            return newChildDir(path(), last);
        } else {
            // if file
            if (meta) {
                // if last segment has matrix parameter 'meta' return meta info instead of blob content
                return newChildItem(last);
            } else {
                // if no ;meta, then return a blob
                return new GridFSBlobResource(ctx, this, id, last, childPath);
            }
        }
    } else if (childPath.segments().size() == ctx.resourcePath().segments().size()) {
        return null;
    } else {
        // pass-through segment
        return new GridFSDirectoryResource(ctx, this, id, childPath);
    }
}