List of usage examples for com.mongodb.BasicDBList
From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License: Open Source License
/**
 * Setup detail page.
 *
 * @param sModule the module
 * @param projectId the proj id
 * @param variantId the variant id
 * @param selectedIndividuals the selected individuals
 * @return the model and view
 * @throws Exception the exception
 */
@RequestMapping(value = variantDetailsURL, method = RequestMethod.GET)
protected ModelAndView setupDetailPage(@RequestParam("module") String sModule,
        @RequestParam("project") int projectId, @RequestParam("variantId") String variantId,
        @RequestParam("individuals") String selectedIndividuals) throws Exception {
    ModelAndView mav = new ModelAndView();
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    GenotypingProject project = mongoTemplate.findById(projectId, GenotypingProject.class);
    mav.addObject("project", project);

    List<String> selectedIndividualList = Arrays.asList(selectedIndividuals.split(";"));
    HashMap<Integer, String> sampleToIndividualMap = new LinkedHashMap<Integer, String>();
    HashMap<String, Boolean> individualMap = new LinkedHashMap<String, Boolean>();
    for (String ind : listIndividualsInAlphaNumericOrder(sModule, projectId)) {
        for (Integer sampleIndex : project.getIndividualSampleIndexes(ind))
            sampleToIndividualMap.put(sampleIndex, ind);
        individualMap.put(ind, selectedIndividuals.length() == 0 || selectedIndividualList.contains(ind));
    }
    mav.addObject("individualMap", individualMap);

    HashMap<Integer, List<String>> sampleIDsByProject = new HashMap<Integer, List<String>>();
    sampleIDsByProject.put(projectId, selectedIndividualList);

    VariantData var = mongoTemplate.findById(variantId, VariantData.class);
    mav.addObject("variantType", var.getType());
    mav.addObject("refPos", var.getReferencePosition());

    Map<String /* run */, Map<String /* individual */, List<Comparable /* cell value */>>> dataByRun = new TreeMap<String, Map<String, List<Comparable>>>(
            new AlphaNumericComparator());
    Map<String /* run */, Map<String /* info field */, Object>> additionalInfoByRun = new TreeMap<String, Map<String, Object>>(
            new AlphaNumericComparator());
    Map<String /* run */, Map<String /* info field */, VCFInfoHeaderLine>> additionalInfoDescByRun = new HashMap<String, Map<String, VCFInfoHeaderLine>>();
    List<String> headerCols = new ArrayList<String>();
    List<String> headerColDescs = new ArrayList<String>();

    List<Criteria> crits = new ArrayList<Criteria>();
    crits.add(Criteria.where("_id." + VariantRunDataId.FIELDNAME_PROJECT_ID).is(projectId));
    crits.add(Criteria.where("_id." + VariantRunDataId.FIELDNAME_VARIANT_ID).is(var.getId()));
    List<VariantRunData> runs = mongoTemplate.find(
            new Query(new Criteria().andOperator(crits.toArray(new Criteria[crits.size()]))),
            VariantRunData.class);
    for (VariantRunData run : runs) {
        DBVCFHeader vcfHeader = null;
        BasicDBList andList = new BasicDBList();
        andList.add(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId));
        andList.add(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_RUN, run.getRunName()));
        DBCursor headerCursor = mongoTemplate
                .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class))
                .find(new BasicDBObject("$and", andList));
        if (headerCursor.size() > 0 && headerCols.isEmpty()) {
            vcfHeader = DBVCFHeader.fromDBObject(headerCursor.next());
            headerCursor.close();
        }

        Map<String /* individual */, List<Comparable /* cell value */>> genotypeRows = new TreeMap<String, List<Comparable>>(
                new AlphaNumericComparator());
        additionalInfoByRun.put(run.getRunName(), run.getAdditionalInfo());
        if (vcfHeader != null)
            additionalInfoDescByRun.put(run.getRunName(), vcfHeader.getmInfoMetaData());
        dataByRun.put(run.getRunName(), genotypeRows);
        for (Integer sample : run.getSampleGenotypes().keySet()) {
            SampleGenotype sg = run.getSampleGenotypes().get(sample);
            List<Comparable> genotypeRow = new ArrayList<Comparable>();
            genotypeRows.put(sampleToIndividualMap.get(sample), genotypeRow);
            genotypeRow.add(sg.getCode());
            for (String gtInfo : sg.getAdditionalInfo().keySet()) {
                if (!headerCols.contains(gtInfo) /* exclude some fields that we don't want to show */
                        && !gtInfo.equals(VariantData.GT_FIELD_PHASED_GT)
                        && !gtInfo.equals(VariantData.GT_FIELD_PHASED_ID)
                        && !gtInfo.equals(VariantRunData.FIELDNAME_ADDITIONAL_INFO_EFFECT_GENE)
                        && !gtInfo.equals(VariantRunData.FIELDNAME_ADDITIONAL_INFO_EFFECT_NAME)) {
                    headerCols.add(gtInfo);
                    headerColDescs.add(vcfHeader != null
                            ? ((VCFFormatHeaderLine) vcfHeader.getmFormatMetaData().get(gtInfo)).getDescription()
                            : "");
                }
                if (!headerCols.contains(gtInfo))
                    continue;

                int cellIndex = headerCols.indexOf(gtInfo);
                while (genotypeRow.size() < cellIndex + 2)
                    genotypeRow.add(null);
                genotypeRow.set(cellIndex + 1, sg.getAdditionalInfo().get(gtInfo));
            }
        }
    }
    mav.addObject("headerAdditionalInfo", headerCols);
    mav.addObject("headerAdditionalInfoDesc", headerColDescs);
    mav.addObject("runAdditionalInfo", additionalInfoByRun);
    mav.addObject("runAdditionalInfoDesc", additionalInfoDescByRun);
    mav.addObject("dataByRun", dataByRun);
    mav.addObject("knownAlleles", var.getKnownAlleleList());
    return mav;
}
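The BasicDBList usage to note above is collecting the clauses of a $and query. A minimal sketch of that idiom, assuming you already have a legacy-driver (DBObject API) DBCollection handle; the nested field names below are illustrative placeholders, not the project's real constants:

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;

public class AndQuerySketch {
    /** Finds a document matching both clauses: { $and: [ { "_id.pj": projectId }, { "_id.rn": runName } ] }. */
    static DBObject findHeader(DBCollection headers, int projectId, String runName) {
        BasicDBList andList = new BasicDBList();
        andList.add(new BasicDBObject("_id.pj", projectId)); // placeholder project-id field
        andList.add(new BasicDBObject("_id.rn", runName));   // placeholder run-name field
        DBCursor cursor = headers.find(new BasicDBObject("$and", andList));
        try {
            return cursor.hasNext() ? cursor.next() : null;
        } finally {
            cursor.close();
        }
    }
}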
From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License: Open Source License
/**
 * Selection density.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the proj id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the genotype pattern
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param processID the process id
 * @param displayedSequence the displayed sequence
 * @param displayedRangeMin the displayed range min
 * @param displayedRangeMax the displayed range max
 * @param displayedRangeIntervalCount the displayed range interval count
 * @param displayedVariantType the displayed variant type
 * @return the map
 * @throws Exception the exception
 */
@RequestMapping(selectionDensityDataURL)
protected @ResponseBody Map<Long, Long> selectionDensity(HttpServletRequest request,
        @RequestParam("module") String sModule, @RequestParam("project") int projId,
        @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") int genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") int readDepthThreshold,
        @RequestParam("missingData") double missingData, @RequestParam("minmaf") Float minmaf,
        @RequestParam("maxmaf") Float maxmaf, @RequestParam("minposition") Long minposition,
        @RequestParam("maxposition") Long maxposition, @RequestParam("alleleCount") String alleleCount,
        @RequestParam("geneName") String geneName, @RequestParam("variantEffects") String variantEffects,
        @RequestParam("processID") String processID,
        @RequestParam("displayedSequence") String displayedSequence,
        @RequestParam(required = false, value = "displayedRangeMin") Long displayedRangeMin,
        @RequestParam(required = false, value = "displayedRangeMax") Long displayedRangeMax,
        @RequestParam(required = false, value = "displayedRangeIntervalCount") final Integer displayedRangeIntervalCount,
        @RequestParam(required = false, value = "displayedVariantType") String displayedVariantType)
        throws Exception {
    long before = System.currentTimeMillis();
    String token = processID.substring(1 + processID.indexOf('|'));

    ProgressIndicator progress = new ProgressIndicator(token,
            new String[] { "Calculating " + (displayedVariantType != null ? displayedVariantType + " " : "")
                    + "variant density on sequence " + displayedSequence });
    ProgressIndicator.registerProgressIndicator(progress);

    final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    long count = countVariants(request, sModule, projId, selectedVariantTypes, selectedSequences,
            selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData, minmaf,
            maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects,
            "" /* if we pass exportID then the progress indicator is going to be replaced by another, and we don't need it for counting since we cache count values */);
    DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, token, false);
    // boolean fStillGotUnwantedTempVariants = count < tmpVarColl.count();
    long nTempVarCount = mongoTemplate.count(new Query(), tmpVarColl.getName());
    final boolean fWorkingOnFullDataset = mongoTemplate.count(null, VariantData.class) == count;
    if (!fWorkingOnFullDataset && nTempVarCount == 0) {
        progress.setError(MESSAGE_TEMP_RECORDS_NOT_FOUND);
        return null;
    }
    final String actualCollectionName = fWorkingOnFullDataset
            ? mongoTemplate.getCollectionName(VariantData.class)
            : tmpVarColl.getName();

    if (displayedRangeMin == null || displayedRangeMax == null) {
        BasicDBList matchAndList = new BasicDBList();
        matchAndList.add(new BasicDBObject(
                VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                displayedSequence));
        if (displayedVariantType != null)
            matchAndList.add(new BasicDBObject(VariantData.FIELDNAME_TYPE, displayedVariantType));
        BasicDBObject match = new BasicDBObject("$match", new BasicDBObject("$and", matchAndList));
        BasicDBObject groupFields = new BasicDBObject("_id", null);
        groupFields.put("min", new BasicDBObject("$min", "$"
                + (VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE)));
        groupFields.put("max", new BasicDBObject("$max", "$"
                + (VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE)));
        BasicDBObject group = new BasicDBObject("$group", groupFields);
        List<DBObject> pipeline = new ArrayList<DBObject>();
        pipeline.add(match);
        pipeline.add(group);
        Iterator<DBObject> iterator = mongoTemplate.getCollection(actualCollectionName).aggregate(pipeline)
                .results().iterator();
        if (!iterator.hasNext()) {
            progress.markAsComplete();
            return null; // no variants found matching filter
        }

        DBObject aggResult = (DBObject) iterator.next();
        if (displayedRangeMin == null)
            displayedRangeMin = (Long) aggResult.get("min");
        if (displayedRangeMax == null)
            displayedRangeMax = (Long) aggResult.get("max");
    }

    final AtomicInteger finishedThreadCount = new AtomicInteger(0),
            nTotalTreatedVariantCountfinishedThreadCount = new AtomicInteger(0);
    final ConcurrentHashMap<Long, Long> result = new ConcurrentHashMap<Long, Long>();
    final int intervalSize = Math.max(1,
            (int) ((displayedRangeMax - displayedRangeMin) / displayedRangeIntervalCount));
    final ArrayList<Thread> threadsToWaitFor = new ArrayList<Thread>();
    final long rangeMin = displayedRangeMin;
    final ProgressIndicator finalProgress = progress;
    for (int i = 0; i < displayedRangeIntervalCount; i++) {
        List<Criteria> crits = new ArrayList<Criteria>();
        crits.add(Criteria
                .where(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE)
                .is(displayedSequence));
        // if (fStillGotUnwantedTempVariants)
        //     crits.add(Criteria.where(VariantData.FIELDNAME_VERSION).exists(true));
        if (displayedVariantType != null)
            crits.add(Criteria.where(VariantData.FIELDNAME_TYPE).is(displayedVariantType));
        String startSitePath = VariantData.FIELDNAME_REFERENCE_POSITION + "."
                + ReferencePosition.FIELDNAME_START_SITE;
        crits.add(Criteria.where(startSitePath).gte(displayedRangeMin + (i * intervalSize)));
        if (i < displayedRangeIntervalCount - 1)
            crits.add(Criteria.where(startSitePath).lt(displayedRangeMin + ((i + 1) * intervalSize)));
        else
            crits.add(Criteria.where(startSitePath).lte(displayedRangeMax));
        final Query query = new Query(new Criteria().andOperator(crits.toArray(new Criteria[crits.size()])));
        final long chunkIndex = i;

        Thread t = new Thread() {
            public void run() {
                if (!finalProgress.hasAborted()) {
                    long partialCount = mongoTemplate.count(query, actualCollectionName);
                    nTotalTreatedVariantCountfinishedThreadCount.addAndGet((int) partialCount);
                    result.put(rangeMin + (chunkIndex * intervalSize), partialCount);
                    finalProgress.setCurrentStepProgress(
                            (short) (finishedThreadCount.incrementAndGet() * 100 / displayedRangeIntervalCount));
                }
            }
        };

        if (chunkIndex % NUMBER_OF_SIMULTANEOUS_QUERY_THREADS == (NUMBER_OF_SIMULTANEOUS_QUERY_THREADS - 1))
            t.run(); // run synchronously
        else {
            threadsToWaitFor.add(t);
            t.start(); // run asynchronously for better speed
        }
    }

    if (progress.hasAborted())
        return null;

    for (Thread t : threadsToWaitFor) // wait for all threads before moving to next phase
        t.join();
    progress.setCurrentStepProgress(100);

    LOG.debug("selectionDensity treated " + nTotalTreatedVariantCountfinishedThreadCount.get() + " variants in "
            + (System.currentTimeMillis() - before) / 1000f + "s");
    progress.markAsComplete();
    return new TreeMap<Long, Long>(result);
}
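The range-detection step above wraps its $match conditions in a BasicDBList under $and and then groups to get the min and max position. A compact sketch of that two-stage pipeline with the legacy driver's aggregate() overload; the field names are placeholders:

import java.util.Arrays;
import java.util.List;

import com.mongodb.AggregationOutput;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;

public class MinMaxAggregationSketch {
    /** Returns a document with "min" and "max" of posField among matching documents, or null if nothing matched. */
    static DBObject minMax(DBCollection coll, String seqField, String seqValue, String posField) {
        // $match stage: all conditions collected in a BasicDBList under $and.
        BasicDBList matchAndList = new BasicDBList();
        matchAndList.add(new BasicDBObject(seqField, seqValue));
        DBObject match = new BasicDBObject("$match", new BasicDBObject("$and", matchAndList));

        // $group stage: a single group (_id: null) keeping the min and max of the position field.
        BasicDBObject groupFields = new BasicDBObject("_id", null);
        groupFields.put("min", new BasicDBObject("$min", "$" + posField));
        groupFields.put("max", new BasicDBObject("$max", "$" + posField));
        DBObject group = new BasicDBObject("$group", groupFields);

        List<DBObject> pipeline = Arrays.asList(match, group);
        AggregationOutput output = coll.aggregate(pipeline);
        for (DBObject res : output.results())
            return res;
        return null;
    }
}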
From source file: GeoHazardServices.Inst.java
License: Apache License
private String _computeById(User user, String evtid, Integer dur, Integer accel, Integer gridres, String algo) {
    DBObject eq = db.getCollection("eqs").findOne(new BasicDBObject("_id", evtid));
    if (eq == null)
        return null;

    BasicDBObject process = new BasicDBObject("process", new BasicDBList());
    BasicDBObject set = new BasicDBObject("$set", process);
    db.getCollection("eqs").update(eq, set);

    /* extract properties to pass them to the request method */
    BasicDBObject prop = (BasicDBObject) eq.get("prop");
    double lat = prop.getDouble("latitude");
    double lon = prop.getDouble("longitude");
    double dip = prop.getDouble("dip");
    double strike = prop.getDouble("strike");
    double rake = prop.getDouble("rake");
    double depth = prop.getDouble("depth");
    Date date = prop.getDate("date");

    EQParameter eqp;
    double mag = 0.0;
    double slip = 0.0;
    double length = 0.0;
    double width = 0.0;
    if (prop.get("magnitude") == null) {
        slip = prop.getDouble("slip");
        length = prop.getDouble("length");
        width = prop.getDouble("width");
        eqp = new EQParameter(lon, lat, slip, length, width, depth, dip, strike, rake, date);
    } else {
        mag = prop.getDouble("magnitude");
        eqp = new EQParameter(lon, lat, mag, depth, dip, strike, rake, date);
    }

    if (accel == null)
        accel = 1;

    /* start request */
    EQTask task = new EQTask(eqp, evtid, user, dur, accel, gridres);
    task.algo = algo;
    task.setSlots(IScheduler.SLOT_NORMAL, IScheduler.SLOT_EXCLUSIVE);
    return request(evtid, task);
}
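Here the BasicDBList simply resets the document's process field to an empty array via $set. A minimal sketch of that update, assuming an existing legacy-driver DBCollection handle and an illustrative event id parameter:

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;

public class ResetArraySketch {
    /** Clears the "process" array of the document matching the given id. */
    static void clearProcessList(DBCollection eqs, String eventId) {
        BasicDBObject query = new BasicDBObject("_id", eventId);
        // An empty BasicDBList is stored as an empty BSON array.
        BasicDBObject set = new BasicDBObject("$set", new BasicDBObject("process", new BasicDBList()));
        eqs.update(query, set);
    }
}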
From source file: GeoHazardServices.Inst.java
License: Apache License
@POST @Path("/computeById") @Produces(MediaType.APPLICATION_JSON)/*from w w w.j a v a 2 s .c o m*/ public String computeById(@Context HttpServletRequest request, @FormParam("inst") String inst, @FormParam("secret") String secret, @FormParam("id") String id, @FormParam("refineId") Long refineId, @FormParam("dur") Integer dur, @FormParam("accel") Integer accel, @FormParam("apikey") String apikey, @FormParam("evtid") String evtid, @FormParam("raw") @DefaultValue("0") Integer raw, @FormParam("gridres") Integer gridres, @FormParam("dt_out") @DefaultValue("10") Integer dt_out, @FormParam("algo") @DefaultValue("easywave") String algo) { /* Check for invalid parameter configurations. */ if ((inst != null || secret != null) && apikey != null) return jsfailure("Don't mix 'apikey' and 'secret'."); /* Support 'inst' and 'secret' for compatibility reasons. */ if (inst != null && secret != null) { /* Obtain the 'apikey' and pretend a call to the new api. */ DBObject query = new BasicDBObject("name", inst).append("secret", secret); DBObject tmp_inst = db.getCollection("institutions").findOne(query); if (tmp_inst == null) return jsdenied(); apikey = (String) ((DBObject) tmp_inst.get("api")).get("key"); if (apikey == null) return jsfailure("No 'apikey' set for this institution!"); } /* Authenticate user. */ DBObject db_user = auth_api(apikey, "user"); DBObject db_inst = auth_api(apikey, "inst"); User user; if (db_user != null) { user = new User(db_user, getInst(db_user)); } else if (db_inst != null) { user = new Inst(db_inst); } else { return jsdenied(); } /* Check for invalid parameter configurations. */ if ((id != null || refineId != null) && evtid != null) return jsfailure("Don't mix 'id' and 'evtid'."); if (evtid == null) evtid = new CompId(inst, id, refineId).toString(); /* Check for missing parameters */ if (evtid == null) return jsfailure("Missing parameter."); /* search for given id */ BasicDBObject query = new BasicDBObject("_id", evtid).append("user", user.objId); DBObject entry = db.getCollection("eqs").findOne(query); /* return if id not found */ if (entry == null) return jsfailure("Event ID not found."); /* check if already computed */ Integer progress = _status(evtid, raw); if (progress != STATUS_NO_COMP) { if (raw == 0) return jsfailure("Re-computation not allowed."); if (progress != 100) return jsfailure("A computation is currently running."); } /* Use same duration as in original simulation if available. */ if (dur == null) { Number n = (Number) getField(entry, "process.0.simTime"); /* Duration could not be determined. */ if (n == null) return jsfailure("Missing parameter."); dur = n.intValue(); } /* Use grid resolution of original computation or default to 120 seconds. */ if (gridres == null) { Number res = (Number) getField(entry, "process.0.resolution"); gridres = res == null ? 
120 : (int) (res.doubleValue() * 60); } /* get properties of returned entry */ BasicDBObject prop = (BasicDBObject) entry.get("prop"); BasicDBObject process = new BasicDBObject("raw_progress", 0); if (raw == 0) { process.append("process", new BasicDBList()); } BasicDBObject set = new BasicDBObject("$set", process); db.getCollection("eqs").update(entry, set); /* extract properties to pass them to the request method */ double lat = prop.getDouble("latitude"); double lon = prop.getDouble("longitude"); double mag = prop.getDouble("magnitude"); double dip = prop.getDouble("dip"); double strike = prop.getDouble("strike"); double rake = prop.getDouble("rake"); double depth = prop.getDouble("depth"); Date date = prop.getDate("date"); if (accel == null) accel = 1; /* prepare the simulation for execution */ EQParameter eqp = new EQParameter(lon, lat, mag, depth, dip, strike, rake, date); EQTask task = new EQTask(eqp, evtid, user, dur, accel, gridres); task.raw = raw; task.dt_out = dt_out; task.algo = algo; String ret_id = request(evtid, task); return jssuccess(new BasicDBObject("_id", ret_id)); }
From source file: GeoHazardServices.Inst.java
License: Apache License
private DBObject getUserObj(String username) {
    DBCollection coll = db.getCollection("users");
    DBCursor cursor = coll.find(new BasicDBObject("username", username));
    if (!cursor.hasNext())
        return null;

    DBObject obj = cursor.next();
    cursor.close();

    BasicDBObject userObj = new BasicDBObject("username", obj.get("username"));
    userObj.put("_id", obj.get("_id"));
    userObj.put("permissions", obj.get("permissions"));
    userObj.put("properties", obj.get("properties"));
    userObj.put("notify", obj.get("notify"));
    userObj.put("api", obj.get("api"));

    ObjectId instId = (ObjectId) obj.get("inst");
    cursor = db.getCollection("institutions").find(new BasicDBObject("_id", instId));
    String instName = null;
    if (cursor.hasNext()) {
        DBObject inst = cursor.next();
        inst.removeField("_id");
        inst.removeField("secret");
        userObj.put("inst", inst);
        instName = (String) inst.get("name");
    }
    cursor.close();

    if (instName == null || instName.equals("gfz") || instName.equals("tdss15"))
        instName = "gfz_ex_test";

    /* get all available country codes and count elements in each group */
    DBObject groupFields = new BasicDBObject("_id", "$country");
    groupFields.put("count", new BasicDBObject("$sum", 1));
    DBObject group = new BasicDBObject("$group", groupFields);

    BasicDBList types = new BasicDBList();
    types.add(new BasicDBObject("sensor", "rad"));
    types.add(new BasicDBObject("sensor", "prs"));
    types.add(new BasicDBObject("sensor", "pr1"));
    types.add(new BasicDBObject("sensor", "flt"));
    types.add(new BasicDBObject("sensor", null));
    DBObject filterFields = new BasicDBObject("$or", types);

    BasicDBList andList = new BasicDBList();
    andList.add(filterFields);
    andList.add(new BasicDBObject("inst", instName));
    DBObject andObj = new BasicDBObject("$and", andList);
    DBObject filter = new BasicDBObject("$match", andObj);

    /* sort alphabetically */
    DBObject sortFields = new BasicDBObject("_id", 1);
    DBObject sort = new BasicDBObject("$sort", sortFields);

    AggregationOutput output = db.getCollection("stations").aggregate(filter, group, sort);
    BasicDBList countries = new BasicDBList();

    /* convert answer into string list */
    @SuppressWarnings("unchecked")
    List<String> clist = (List<String>) obj.get("countries");

    for (DBObject res : output.results()) {
        String code = (String) res.get("_id");
        if (code == null)
            continue;
        boolean isOn = (clist != null) && clist.contains(code);
        res.put("on", isOn);
        countries.add(res);
    }
    userObj.put("countries", countries);
    return userObj;
}
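The filter built above nests one BasicDBList ($or over sensor types) inside another ($and with the institution clause). A minimal sketch of just that construction; the sensor values and field names are copied from the example and serve as placeholders:

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class OrAndFilterSketch {
    /** Builds { $and: [ { $or: [ { sensor: "rad" }, { sensor: "prs" } ] }, { inst: instName } ] }. */
    static DBObject buildSensorFilter(String instName) {
        BasicDBList sensorTypes = new BasicDBList();
        sensorTypes.add(new BasicDBObject("sensor", "rad"));
        sensorTypes.add(new BasicDBObject("sensor", "prs"));

        BasicDBList andList = new BasicDBList();
        andList.add(new BasicDBObject("$or", sensorTypes));   // any of the listed sensor types
        andList.add(new BasicDBObject("inst", instName));     // restricted to one institution

        return new BasicDBObject("$and", andList);
    }
}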
From source file: GeoHazardServices.Inst.java
License: Apache License
@POST @Path("/update") @Produces(MediaType.APPLICATION_JSON)//from www.ja v a 2 s.c om public String update(@Context HttpServletRequest request, @FormParam("ts") String ts, @FormParam("delay") @DefaultValue("0") int delay, @CookieParam("server_cookie") String session) { /* check session key and find out if the request comes from an authorized user */ User user = signedIn(session); /* create lists for general and user specific earthquake entries */ ArrayList<DBObject> mlist = new ArrayList<DBObject>(); ArrayList<DBObject> ulist = new ArrayList<DBObject>(); ArrayList<DBObject> evtsets = new ArrayList<DBObject>(); /* used to convert to desired time format used by MongoDB */ SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); sdf.setTimeZone(TimeZone.getTimeZone("UTC")); /* convert timestamp from String to Date; return on error */ Date timestamp; try { timestamp = sdf.parse(ts); } catch (ParseException e) { e.printStackTrace(); return null; } /* select collection which contain the events */ DBCollection coll = db.getCollection("events"); /* create list of DB objects that contains all desired users */ BasicDBList users = new BasicDBList(); // for( User curUser: institutions.values() ) // users.add( new BasicDBObject( "user", curUser.objId ) ); if (user != null) { users.add(new BasicDBObject("user", user.objId)); if (user.inst != null) { users.add(new BasicDBObject("user", institutions.get(user.inst).objId)); } else { users.add(new BasicDBObject("user", institutions.get("gfz").objId)); } DBCursor csr = db.getCollection("users").find( new BasicDBObject("username", user.name).append("provider", new BasicDBObject("$ne", null))); if (csr.hasNext()) { for (Object p : (BasicDBList) csr.next().get("provider")) { users.add(new BasicDBObject("user", p)); } } } /* return only entries that are older than 'delay' minutes */ Date upperTimeLimit = new Date(System.currentTimeMillis() - delay * 60 * 1000); /* create DB query - search for newer events related to the general list or the user */ BasicDBList time = new BasicDBList(); time.add(new BasicDBObject("timestamp", new BasicDBObject("$gt", timestamp))); BasicDBObject inQuery = new BasicDBObject("$and", time); inQuery.put("$or", users); boolean first = true; Map<String, List<DBObject>> lists = new HashMap<String, List<DBObject>>(); for (Map.Entry<String, IDataProvider> entry : providers.entrySet()) { lists.put(entry.getKey(), new ArrayList<DBObject>()); } /* walk through the returned entries */ if (user != null) { /* query DB, sort the results by timestamp */ DBCursor cursor = coll.find(inQuery).sort(new BasicDBObject("timestamp", -1)); for (DBObject obj : cursor) { if (first) { timestamp = (Date) obj.get("timestamp"); first = false; } /* get corresponding entry from earthquake collection */ String id = (String) obj.get("id"); BasicDBObject objQuery = new BasicDBObject(); objQuery.put("olduser", new BasicDBObject("$exists", false)); if (delay > 0) objQuery.put("prop.date", new BasicDBObject("$lt", upperTimeLimit)); DBObject obj2 = null; if (obj.get("event").equals("msg_sent")) { objQuery.put("Message-ID", id); obj2 = db.getCollection("messages_sent").findOne(objQuery); } else if (obj.get("event").equals("msg_recv")) { objQuery.put("Message-ID", id); obj2 = db.getCollection("messages_received").findOne(objQuery); } else if (obj.get("event").equals("new_evtset")) { objQuery.put("_id", id); obj2 = db.getCollection("evtsets").findOne(objQuery); } else { objQuery.put("_id", id); obj2 = db.getCollection("eqs").findOne(objQuery); if (obj2 
== null) obj2 = db.getCollection("evtsets").findOne(objQuery); } for (Map.Entry<String, IDataProvider> entry : providers.entrySet()) { entry.getValue().add(lists.get(entry.getKey()), obj); } /* */ if (obj2 != null) { /* add event type to entry */ String event = (String) obj.get("event"); obj2.put("event", event); if (obj.get("event").equals("msg_recv")) { obj2.put("Dir", "in"); obj2.put("To", new String[] { user.name }); DBCursor csrUser = db.getCollection("users") .find(new BasicDBObject("_id", obj2.get("SenderID"))); if (csrUser.hasNext()) obj2.put("From", (String) csrUser.next().get("username")); DBCursor csrParent = db.getCollection("eqs") .find(new BasicDBObject("_id", obj2.get("ParentId"))); if (csrParent.hasNext()) obj2.put("parentEvt", csrParent.next()); } if (event.equals("new_evtset")) { evtsets.add(obj2); } else { /* check if entry belongs to general or user specific list */ if (user != null && obj.get("user").equals(user.objId)) { ulist.add(obj2); } else { mlist.add(obj2); } } /* update timestamp */ /* TODO: this is just a temporary solution, because progress events could be delivered multiple times */ if (delay <= 0 || event.equals("new")) { if (first) { timestamp = (Date) obj.get("timestamp"); first = false; } } } } /* clean up query */ cursor.close(); } /* create new JSON object that can be used directly within JavaScript */ JsonObject jsonObj = new JsonObject(); jsonObj.addProperty("serverTime", sdf.format(new Date())); jsonObj.addProperty("ts", sdf.format(timestamp)); jsonObj.add("main", gson.toJsonTree(mlist)); jsonObj.add("user", gson.toJsonTree(ulist)); jsonObj.add("evtsets", gson.toJsonTree(evtsets)); for (Map.Entry<String, IDataProvider> entry : providers.entrySet()) { jsonObj.add(entry.getKey(), gson.toJsonTree(lists.get(entry.getKey()))); } return jsonObj.toString(); }
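The query above combines one BasicDBList under $and (the timestamp condition) with a second BasicDBList under $or (the set of accepted users). A compact sketch of that construction with placeholder field names and a varargs list of user ids:

import java.util.Date;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class TimeAndUsersQuerySketch {
    /** Builds { $and: [ { timestamp: { $gt: since } } ], $or: [ { user: id1 }, { user: id2 }, ... ] }. */
    static DBObject buildQuery(Date since, Object... userIds) {
        BasicDBList time = new BasicDBList();
        time.add(new BasicDBObject("timestamp", new BasicDBObject("$gt", since)));

        BasicDBList users = new BasicDBList();
        for (Object id : userIds)
            users.add(new BasicDBObject("user", id));

        BasicDBObject query = new BasicDBObject("$and", time);
        query.put("$or", users);
        return query;
    }
}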
From source file: GeoHazardServices.Inst.java
License: Apache License
@POST @Path("/search") @Produces(MediaType.APPLICATION_JSON)//from w ww . j a v a 2s .c o m public String search(@Context HttpServletRequest request, @FormParam("text") String text, @CookieParam("server_cookie") String session) { /* check session key and find out if the request comes from an authorized user */ User user = signedIn(session); /* create list of DB objects that contains all desired users */ BasicDBList users = new BasicDBList(); for (User curUser : institutions.values()) users.add(new BasicDBObject("user", curUser.objId)); if (user != null) users.add(new BasicDBObject("user", user.objId)); DBCollection coll = db.getCollection("eqs"); DBCollection msgColl = db.getCollection("messages_sent"); DBCollection recvColl = db.getCollection("messages_received"); DBCollection evtsetColl = db.getCollection("evtsets"); List<DBObject> refinements = coll.find(new BasicDBObject("id", text)).toArray(); BasicDBList list = new BasicDBList(); list.add(new BasicDBObject("_id", text)); list.add(new BasicDBObject("id", text)); list.add(new BasicDBObject("root", text)); list.add(new BasicDBObject("parent", text)); for (DBObject obj : refinements) { String compId = (String) obj.get("_id"); list.add(new BasicDBObject("root", compId)); list.add(new BasicDBObject("parent", compId)); } BasicDBList and = new BasicDBList(); and.add(new BasicDBObject("$or", list)); and.add(new BasicDBObject("$or", users)); BasicDBObject inQuery = new BasicDBObject("$and", and); BasicDBObject sort = new BasicDBObject("timestamp", -1); sort.put("prop.date", -1); DBCursor cursor = coll.find(inQuery).sort(sort); List<DBObject> results = new ArrayList<DBObject>(); results.addAll(cursor.toArray()); cursor.close(); /* TODO: generalize field names */ list = new BasicDBList(); list.add(new BasicDBObject("EventID", text)); list.add(new BasicDBObject("ParentId", text)); for (DBObject obj : refinements) { String compId = (String) obj.get("_id"); list.add(new BasicDBObject("EventID", compId)); list.add(new BasicDBObject("ParentId", compId)); } and = new BasicDBList(); and.add(new BasicDBObject("$or", list)); and.add(new BasicDBObject("SenderID", user.objId)); inQuery = new BasicDBObject("$and", and); cursor = msgColl.find(inQuery).sort(new BasicDBObject("CreatedTime", -1)); for (DBObject obj : cursor) { obj.put("kind", "msg"); obj.put("Dir", "out"); results.add(obj); } cursor.close(); and = new BasicDBList(); and.add(new BasicDBObject("$or", list)); and.add(new BasicDBObject("ReceiverID", user.objId)); inQuery = new BasicDBObject("$and", and); cursor = recvColl.find(inQuery).sort(new BasicDBObject("CreatedTime", -1)); for (DBObject obj : cursor) { obj.put("kind", "msg"); obj.put("Dir", "in"); results.add(obj); } cursor.close(); DBObject evtset = evtsetColl.findOne(new BasicDBObject("_id", text)); if (evtset != null) { List<DBObject> evts = coll.find(new BasicDBObject("id", new BasicDBObject("$in", evtset.get("evtids")))) .toArray(); results.addAll(evts); } /* returning only cursor.toArray().toString() makes problems with the date fields */ return gson.toJsonTree(results).toString(); }
From source file: gr.ntua.ivml.awareness.persistent.DigitalStory.java
License: Creative Commons License
public DBObject getBsonForSolr() {
    try {
        DBObject bsonDs = MongoDB.getMorphia().getMapper().toDBObject(this);
        List<StoryObject> lso = MongoDB.getStoryObjectDAO().getStoryObjectsByPlaceHolders(getStoryObjects());
        BasicDBList bsonStories = new BasicDBList();
        for (StoryObject so : lso) {
            if (so != null) {
                bsonStories.add(so.getBsonForSolr());
            }
        }
        bsonDs.removeField("storyObjects");
        bsonDs.put("storyObjects", bsonStories);

        String themeId = getTheme();
        if (themeId != null && themeId.length() > 0) {
            // replace theme link with json
            Theme th = MongoDB.getThemeDAO().get(new ObjectId(themeId));
            BSONObject themeJson = th.getBsonForSolr();
            bsonDs.removeField("theme");
            bsonDs.put("theme", themeJson);
        }
        return bsonDs;
    } catch (Exception e) {
        log.error("Couldnt get BSON from Morphia", e);
        return new BasicDBObject();
    }
}
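The pattern here is embedding a BasicDBList of sub-documents into a parent DBObject, replacing a field that previously held only references. A minimal sketch of that step in isolation; the "storyObjects" field name is kept from the example purely for illustration:

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class EmbeddedListSketch {
    /** Replaces the "storyObjects" field of a parent document with an array of sub-documents. */
    static DBObject withChildren(DBObject parent, Iterable<DBObject> children) {
        BasicDBList list = new BasicDBList();
        for (DBObject child : children)
            list.add(child);                      // each element becomes one entry of the BSON array
        parent.removeField("storyObjects");
        parent.put("storyObjects", list);         // a BasicDBList is itself a DBObject, so it nests directly
        return parent;
    }
}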
From source file: guardar.en.base.de.datos.SaxManejadorIndice.java
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
    super.endElement(uri, localName, qName);
    switch (qName) {
    case "title":
        break;
    case "text":
        documento++;
        // move the buffer contents into the text variable
        String texto = buffer.toString();
        texto = quitarBasura(texto);
        // strip the stopwords from the text
        texto = quitarStop(texto, this.listaStopWords);
        // split the text into tokens
        ArrayList<String> texto_separado = new ArrayList<>();
        try {
            texto_separado = textoArray(texto);
        } catch (IOException ex) {
            Logger.getLogger(SaxManejadorIndice.class.getName()).log(Level.SEVERE, null, ex);
        }
        texto = "";
        ArrayList<SuperClase> nuevo = new ArrayList<>();
        nuevo = fn(texto_separado);
        // free memory
        // texto_sin_stopwords.clear();
        texto_separado.clear();
        // compute the word frequencies
        nuevo = calcularFrecuencias(nuevo);
        // insert into the database
        for (int i = 0; i < nuevo.size(); i++) {
            contador++;
            // create the list of documents, which initially holds a single entry
            BasicDBList lista = new BasicDBList();
            lista.add(new BasicDBObject("documento", this.documento).append("frecuencia",
                    nuevo.get(i).frecuencia));
            // then create the DB object with id, word and the list of documents
            DBObject document = new BasicDBObject("id", contador).append("palabra", nuevo.get(i).palabra)
                    .append("documentos", lista);
            // documents.add(document);
            // Search: the source document is only appended when the word is already indexed;
            // this works but is very slow
            BasicDBObject query = new BasicDBObject("palabra", nuevo.get(i).palabra);
            try (DBCursor c = collection.find(query)) {
                int exist = 0;
                if (c.hasNext()) {
                    // if the word already exists, just push the document it came from
                    DBObject o = new BasicDBObject("documentos", new BasicDBObject("documento", this.documento)
                            .append("frecuencia", nuevo.get(i).frecuencia));
                    DBObject updateQuery = new BasicDBObject("$push", o);
                    collection.update(query, updateQuery);
                    exist = 1;
                    // System.out.print(c.next());
                }
                if (exist == 0) {
                    // otherwise insert it as a new entry
                    this.collection.insert(document);
                }
            }
            lista.clear();
            // this.collection.insert(document);
        }
        System.out.println("Documento " + this.documento + " procesado");
        nuevo.clear();
        break;
    }
}
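This indexer seeds each word's "documentos" array with a BasicDBList and later grows it with $push. A minimal sketch of the $push half, assuming a legacy-driver DBCollection handle and reusing the example's Spanish field names as-is:

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;

public class PushPostingSketch {
    /** Appends one { documento, frecuencia } entry to the "documentos" array of an indexed word. */
    static void addPosting(DBCollection index, String word, int docId, int frequency) {
        DBObject query = new BasicDBObject("palabra", word);
        DBObject posting = new BasicDBObject("documento", docId).append("frecuencia", frequency);
        // $push appends to the array that was originally created from a BasicDBList.
        index.update(query, new BasicDBObject("$push", new BasicDBObject("documentos", posting)));
    }
}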
From source file: homework.week3.course.BlogPostDAO.java
License: Apache License
public String addPost(String title, String body, List tags, String username) {
    System.out.println("inserting blog entry " + title + " " + body);

    String permalink = title.replaceAll("\\s", "_"); // whitespace becomes _
    permalink = permalink.replaceAll("\\W", ""); // get rid of non alphanumeric
    permalink = permalink.toLowerCase();

    BasicDBObject post = new BasicDBObject();

    // XXX HW 3.2, Work Here
    // Remember that a valid post has the following keys:
    // author, body, permalink, tags, comments, date
    //
    // A few hints:
    // - Don't forget to create an empty list of comments
    // - for the value of the date key, today's datetime is fine.
    // - tags are already in list form that implements suitable interface.
    // - we created the permalink for you above.

    // Build the post object and insert it
    post = post.append("title", title).append("author", username).append("tags", tags).append("body", body)
            .append("permalink", permalink).append("date", new Date()).append("comments", new BasicDBList());
    postsCollection.save(post);

    return permalink;
}
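As a follow-up to this example: the empty BasicDBList stored under "comments" round-trips as a BSON array, and with the legacy driver's default decoder it comes back as a BasicDBList when the post is read again. A small illustrative sketch, not part of the homework code:

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;

public class ReadCommentsSketch {
    /** Reads a post back by permalink and returns its "comments" array. */
    static BasicDBList commentsOf(DBCollection posts, String permalink) {
        DBObject post = posts.findOne(new BasicDBObject("permalink", permalink));
        // The default decoder materialises BSON arrays as BasicDBList instances.
        return post != null ? (BasicDBList) post.get("comments") : new BasicDBList();
    }
}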