List of usage examples for com.mongodb.DBCollection.getName()
public String getName()
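DBCollection.getName() returns the bare collection name, without the database prefix that getFullName() includes. As a baseline for the real-world examples below, here is a minimal sketch of the call in isolation; it assumes the legacy com.mongodb driver and a mongod listening on localhost:27017, and the "test" and "users" names are illustrative only:

import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;

public class GetNameExample {
    public static void main(String[] args) {
        // assumes a mongod instance listening on localhost:27017
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        try {
            DB db = mongoClient.getDB("test"); // hypothetical database name
            DBCollection collection = db.getCollection("users"); // hypothetical collection name
            // getName() returns the bare collection name ("users"),
            // whereas getFullName() would return "test.users"
            System.out.println("Collection name: " + collection.getName());
        } finally {
            mongoClient.close();
        }
    }
}

As the examples below show, getName() is rarely called for its own sake: it typically feeds log messages, names temporary output collections, or identifies dynamically resolved collections.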
From source file: de.flapdoodle.mongoom.datastore.Indexes.java
License: Apache License
public static void ensureIndex(DB db, IndexDef index, String collectionName) {
    BasicDBObjectBuilder keys = BasicDBObjectBuilder.start();
    BasicDBObjectBuilder keyOpts = null;
    List<FieldIndex> indexSorted = Lists.newArrayList(index.fields());
    Collections.sort(indexSorted, new Comparator<FieldIndex>() {
        @Override
        public int compare(FieldIndex o1, FieldIndex o2) {
            if (o1.priority() == o2.priority())
                return 0;
            if (o1.priority() < o2.priority())
                return 1;
            return -1;
        }
    });
    for (FieldIndex def : indexSorted) {
        String fieldName = def.name();
        Direction dir = def.direction();
        if (dir == Direction.BOTH)
            keys.add(fieldName, 1).add(fieldName, -1);
        else
            keys.add(fieldName, (dir == Direction.ASC) ? 1 : -1);
    }
    String name = index.name();
    if (name != null && !name.isEmpty()) {
        if (keyOpts == null)
            keyOpts = new BasicDBObjectBuilder();
        keyOpts.add("name", name);
    }
    if (index.unique()) {
        if (keyOpts == null)
            keyOpts = new BasicDBObjectBuilder();
        keyOpts.add("unique", true);
        if (index.dropDups())
            keyOpts.add("dropDups", true);
    }
    if (index.sparse()) {
        if (keyOpts == null)
            keyOpts = new BasicDBObjectBuilder();
        keyOpts.add("sparse", true);
    }
    try {
        db.requestStart();
        DBCollection dbColl = db.getCollection(collectionName);
        DBObject indexKeys = keys.get();
        if (keyOpts == null) {
            _logger.info("Ensuring index for " + dbColl.getName() + "." + index + " with keys " + indexKeys);
            dbColl.ensureIndex(indexKeys);
        } else {
            DBObject options = keyOpts.get();
            _logger.info("Ensuring index for " + dbColl.getName() + "." + index + " with keys " + indexKeys
                    + " and opts " + options);
            dbColl.ensureIndex(indexKeys, options);
        }
    } finally {
        Errors.checkError(db, Operation.Insert);
        db.requestDone();
    }
}
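In this example getName() is used purely for logging: it lets the index-creation message identify the target collection without threading the collection name through separately. Note that DBCollection.ensureIndex() was deprecated in the 2.x driver series and removed in 3.x in favor of createIndex(); the getName() call itself is unaffected by that change.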
From source file: eu.eubrazilcc.lvl.storage.mongodb.MongoDBConnector.java
License: EUPL
/**
 * Runs a map-reduce aggregation over a collection and saves the result to a temporary collection,
 * which is deleted at the end of the execution. The results of the operation are returned to the
 * caller in a list of {@code BasicDBObject}.
 * @param collection - collection whose objects are searched
 * @param mapFn - map function
 * @param reduceFn - reduce function
 * @param query - (optional) specifies the selection criteria using query operators for determining
 *        the documents input to the map function. Set this parameter to {@code null} to use all
 *        documents in the collection
 * @return a list of {@code BasicDBObject} which contains the results of this map-reduce operation.
 * @see <a href="http://cookbook.mongodb.org/patterns/pivot/">The MongoDB Cookbook - Pivot Data with Map reduce</a>
 */
public List<BasicDBObject> mapReduce(final String collection, final String mapFn, final String reduceFn,
        final @Nullable DBObject query) {
    checkArgument(isNotBlank(collection), "Uninitialized or invalid collection");
    checkArgument(isNotBlank(mapFn), "Uninitialized or invalid map function");
    checkArgument(isNotBlank(reduceFn), "Uninitialized or invalid reduce function");
    final List<BasicDBObject> list = newArrayList();
    final DB db = client().getDB(CONFIG_MANAGER.getDbName());
    final DBCollection dbcol = db.getCollection(collection);
    final DBCollection tmpCol = tmpCollection();
    try {
        final MapReduceCommand command = new MapReduceCommand(dbcol, mapFn, reduceFn, tmpCol.getName(),
                REDUCE, query);
        final MapReduceOutput output = dbcol.mapReduce(command);
        final Iterable<DBObject> results = output.results();
        for (final DBObject result : results) {
            list.add((BasicDBObject) result);
        }
    } catch (Exception e) {
        throw new IllegalStateException("Failed to execute map-reduce operation", e);
    } finally {
        try {
            tmpCol.drop();
        } catch (Exception mdbe) {
            LOGGER.warn("Failed to drop temporary collection", mdbe);
        }
    }
    return list;
}
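The getName() call matters here because MapReduceCommand identifies its output collection by name (a String) rather than by a DBCollection reference, so the temporary collection obtained from tmpCollection() has to be dereferenced to its name before the command can be constructed.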
From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License: Open Source License
/**
 * This method returns the number of variants that match the provided parameters.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the project id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the genotype pattern
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param processID the process id
 * @return the number of matching variants
 * @throws Exception the exception
 */
@RequestMapping(variantCountURL)
protected @ResponseBody long countVariants(HttpServletRequest request, @RequestParam("module") String sModule,
        @RequestParam("project") int projId, @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") Integer genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") Integer readDepthThreshold,
        @RequestParam("missingData") Double missingData,
        @RequestParam(value = "minmaf", required = false) Float minmaf,
        @RequestParam(value = "maxmaf", required = false) Float maxmaf,
        @RequestParam("minposition") Long minposition, @RequestParam("maxposition") Long maxposition,
        @RequestParam("alleleCount") String alleleCount, @RequestParam("geneName") String geneName,
        @RequestParam("variantEffects") String variantEffects,
        @RequestParam("processID") final String processID) throws Exception {
    final ProgressIndicator progress = new ProgressIndicator(processID.substring(1 + processID.indexOf('|')),
            new String[0]);
    ProgressIndicator.registerProgressIndicator(progress);
    DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, progress.getProcessId(), true /* empty it */);
    try {
        String queryKey = getQueryKey(request, sModule, projId, selectedVariantTypes, selectedSequences,
                selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData,
                minmaf, maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects);
        final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        DBCollection cachedCountcollection = mongoTemplate.getCollection(MgdbDao.COLLECTION_NAME_CACHED_COUNTS);
        DBCursor countCursor = cachedCountcollection.find(new BasicDBObject("_id", queryKey));
        Long count = null;
        if (countCursor.hasNext()) {
            count = 0l;
            for (Object aPartialCount : ((BasicDBList) countCursor.next()
                    .get(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE)).toArray())
                count += (Long) aPartialCount;
        }
        LOG.debug((count == null ? "new" : "existing") + " queryKey hash: " + queryKey);
        if (count == null) {
            long before = System.currentTimeMillis();
            progress.addStep("Counting matching variants");
            String sRegexOrAggregationOperator = GenotypingDataQueryBuilder.getGenotypePatternToQueryMap()
                    .get(gtPattern);
            List<String> alleleCountList = alleleCount.length() == 0 ? null
                    : Arrays.asList(alleleCount.split(";"));
            GenotypingProject genotypingProject = mongoTemplate.findById(projId, GenotypingProject.class);
            if (genotypingProject.getAlleleCounts().size() != 1
                    || genotypingProject.getAlleleCounts().iterator().next() != 2) {
                // Project does not only have bi-allelic data: make sure we can apply MAF filter on selection
                boolean fExactlyOneNumberOfAllelesSelected = alleleCountList != null
                        && alleleCountList.size() == 1;
                boolean fBiAllelicSelected = fExactlyOneNumberOfAllelesSelected
                        && "2".equals(alleleCountList.get(0));
                boolean fMafRequested = (maxmaf != null && maxmaf < 50) || (minmaf != null && minmaf > 0);
                if (fMafRequested && !fBiAllelicSelected) {
                    progress.setError("MAF is only supported on biallelic data!");
                    return 0l;
                }
            }
            String actualSequenceSelection = selectedSequences;
            if (actualSequenceSelection.length() == 0) {
                ArrayList<String> externallySelectedSeqs = getSequenceIDsBeingFilteredOn(request, sModule);
                if (externallySelectedSeqs != null)
                    actualSequenceSelection = StringUtils.join(externallySelectedSeqs, ";");
            }
            boolean fNeedToFilterOnGenotypingData = needToFilterOnGenotypingData(sModule, projId,
                    sRegexOrAggregationOperator, genotypeQualityThreshold, readDepthThreshold, missingData,
                    minmaf, maxmaf, geneName, variantEffects);
            BasicDBList variantQueryDBList = buildVariantDataQuery(sModule, projId,
                    selectedVariantTypes.length() == 0 ? null : Arrays.asList(selectedVariantTypes.split(";")),
                    actualSequenceSelection.length() == 0 ? null
                            : Arrays.asList(actualSequenceSelection.split(";")),
                    minposition, maxposition, alleleCountList);
            if (variantQueryDBList.isEmpty()) {
                if (!fNeedToFilterOnGenotypingData && mongoTemplate.count(null, GenotypingProject.class) == 1)
                    count = mongoTemplate.count(new Query(), VariantData.class); // no filter whatsoever
            } else {
                if (!fNeedToFilterOnGenotypingData) {
                    // filtering on variant features only: we just need a count
                    count = mongoTemplate.getCollection(mongoTemplate.getCollectionName(VariantData.class))
                            .count(new BasicDBObject("$and", variantQueryDBList));
                } else {
                    // filtering on variant features and genotyping data: we need a list of variant IDs
                    // to restrict the genotyping data search to
                    long beforeAggQuery = System.currentTimeMillis();
                    progress.setProgressDescription("Filtering variants for count...");
                    DBCollection variantColl = mongoTemplate
                            .getCollection(mongoTemplate.getCollectionName(VariantData.class));
                    List<DBObject> pipeline = new ArrayList<DBObject>();
                    pipeline.add(new BasicDBObject("$match", new BasicDBObject("$and", variantQueryDBList)));
                    BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
                    projectObject.put(
                            VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                            "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_SEQUENCE);
                    projectObject.put(
                            VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE,
                            "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_START_SITE);
                    projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
                    projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST,
                            "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
                    pipeline.add(new BasicDBObject("$project", projectObject));
                    pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
                    variantColl.aggregate(pipeline);
                    mongoTemplate.getDb().setWriteConcern(WriteConcern.ACKNOWLEDGED);
                    LOG.debug("Variant preliminary query found " + tmpVarColl.count() + " results in "
                            + (System.currentTimeMillis() - beforeAggQuery) / 1000f + "s");
                    progress.setProgressDescription(null);
                    if (tmpVarColl.count() == 0)
                        count = 0l; // no need to search any further
                }
            }
            if (count != null) {
                BasicDBObject dbo = new BasicDBObject("_id", queryKey);
                dbo.append(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE, new Long[] { count });
                cachedCountcollection.save(dbo);
            } else {
                // now filter on genotyping data
                List<String> selectedIndividualList = selectedIndividuals.length() == 0 ? null
                        : Arrays.asList(selectedIndividuals.split(";"));
                if (selectedIndividualList == null)
                    selectedIndividualList = getIndividualsInDbOrder(sModule, projId);
                GigwaSearchVariantsExportRequest gsvr = new GigwaSearchVariantsExportRequest();
                gsvr.setAlleleCount(alleleCount);
                if (minposition != null)
                    gsvr.setStart(minposition);
                if (maxposition != null)
                    gsvr.setEnd(maxposition);
                gsvr.setGeneName(geneName);
                gsvr.setReferenceName(selectedSequences);
                gsvr.setSelectedVariantTypes(selectedVariantTypes);
                gsvr.setVariantEffect(variantEffects);
                gsvr.setVariantSetId(sModule + ServiceInterface.ID_SEPARATOR + projId);
                gsvr.setMissingData(missingData);
                gsvr.setMinmaf(minmaf);
                gsvr.setMaxmaf(maxmaf);
                gsvr.setGtPattern(gtPattern);
                HashMap<String, Integer> annotationFieldThresholds = new HashMap<String, Integer>();
                annotationFieldThresholds.put(VCFConstants.GENOTYPE_QUALITY_KEY, genotypeQualityThreshold);
                annotationFieldThresholds.put(VCFConstants.DEPTH_KEY, readDepthThreshold);
                gsvr.setAnnotationFieldThresholds(annotationFieldThresholds);
                gsvr.setCallSetIds(selectedIndividualList);
                GenotypingDataQueryBuilder genotypingDataQueryBuilder = new GenotypingDataQueryBuilder(gsvr,
                        tmpVarColl);
                try {
                    final int nChunkCount = genotypingDataQueryBuilder.getNumberOfQueries();
                    if (nChunkCount > 1)
                        LOG.debug("Query split into " + nChunkCount);
                    final Long[] partialCountArray = new Long[nChunkCount];
                    final Builder aggOpts = AggregationOptions.builder().allowDiskUse(false);
                    final ArrayList<Thread> threadsToWaitFor = new ArrayList<Thread>();
                    final AtomicInteger finishedThreadCount = new AtomicInteger(0);
                    ArrayList<List<DBObject>> genotypingDataPipelines = new ArrayList();
                    while (genotypingDataQueryBuilder.hasNext())
                        genotypingDataPipelines.add(genotypingDataQueryBuilder.next());
                    ArrayList<Integer> chunkIndices = new ArrayList<Integer>();
                    for (int i = 0; i < genotypingDataPipelines.size(); i++)
                        chunkIndices.add(i);
                    Collections.shuffle(chunkIndices);
                    for (int i = 0; i < chunkIndices.size(); i++) {
                        final List<DBObject> genotypingDataPipeline = genotypingDataPipelines
                                .get(chunkIndices.get(i));
                        // Now the $group operation, used for counting
                        DBObject groupFields = new BasicDBObject("_id", null);
                        groupFields.put("count", new BasicDBObject("$sum", 1));
                        genotypingDataPipeline.add(new BasicDBObject("$group", groupFields));
                        if (i == 0 && tmpVarColl.count() <= 5)
                            LOG.debug(genotypingDataPipeline);
                        if (progress.hasAborted()) {
                            genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                            return 0l;
                        }
                        final int chunkIndex = i;
                        Thread t = new Thread() {
                            public void run() {
                                Cursor it = mongoTemplate
                                        .getCollection(MongoTemplateManager
                                                .getMongoCollectionName(VariantRunData.class))
                                        .aggregate(genotypingDataPipeline, aggOpts.build());
                                partialCountArray[chunkIndex] = it.hasNext()
                                        ? ((Number) it.next().get("count")).longValue() : 0;
                                progress.setCurrentStepProgress(
                                        (short) (finishedThreadCount.incrementAndGet() * 100 / nChunkCount));
                                genotypingDataPipeline.clear(); // release memory (VERY IMPORTANT)
                            }
                        };
                        if (i % NUMBER_OF_SIMULTANEOUS_QUERY_THREADS == (NUMBER_OF_SIMULTANEOUS_QUERY_THREADS - 1)) {
                            t.run(); // run synchronously
                        } else {
                            threadsToWaitFor.add(t);
                            t.start(); // run asynchronously for better speed
                        }
                    }
                    for (Thread t : threadsToWaitFor) // wait for all threads before moving to next phase
                        t.join();
                    progress.setCurrentStepProgress(100);
                    count = 0l;
                    for (Long partialCount : partialCountArray)
                        count += partialCount;
                    BasicDBObject dbo = new BasicDBObject("_id", queryKey);
                    dbo.append(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE, partialCountArray);
                    cachedCountcollection.save(dbo);
                } catch (Exception e) {
                    genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                    throw e;
                }
            }
            LOG.info("countVariants found " + count + " results in "
                    + (System.currentTimeMillis() - before) / 1000d + "s");
        }
        progress.markAsComplete();
        if (progress.hasAborted())
            return 0l;
        return count;
    } finally {
        // getTemporaryVariantCollection(sModule, progress.getProcessId(), true); // always empty it
    }
}
From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License: Open Source License
/**
 * Find variants.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the project id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the genotype pattern
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param wantedFields the wanted fields
 * @param page the page
 * @param size the size
 * @param sortBy the sort by
 * @param sortDir the sort dir
 * @param processID the process id
 * @return true, if successful
 * @throws Exception the exception
 */
@RequestMapping(variantFindURL)
/**
 * This method builds a list of variants in a temporary collection, that may be used later for browsing or exporting results
 */
protected @ResponseBody boolean findVariants(HttpServletRequest request, @RequestParam("module") String sModule,
        @RequestParam("project") int projId, @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") int genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") int readDepthThreshold,
        @RequestParam("missingData") double missingData, @RequestParam("minmaf") Float minmaf,
        @RequestParam("maxmaf") Float maxmaf, @RequestParam("minposition") Long minposition,
        @RequestParam("maxposition") Long maxposition, @RequestParam("alleleCount") String alleleCount,
        @RequestParam("geneName") String geneName, @RequestParam("variantEffects") String variantEffects,
        @RequestParam("wantedFields") String wantedFields, @RequestParam("page") int page,
        @RequestParam("size") int size, @RequestParam("sortBy") String sortBy,
        @RequestParam("sortDir") String sortDir, @RequestParam("processID") String processID) throws Exception {
    long before = System.currentTimeMillis();
    String token = processID.substring(1 + processID.indexOf('|'));
    final ProgressIndicator progress = new ProgressIndicator(token, new String[0]);
    ProgressIndicator.registerProgressIndicator(progress);
    progress.addStep("Loading results");
    String actualSequenceSelection = selectedSequences;
    if (actualSequenceSelection.length() == 0) {
        ArrayList<String> externallySelectedSeqs = getSequenceIDsBeingFilteredOn(request, sModule);
        if (externallySelectedSeqs != null)
            actualSequenceSelection = StringUtils.join(externallySelectedSeqs, ";");
    }
    List<String> selectedSequenceList = actualSequenceSelection.length() == 0 ? null
            : Arrays.asList(actualSequenceSelection.split(";"));
    String queryKey = getQueryKey(request, sModule, projId, selectedVariantTypes, selectedSequences,
            selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData,
            minmaf, maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects);
    final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    DBCollection cachedCountCollection = mongoTemplate.getCollection(MgdbDao.COLLECTION_NAME_CACHED_COUNTS);
    DBCursor countCursor = cachedCountCollection.find(new BasicDBObject("_id", queryKey));
    final DBCollection variantColl = mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantData.class));
    final Object[] partialCountArray = !countCursor.hasNext() ? null
            : ((BasicDBList) countCursor.next().get(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE)).toArray();
    final DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, progress.getProcessId(), false);
    String sRegexOrAggregationOperator = GenotypingDataQueryBuilder.getGenotypePatternToQueryMap()
            .get(gtPattern);
    boolean fNeedToFilterOnGenotypingData = needToFilterOnGenotypingData(sModule, projId,
            sRegexOrAggregationOperator, genotypeQualityThreshold, readDepthThreshold, missingData, minmaf,
            maxmaf, geneName, variantEffects);
    final BasicDBList variantQueryDBList = buildVariantDataQuery(sModule, projId,
            selectedVariantTypes.length() == 0 ? null : Arrays.asList(selectedVariantTypes.split(";")),
            selectedSequenceList, minposition, maxposition,
            alleleCount.length() == 0 ? null : Arrays.asList(alleleCount.split(";")));
    if (!variantQueryDBList.isEmpty()
            && tmpVarColl.count() == 0 /* otherwise we kept the preliminary list from the count procedure */) {
        // apply filter on variant features
        progress.setProgressDescription("Filtering variants for display...");
        long beforeAggQuery = System.currentTimeMillis();
        List<DBObject> pipeline = new ArrayList<DBObject>();
        pipeline.add(new BasicDBObject("$match", new BasicDBObject("$and", variantQueryDBList)));
        BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
        projectObject.put(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE);
        projectObject.put(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE);
        projectObject.put(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_END_SITE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_END_SITE);
        projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
        projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST, "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
        pipeline.add(new BasicDBObject("$project", projectObject));
        pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
        variantColl.aggregate(pipeline);
        LOG.debug("Variant preliminary query found " + tmpVarColl.count() + " results in "
                + (System.currentTimeMillis() - beforeAggQuery) / 1000f + "s");
        progress.setProgressDescription(null);
    } else if (fNeedToFilterOnGenotypingData && tmpVarColl.count() > 0)
        LOG.debug("Re-using " + tmpVarColl.count()
                + " results from count procedure's variant preliminary query");
    if (progress.hasAborted())
        return false;
    if (fNeedToFilterOnGenotypingData) {
        // now filter on genotyping data
        final ConcurrentLinkedQueue<Thread> queryThreadsToWaitFor = new ConcurrentLinkedQueue<Thread>(),
                removalThreadsToWaitFor = new ConcurrentLinkedQueue<Thread>();
        final AtomicInteger finishedThreadCount = new AtomicInteger(0);
        final ConcurrentSkipListSet<Comparable> allVariantsThatPassRunFilter = new ConcurrentSkipListSet<Comparable>();
        GigwaSearchVariantsExportRequest gsvr = new GigwaSearchVariantsExportRequest();
        gsvr.setAlleleCount(alleleCount);
        if (minposition != null)
            gsvr.setStart(minposition);
        if (maxposition != null)
            gsvr.setEnd(maxposition);
        gsvr.setGeneName(geneName);
        gsvr.setReferenceName(selectedSequences);
        gsvr.setSelectedVariantTypes(selectedVariantTypes);
        gsvr.setVariantEffect(variantEffects);
        gsvr.setVariantSetId(sModule + ServiceInterface.ID_SEPARATOR + projId);
        gsvr.setMissingData(missingData);
        gsvr.setMinmaf(minmaf);
        gsvr.setMaxmaf(maxmaf);
        gsvr.setGtPattern(gtPattern);
        HashMap<String, Integer> annotationFieldThresholds = new HashMap<String, Integer>();
        annotationFieldThresholds.put(VCFConstants.GENOTYPE_QUALITY_KEY, genotypeQualityThreshold);
        annotationFieldThresholds.put(VCFConstants.DEPTH_KEY, readDepthThreshold);
        gsvr.setAnnotationFieldThresholds(annotationFieldThresholds);
        gsvr.setCallSetIds(selectedIndividuals == null || selectedIndividuals.length() == 0
                ? getIndividualsInDbOrder(sModule, projId) : Arrays.asList(selectedIndividuals.split(";")));
        final GenotypingDataQueryBuilder genotypingDataQueryBuilder = new GenotypingDataQueryBuilder(gsvr,
                tmpVarColl);
        genotypingDataQueryBuilder.keepTrackOfPreFilters(!variantQueryDBList.isEmpty());
        try {
            final int nChunkCount = genotypingDataQueryBuilder.getNumberOfQueries();
            if (nChunkCount != partialCountArray.length) {
                LOG.error("Different number of chunks between counting and listing variant rows!");
                progress.setError("Different number of chunks between counting and listing variant rows!");
                return false;
            }
            if (nChunkCount > 1)
                LOG.debug("Query split into " + nChunkCount);
            ArrayList<List<DBObject>> genotypingDataPipelines = new ArrayList();
            while (genotypingDataQueryBuilder.hasNext())
                genotypingDataPipelines.add(genotypingDataQueryBuilder.next());
            ArrayList<Integer> chunkIndices = new ArrayList<Integer>();
            for (int i = 0; i < genotypingDataPipelines.size(); i++)
                chunkIndices.add(i);
            Collections.shuffle(chunkIndices);
            for (int i = 0; i < chunkIndices.size(); i++) {
                final int chunkIndex = chunkIndices.get(i);
                final List<DBObject> genotypingDataPipeline = genotypingDataPipelines.get(chunkIndex);
                if (progress.hasAborted()) {
                    genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                    return false;
                }
                Thread t = new Thread() {
                    public void run() {
                        Cursor genotypingDataCursor = mongoTemplate
                                .getCollection(MongoTemplateManager.getMongoCollectionName(VariantRunData.class))
                                .aggregate(genotypingDataPipeline,
                                        AggregationOptions.builder().allowDiskUse(true).build());
                        final ArrayList<Comparable> variantsThatPassedRunFilter = new ArrayList<Comparable>();
                        while (genotypingDataCursor.hasNext())
                            variantsThatPassedRunFilter.add((Comparable) genotypingDataCursor.next().get("_id"));
                        if (variantQueryDBList.isEmpty()) // otherwise we won't need it
                            allVariantsThatPassRunFilter.addAll(variantsThatPassedRunFilter);
                        else { // mark the results we want to keep
                            final List<Comparable> lastUsedPreFilter = genotypingDataQueryBuilder
                                    .getPreFilteredIDsForChunk(chunkIndex);
                            Thread removalThread = new Thread() {
                                public void run() {
                                    genotypingDataPipeline.clear(); // release memory (VERY IMPORTANT)
                                    long beforeTempCollUpdate = System.currentTimeMillis();
                                    if (variantsThatPassedRunFilter.size() == lastUsedPreFilter.size())
                                        return; // none to remove
                                    Collection<Comparable> filteredOutVariants = variantsThatPassedRunFilter
                                            .size() == 0 ? lastUsedPreFilter
                                                    : CollectionUtils.subtract(lastUsedPreFilter,
                                                            variantsThatPassedRunFilter);
                                    BasicDBObject removalQuery = GenotypingDataQueryBuilder
                                            .tryAndShrinkIdList("_id", filteredOutVariants, 4);
                                    WriteResult wr = tmpVarColl.remove(removalQuery);
                                    LOG.debug("Chunk N." + (chunkIndex) + ": " + wr.getN()
                                            + " filtered-out temp records removed in "
                                            + (System.currentTimeMillis() - beforeTempCollUpdate) / 1000d + "s");
                                    progress.setCurrentStepProgress(
                                            (short) (finishedThreadCount.incrementAndGet() * 100 / nChunkCount));
                                }
                            };
                            removalThreadsToWaitFor.add(removalThread);
                            removalThread.start();
                        }
                    }
                };
                if (i % NUMBER_OF_SIMULTANEOUS_QUERY_THREADS == (NUMBER_OF_SIMULTANEOUS_QUERY_THREADS - 1))
                    t.run(); // sometimes run synchronously so that all queries are not sent at the same time (also helps smooth progress display)
                else {
                    queryThreadsToWaitFor.add(t);
                    t.start(); // run asynchronously for better speed
                }
            }
            // wait for all threads before moving to next phase
            for (Thread t : queryThreadsToWaitFor)
                t.join();
            for (Thread t : removalThreadsToWaitFor)
                t.join();
        } catch (Exception e) {
            genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
            throw e;
        }
        if (progress.hasAborted())
            return false;
        progress.addStep("Updating temporary results");
        progress.moveToNextStep();
        final long beforeTempCollUpdate = System.currentTimeMillis();
        mongoTemplate.getDb().setWriteConcern(WriteConcern.ACKNOWLEDGED);
        if (variantQueryDBList.isEmpty()) {
            // we filtered on runs only: keep track of the final dataset
            List<BasicDBObject> pipeline = new ArrayList<>();
            pipeline.add(new BasicDBObject("$match",
                    GenotypingDataQueryBuilder.tryAndShrinkIdList("_id", allVariantsThatPassRunFilter, 4)));
            BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
            projectObject.put(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                    "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE);
            projectObject.put(
                    VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE,
                    "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE);
            projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
            projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST,
                    "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
            projectObject.put(VariantData.FIELDNAME_VERSION, "$" + VariantData.FIELDNAME_VERSION);
            pipeline.add(new BasicDBObject("$project", projectObject));
            pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
            variantColl.aggregate(pipeline);
            LOG.debug(tmpVarColl.count() + " temp records created in "
                    + (System.currentTimeMillis() - beforeTempCollUpdate) / 1000d + "s");
        }
    }
    progress.markAsComplete();
    LOG.info("findVariants took " + (System.currentTimeMillis() - before) / 1000d + "s");
    return true;
}
From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License: Open Source License
/**
 * Export variants.
 *
 * @param request the request
 * @param response the response
 * @param sModule the module
 * @param fKeepExportOnServer whether or not to keep the export on the server
 * @param sExportFormat the export format
 * @param exportID the export id
 * @param projId the project id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the genotype pattern
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @throws Exception the exception
 */
@RequestMapping(variantExportDataURL)
protected void exportVariants(HttpServletRequest request, HttpServletResponse response,
        @RequestParam("module") String sModule, @RequestParam("keepExportOnServer") boolean fKeepExportOnServer,
        @RequestParam("exportFormat") String sExportFormat, @RequestParam("exportID") String exportID,
        @RequestParam("project") int projId, @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam(value = "individuals", required = false) String selectedIndividuals,
        @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") int genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") int readDepthThreshold,
        @RequestParam("missingData") double missingData,
        @RequestParam(value = "minmaf", required = false) Float minmaf,
        @RequestParam(value = "maxmaf", required = false) Float maxmaf,
        @RequestParam("minposition") Long minposition, @RequestParam("maxposition") Long maxposition,
        @RequestParam("alleleCount") String alleleCount, @RequestParam("geneName") String geneName,
        @RequestParam("variantEffects") String variantEffects) throws Exception {
    // exportID = URLDecoder.decode(exportID, "UTF-8");
    String token = exportID.substring(1 + exportID.indexOf('|'));
    ProgressIndicator progress = ProgressIndicator.get(token);
    if (progress == null) {
        progress = new ProgressIndicator(token, new String[] { "Identifying matching variants" });
        ProgressIndicator.registerProgressIndicator(progress);
    }
    long before = System.currentTimeMillis();
    final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    List<String> selectedIndividualList = selectedIndividuals.length() == 0
            ? getIndividualsInDbOrder(sModule, projId) /* no selection means all selected */
            : Arrays.asList(selectedIndividuals.split(";"));
    long count = countVariants(request, sModule, projId, selectedVariantTypes, selectedSequences,
            selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData,
            minmaf, maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects,
            "" /* if we pass exportID then the progress indicator is going to be replaced by another, and we don't need it for counting since we cache count values */);
    DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, token, false);
    long nTempVarCount = mongoTemplate.count(new Query(), tmpVarColl.getName());
    boolean fWorkingOnFullDataset = mongoTemplate.count(null, VariantData.class) == count;
    if (!fWorkingOnFullDataset && nTempVarCount == 0) {
        progress.setError(MESSAGE_TEMP_RECORDS_NOT_FOUND);
        return;
    }
    // use a cursor to avoid using too much memory
    DBObject query = count == nTempVarCount ? null
            : new BasicDBObject(VariantData.FIELDNAME_VERSION, new BasicDBObject("$exists", true));
    String sequenceField = VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE;
    String startField = VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE;
    BasicDBObject sort = new BasicDBObject("_id", 1); /* necessary for MgdbDao.getSampleGenotypes to work properly */
    DBObject projection = new BasicDBObject();
    projection.put(sequenceField, 1);
    projection.put(startField, 1);
    DBCursor markerCursor = mongoTemplate
            .getCollection(!fWorkingOnFullDataset ? tmpVarColl.getName()
                    : mongoTemplate.getCollectionName(VariantData.class))
            .find(query, projection).sort(sort);
    markerCursor.addOption(Bytes.QUERYOPTION_NOTIMEOUT);
    try {
        AbstractIndividualOrientedExportHandler individualOrientedExportHandler = AbstractIndividualOrientedExportHandler
                .getIndividualOrientedExportHandlers().get(sExportFormat);
        AbstractMarkerOrientedExportHandler markerOrientedExportHandler = AbstractMarkerOrientedExportHandler
                .getMarkerOrientedExportHandlers().get(sExportFormat);
        GenotypingProject project = mongoTemplate.findById(projId, GenotypingProject.class);
        String filename = sModule + "_" + project.getName() + "_"
                + new SimpleDateFormat("yyyy-MM-dd").format(new Date()) + "_" + count + "variants_"
                + sExportFormat + "." + (individualOrientedExportHandler != null
                        ? individualOrientedExportHandler : markerOrientedExportHandler).getExportFileExtension();
        OutputStream os;
        LOG.info((fKeepExportOnServer ? "On-server" : "Direct-download") + " export requested: " + token);
        if (fKeepExportOnServer) {
            String relativeOutputFolder = File.separator + FRONTEND_URL + File.separator + TMP_OUTPUT_FOLDER
                    + File.separator + token.replaceAll("\\|", "_") + File.separator;
            File outputLocation = new File(
                    request.getSession().getServletContext().getRealPath(relativeOutputFolder));
            if (!outputLocation.exists() && !outputLocation.mkdirs())
                throw new Exception("Unable to create folder: " + outputLocation);
            os = new FileOutputStream(new File(outputLocation.getAbsolutePath() + File.separator + filename));
            response.setContentType("text/plain");
        } else {
            os = response.getOutputStream();
            response.setContentType("application/zip");
            response.setHeader("Content-disposition", "inline; filename=" + filename);
        }
        ArrayList<SampleId> sampleIDs = new ArrayList<SampleId>();
        for (String individual : selectedIndividualList)
            for (Integer individualSampleIndex : project.getIndividualSampleIndexes(individual))
                sampleIDs.add(new SampleId(projId, individualSampleIndex));
        if (fKeepExportOnServer) {
            String relativeOutputFolder = FRONTEND_URL + File.separator + TMP_OUTPUT_FOLDER + File.separator
                    + token.replaceAll("\\|", "_") + File.separator;
            String relativeOutputFolderUrl = request.getContextPath() + "/"
                    + relativeOutputFolder.replace(File.separator, "/");
            String exportURL = relativeOutputFolderUrl + filename;
            LOG.debug("On-server export file for export " + token + ": " + exportURL);
            response.getWriter().write(exportURL);
            response.flushBuffer();
        }
        // else
        // {
        //     // The two next lines are an ugly hack that makes the client believe transfer has started.
        //     // Otherwise we may end up with a client-side timeout (search for network.http.response.timeout for more details)
        //     response.getOutputStream().print(" ");
        //     response.getOutputStream().flush();
        // }
        HashMap<String, Integer> annotationFieldThresholds = new HashMap<String, Integer>();
        annotationFieldThresholds.put(VCFConstants.GENOTYPE_QUALITY_KEY, genotypeQualityThreshold);
        annotationFieldThresholds.put(VCFConstants.DEPTH_KEY, readDepthThreshold);
        if (individualOrientedExportHandler != null) {
            progress.addStep("Reading and re-organizing genotypes"); // initial step will consist in organizing genotypes by individual rather than by marker
            progress.moveToNextStep(); // done with identifying variants
            TreeMap<String, File> exportFiles = individualOrientedExportHandler.createExportFiles(sModule,
                    markerCursor.copy(), sampleIDs, new ArrayList<SampleId>(), token, annotationFieldThresholds,
                    new HashMap<String, Integer>(), project.getSampleIdToIndividualMap(selectedIndividualList),
                    progress);
            if (!progress.hasAborted()) {
                for (String step : individualOrientedExportHandler.getStepList())
                    progress.addStep(step);
                progress.moveToNextStep();
                individualOrientedExportHandler.exportData(os, sModule, exportFiles.values(), true, progress,
                        markerCursor, null, null);
            }
        } else if (markerOrientedExportHandler != null) {
            for (String step : markerOrientedExportHandler.getStepList())
                progress.addStep(step);
            progress.moveToNextStep(); // done with identifying variants
            markerOrientedExportHandler.exportData(os, sModule, sampleIDs, new ArrayList<SampleId>(), progress,
                    markerCursor, null, annotationFieldThresholds, new HashMap<String, Integer>(),
                    project.getSampleIdToIndividualMap(selectedIndividualList), null);
            LOG.debug("done with exportData");
        } else
            throw new Exception("No export handler found for format " + sExportFormat);
        if (!progress.hasAborted()) {
            LOG.info("doVariantExport took " + (System.currentTimeMillis() - before) / 1000d + "s to process "
                    + count + " variants and " + selectedIndividualList.size() + " individuals");
            progress.markAsComplete();
        }
    } catch (Throwable t) {
        LOG.error("Error exporting data", t);
        progress.setError("Error exporting data: " + t.getClass().getSimpleName()
                + (t.getMessage() != null ? " - " + t.getMessage() : ""));
        return;
    } finally {
        markerCursor.close();
    }
}
From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License: Open Source License
/**
 * Selection density.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the project id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the genotype pattern
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param processID the process id
 * @param displayedSequence the displayed sequence
 * @param displayedRangeMin the displayed range min
 * @param displayedRangeMax the displayed range max
 * @param displayedRangeIntervalCount the displayed range interval count
 * @param displayedVariantType the displayed variant type
 * @return the map
 * @throws Exception the exception
 */
@RequestMapping(selectionDensityDataURL)
protected @ResponseBody Map<Long, Long> selectionDensity(HttpServletRequest request,
        @RequestParam("module") String sModule, @RequestParam("project") int projId,
        @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") int genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") int readDepthThreshold,
        @RequestParam("missingData") double missingData, @RequestParam("minmaf") Float minmaf,
        @RequestParam("maxmaf") Float maxmaf, @RequestParam("minposition") Long minposition,
        @RequestParam("maxposition") Long maxposition, @RequestParam("alleleCount") String alleleCount,
        @RequestParam("geneName") String geneName, @RequestParam("variantEffects") String variantEffects,
        @RequestParam("processID") String processID,
        @RequestParam("displayedSequence") String displayedSequence,
        @RequestParam(required = false, value = "displayedRangeMin") Long displayedRangeMin,
        @RequestParam(required = false, value = "displayedRangeMax") Long displayedRangeMax,
        @RequestParam(required = false, value = "displayedRangeIntervalCount") final Integer displayedRangeIntervalCount,
        @RequestParam(required = false, value = "displayedVariantType") String displayedVariantType)
        throws Exception {
    long before = System.currentTimeMillis();
    String token = processID.substring(1 + processID.indexOf('|'));
    ProgressIndicator progress = new ProgressIndicator(token,
            new String[] { "Calculating " + (displayedVariantType != null ? displayedVariantType + " " : "")
                    + "variant density on sequence " + displayedSequence });
    ProgressIndicator.registerProgressIndicator(progress);
    final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    long count = countVariants(request, sModule, projId, selectedVariantTypes, selectedSequences,
            selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData,
            minmaf, maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects,
            "" /* if we pass exportID then the progress indicator is going to be replaced by another, and we don't need it for counting since we cache count values */);
    DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, token, false);
    // boolean fStillGotUnwantedTempVariants = count < tmpVarColl.count();
    long nTempVarCount = mongoTemplate.count(new Query(), tmpVarColl.getName());
    final boolean fWorkingOnFullDataset = mongoTemplate.count(null, VariantData.class) == count;
    if (!fWorkingOnFullDataset && nTempVarCount == 0) {
        progress.setError(MESSAGE_TEMP_RECORDS_NOT_FOUND);
        return null;
    }
    final String actualCollectionName = fWorkingOnFullDataset
            ? mongoTemplate.getCollectionName(VariantData.class) : tmpVarColl.getName();
    if (displayedRangeMin == null || displayedRangeMax == null) {
        BasicDBList matchAndList = new BasicDBList();
        matchAndList.add(new BasicDBObject(
                VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                displayedSequence));
        if (displayedVariantType != null)
            matchAndList.add(new BasicDBObject(VariantData.FIELDNAME_TYPE, displayedVariantType));
        BasicDBObject match = new BasicDBObject("$match", new BasicDBObject("$and", matchAndList));
        BasicDBObject groupFields = new BasicDBObject("_id", null);
        groupFields.put("min", new BasicDBObject("$min", "$" + (VariantData.FIELDNAME_REFERENCE_POSITION + "."
                + ReferencePosition.FIELDNAME_START_SITE)));
        groupFields.put("max", new BasicDBObject("$max", "$" + (VariantData.FIELDNAME_REFERENCE_POSITION + "."
                + ReferencePosition.FIELDNAME_START_SITE)));
        BasicDBObject group = new BasicDBObject("$group", groupFields);
        List<DBObject> pipeline = new ArrayList<DBObject>();
        pipeline.add(match);
        pipeline.add(group);
        Iterator<DBObject> iterator = mongoTemplate.getCollection(actualCollectionName).aggregate(pipeline)
                .results().iterator();
        if (!iterator.hasNext()) {
            progress.markAsComplete();
            return null; // no variants found matching filter
        }
        DBObject aggResult = (DBObject) iterator.next();
        if (displayedRangeMin == null)
            displayedRangeMin = (Long) aggResult.get("min");
        if (displayedRangeMax == null)
            displayedRangeMax = (Long) aggResult.get("max");
    }
    final AtomicInteger finishedThreadCount = new AtomicInteger(0),
            nTotalTreatedVariantCountfinishedThreadCount = new AtomicInteger(0);
    final ConcurrentHashMap<Long, Long> result = new ConcurrentHashMap<Long, Long>();
    final int intervalSize = Math.max(1,
            (int) ((displayedRangeMax - displayedRangeMin) / displayedRangeIntervalCount));
    final ArrayList<Thread> threadsToWaitFor = new ArrayList<Thread>();
    final long rangeMin = displayedRangeMin;
    final ProgressIndicator finalProgress = progress;
    for (int i = 0; i < displayedRangeIntervalCount; i++) {
        List<Criteria> crits = new ArrayList<Criteria>();
        crits.add(Criteria
                .where(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE)
                .is(displayedSequence));
        // if (fStillGotUnwantedTempVariants)
        //     crits.add(Criteria.where(VariantData.FIELDNAME_VERSION).exists(true));
        if (displayedVariantType != null)
            crits.add(Criteria.where(VariantData.FIELDNAME_TYPE).is(displayedVariantType));
        String startSitePath = VariantData.FIELDNAME_REFERENCE_POSITION + "."
                + ReferencePosition.FIELDNAME_START_SITE;
        crits.add(Criteria.where(startSitePath).gte(displayedRangeMin + (i * intervalSize)));
        if (i < displayedRangeIntervalCount - 1)
            crits.add(Criteria.where(startSitePath).lt(displayedRangeMin + ((i + 1) * intervalSize)));
        else
            crits.add(Criteria.where(startSitePath).lte(displayedRangeMax));
        final Query query = new Query(new Criteria().andOperator(crits.toArray(new Criteria[crits.size()])));
        final long chunkIndex = i;
        Thread t = new Thread() {
            public void run() {
                if (!finalProgress.hasAborted()) {
                    long partialCount = mongoTemplate.count(query, actualCollectionName);
                    nTotalTreatedVariantCountfinishedThreadCount.addAndGet((int) partialCount);
                    result.put(rangeMin + (chunkIndex * intervalSize), partialCount);
                    finalProgress.setCurrentStepProgress((short) (finishedThreadCount.incrementAndGet() * 100
                            / displayedRangeIntervalCount));
                }
            }
        };
        if (chunkIndex % NUMBER_OF_SIMULTANEOUS_QUERY_THREADS == (NUMBER_OF_SIMULTANEOUS_QUERY_THREADS - 1))
            t.run(); // run synchronously
        else {
            threadsToWaitFor.add(t);
            t.start(); // run asynchronously for better speed
        }
    }
    if (progress.hasAborted())
        return null;
    for (Thread t : threadsToWaitFor) // wait for all threads before moving to next phase
        t.join();
    progress.setCurrentStepProgress(100);
    LOG.debug("selectionDensity treated " + nTotalTreatedVariantCountfinishedThreadCount.get() + " variants in "
            + (System.currentTimeMillis() - before) / 1000f + "s");
    progress.markAsComplete();
    return new TreeMap<Long, Long>(result);
}
From source file: localdomain.localhost.MongoDbTroubleshooter.java
License: Open Source License
public void run() throws Exception {
    try (PrintWriter writer = new PrintWriter(System.out)) {
        MongoClientURI mongoClientUri = new MongoClientURI(uri);
        writer.println("host=" + mongoClientUri.getHosts() + ",username=" + mongoClientUri.getUsername()
                + ",database=" + mongoClientUri.getDatabase() + ",collection=" + mongoClientUri.getCollection());
        writer.println();
        writer.println();
        writer.println("# MongoClient");
        writer.println();
        MongoClient mongoClient = new MongoClient(mongoClientUri);
        writer.println(mongoClient);
        writer.println();
        writer.println();
        writer.println("# Databases");
        writer.println();
        try {
            List<String> databaseNames = mongoClient.getDatabaseNames();
            for (String databaseName : databaseNames) {
                writer.println("* " + databaseName
                        + (databaseName.equals(mongoClientUri.getDatabase()) ? " - default database" : ""));
            }
        } catch (Exception e) {
            writer.println("Could not list the databases of the MongoDB instance: '" + e.getMessage() + "'");
        }
        writer.println();
        writer.println();
        writer.println("# Database");
        writer.println();
        DB db = mongoClient.getDB(mongoClientUri.getDatabase());
        writer.println("DB: " + db.getName());
        writer.println();
        writer.println();
        writer.println("## Collections");
        writer.println();
        Set<String> myCollections = db.getCollectionNames();
        if (myCollections.isEmpty()) {
            writer.println("NO COLLECTIONS!");
        } else {
            for (String collection : myCollections) {
                DBCollection dbCollection = db.getCollection(collection);
                writer.println("* " + dbCollection.getName() + " - " + dbCollection.getCount() + " entries");
            }
        }
    }
}
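A side note on this troubleshooter: MongoClient.getDatabaseNames() was deprecated in the 3.x driver line in favor of listDatabaseNames(), and listing databases generally requires the listDatabases privilege when authentication is enabled, which is presumably why the call is wrapped in its own try/catch here.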
From source file: localdomain.localhost.MyServlet.java
License: Apache License
@Override
protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    PrintWriter writer = resp.getWriter();
    writer.println("<html>");
    writer.println("<head><title>MyServlet</title></head>");
    writer.println("<body><h1>MyServlet</h1>");
    writer.println("<h2>MongoDB</h2>");
    String uriAsString = System.getProperty("MONGOHQ_URL_MYDB", "mongodb://localhost/local");
    MongoClient mongoClient = null;
    try {
        writer.println("<h4>MongoClientURI</h4>");
        MongoClientURI uri = new MongoClientURI(uriAsString);
        writer.println("<p>" + "host=" + uri.getHosts() + ",username=" + uri.getUsername() + ",database="
                + uri.getDatabase() + ",collection=" + uri.getCollection() + "</p>");
        writer.println("<h4>MongoClient</h4>");
        mongoClient = new MongoClient(uri);
        writer.println("<p>" + mongoClient + "</p>");
        writer.println("<h4>Databases</h4>");
        try {
            List<String> databaseNames = mongoClient.getDatabaseNames();
            writer.println("<ul>");
            for (String databaseName : databaseNames) {
                writer.println("<li>" + databaseName
                        + (databaseName.equals(uri.getDatabase()) ? " - default database" : ""));
            }
            writer.println("</ul>");
        } catch (Exception e) {
            writer.println("<p>Could not list the databases of the MongoDB instance: <code>" + e.getMessage()
                    + "</code></p>");
        }
        writer.println("<h4>Database</h4>");
        DB db = mongoClient.getDB(uri.getDatabase());
        writer.println("<p>DB: " + db.getName() + "</p>");
        writer.println("<h4>Collections</h4>");
        Set<String> myCollections = db.getCollectionNames();
        if (myCollections.isEmpty()) {
            writer.println("<p>NO COLLECTIONS!</p>");
        } else {
            writer.println("<ul>");
            for (String collection : myCollections) {
                DBCollection dbCollection = db.getCollection(collection);
                writer.println("<li>" + dbCollection.getName() + " - " + dbCollection.getCount()
                        + " entries</li>");
            }
            writer.println("</ul>");
        }
        writer.println("<h4>Success!</h4>");
        writer.println("SUCCESS to access the mongodb database");
    } catch (Exception e) {
        writer.println("<code><pre>");
        e.printStackTrace(writer);
        writer.println("</pre></code>");
        e.printStackTrace();
    } finally {
        if (mongoClient != null) {
            try {
                mongoClient.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
    writer.println("</body></html>");
}
From source file: nl.knaw.huygens.timbuctoo.storage.mongo.MongoDB.java
License: Open Source License
/**
 * Inserts a document into the database.
 */
public void insert(DBCollection collection, String id, DBObject document) throws StorageException {
    try {
        collection.insert(document);
        // verify that the document was actually stored; findOne returns null when no document
        // matches (find() returns a DBCursor and is never null, so it cannot be used for this check)
        if (collection.findOne(new BasicDBObject("_id", id)) == null) {
            LOG.error("Failed to insert ({}, {})", collection.getName(), id);
            throw new StorageException("Insert failed");
        }
    } catch (MongoException e) {
        throw new StorageException(e);
    }
}
From source file: org.apache.camel.component.mongodb.MongoDbProducer.java
License: Apache License
private DBCollection calculateCollection(Exchange exchange) throws Exception {
    // dynamic calculation is an option. In most cases it won't be used and we should not penalise
    // all users with running this resolution logic on every Exchange if they won't be using this
    // functionality at all
    if (!endpoint.isDynamicity()) {
        return endpoint.getDbCollection();
    }
    String dynamicDB = exchange.getIn().getHeader(MongoDbConstants.DATABASE, String.class);
    String dynamicCollection = exchange.getIn().getHeader(MongoDbConstants.COLLECTION, String.class);
    @SuppressWarnings("unchecked")
    List<DBObject> dynamicIndex = exchange.getIn().getHeader(MongoDbConstants.COLLECTION_INDEX, List.class);
    DBCollection dbCol = null;
    if (dynamicDB == null && dynamicCollection == null) {
        dbCol = endpoint.getDbCollection();
    } else {
        DB db = null;
        if (dynamicDB == null) {
            db = endpoint.getDb();
        } else {
            db = endpoint.getMongoConnection().getDB(dynamicDB);
        }
        if (dynamicCollection == null) {
            dbCol = db.getCollection(endpoint.getCollection());
        } else {
            dbCol = db.getCollection(dynamicCollection);
            // on the fly add index
            if (dynamicIndex == null) {
                endpoint.ensureIndex(dbCol, endpoint.createIndex());
            } else {
                endpoint.ensureIndex(dbCol, dynamicIndex);
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Dynamic database and/or collection selected: {}->{}", dbCol.getDB().getName(),
                dbCol.getName());
    }
    return dbCol;
}