Usage examples for com.mongodb.DBCursor.close()
@Override public void close()
From source file:exifIndexer.MetadataQueries.java
public ResultDataNormal DateCreated(String s) { //Consulta DateCreated ArrayList paths = new ArrayList<>(); ArrayList names = new ArrayList<>(); MongoHandler dbmon = new MongoHandler(); DB dbmongo = dbmon.connect();/*from w w w. j a v a 2s . com*/ DBCursor cursorDoc; DBCollection collection = dbmongo.getCollection("Date_created"); BasicDBObject query = new BasicDBObject("DATE_VALUE", s); cursorDoc = collection.find(query); if (isNull(cursorDoc)) { System.out.println("nulo"); } try { while (cursorDoc.hasNext()) { paths.add((cursorDoc.next().get("IMG_PATH"))); names.add((cursorDoc.curr().get("IMG_NAME")) + "." + (cursorDoc.curr().get("EXTENSION"))); } } finally { cursorDoc.close(); } return new ResultDataNormal(paths, names); }
From source file:exifIndexer.MetadataQueries.java
public ResultDataWithGPS gps() { //Consulta gps ArrayList paths = new ArrayList<>(); ArrayList names = new ArrayList<>(); ArrayList latitudes = new ArrayList<>(); ArrayList longitudes = new ArrayList<>(); ArrayList latref = new ArrayList<>(); ArrayList lonref = new ArrayList<>(); MongoHandler dbmon = new MongoHandler(); DB dbmongo = dbmon.connect();/*from w ww . j av a2 s . c om*/ DBCursor cursorDoc; Integer cont = 0; Integer pos = 0; ResultDataWithGPS res = null; DBCollection collection = dbmongo.getCollection("GPSFotos"); // BasicDBObject query = new BasicDBObject("GPS_VALUE",true); cursorDoc = collection.find(); try { while (cursorDoc.hasNext()) { pos = cont % 4; switch (pos) { case 0: //GPS_VALUE latitudes.add((cursorDoc.next().get("GPS_VALUE"))); break; case 1: latref.add((cursorDoc.next().get("GPS_VALUE"))); break; case 2: longitudes.add((cursorDoc.next().get("GPS_VALUE"))); break; case 3: lonref.add((cursorDoc.next().get("GPS_VALUE"))); break; } if ((cont != 0) && (pos == 0)) { paths.add((cursorDoc.curr().get("IMG_PATH"))); names.add((cursorDoc.curr().get("IMG_NAME")) + "." + (cursorDoc.curr().get("EXTENSION"))); } cont++; } res = new ResultDataWithGPS(paths, names, latitudes, longitudes, latref, lonref); } finally { cursorDoc.close(); } return res; }
From source file:field_sum.Field_sum.java
/**
 * Demo: inserts 100 documents with random x/y/z values into m101.Sample3, then sums and
 * prints the x field of every document matching 10 &lt; x &lt; 70 and 10 &lt; y &lt; 70.
 *
 * Fix: the MongoClient was never closed; it is now released in a finally block so the
 * driver's connection pool does not leak when this code is embedded or run repeatedly.
 *
 * @param args the command line arguments (unused)
 * @throws java.net.UnknownHostException if the MongoDB host cannot be resolved
 */
public static void main(String[] args) throws UnknownHostException {
    MongoClient client = new MongoClient("localhost", 27017);
    try {
        DB myDB = client.getDB("m101");
        DBCollection collection = myDB.getCollection("Sample3");
        // Source of the random x/y/z values inserted below.
        Random random_numbers = new Random();
        collection.drop();
        DBObject query = QueryBuilder.start("x").greaterThan(10).lessThan(70).and("y").greaterThan(10)
                .lessThan(70).get();
        for (int i = 0; i < 100; i++) {
            collection.insert(new BasicDBObject("x", random_numbers.nextInt(100))
                    .append("y", random_numbers.nextInt(100)).append("z", random_numbers.nextInt(200)));
        }
        DBCursor cursor = collection.find(query);
        int sum = 0;
        try {
            while (cursor.hasNext()) {
                DBObject dBObject = cursor.next();
                sum += (int) dBObject.get("x");
                System.out.println(dBObject.get("x"));
            }
        } finally {
            cursor.close(); // release the server-side cursor even on failure
        }
        System.out.println(sum);
    } finally {
        client.close(); // fix: release the Mongo client and its connection pool
    }
}
From source file:fr.cirad.mgdb.exporting.markeroriented.VcfExportHandler.java
License:Open Source License
/**
 * Exports the variants designated by markerCursor as a zipped VCF written to outputStream.
 *
 * Flow: (1) copies any readyToExportFiles into the zip; (2) maps sample IDs to individuals;
 * (3) scans a copy of the marker cursor to collect distinct sequence names and builds a SAM
 * sequence dictionary; (4) assembles the VCF header from the stored DBVCFHeader documents
 * plus contig lines derived from SequenceStats records; (5) streams genotypes chunk by chunk
 * (chunk size derived from avgObjSize vs. nMaxChunkSizeInMb) through a CustomVCFWriter;
 * (6) appends an "exportName-REMARKS.txt" zip entry if any warnings were written.
 *
 * The project header is written only when all samples belong to a single project (projectId
 * is reset to 0 as soon as a second project is seen).
 *
 * NOTE(review): the ZipOutputStream `zos` is never finished/closed here — presumably the
 * caller closes outputStream; verify, otherwise the zip central directory is never written.
 * NOTE(review): markerCursorCopy is closed on the normal path only; an exception while
 * scanning sequence names would leak it.
 * NOTE(review): on any exception the method logs, calls progress.setError and returns; the
 * finally block still closes the warning writer, emits the remarks entry, and closes writer.
 */
@Override public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs, ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms, int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles) throws Exception { Integer projectId = null;/* ww w. j a va2 s . co m*/ for (SampleId spId : sampleIDs) { if (projectId == null) projectId = spId.getProject(); else if (projectId != spId.getProject()) { projectId = 0; break; // more than one project are involved: no header will be written } } File warningFile = File.createTempFile("export_warnings_", ""); FileWriter warningFileWriter = new FileWriter(warningFile); MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule); int markerCount = markerCursor.count(); ZipOutputStream zos = new ZipOutputStream(outputStream); if (readyToExportFiles != null) for (String readyToExportFile : readyToExportFiles.keySet()) { zos.putNextEntry(new ZipEntry(readyToExportFile)); InputStream inputStream = readyToExportFiles.get(readyToExportFile); byte[] dataBlock = new byte[1024]; int count = inputStream.read(dataBlock, 0, 1024); while (count != -1) { zos.write(dataBlock, 0, count); count = inputStream.read(dataBlock, 0, 1024); } } LinkedHashMap<SampleId, String> sampleIDToIndividualIdMap = new LinkedHashMap<SampleId, String>(); ArrayList<String> individualList = new ArrayList<String>(); List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs); for (int i = 0; i < sampleIDs.size(); i++) { String individualId = individuals.get(i).getId(); sampleIDToIndividualIdMap.put(sampleIDs.get(i), individualId); if (!individualList.contains(individualId)) { individualList.add(individualId); } } String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals"; zos.putNextEntry(new ZipEntry(exportName + ".vcf")); int avgObjSize = (Integer) mongoTemplate 
.getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize"); int nQueryChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize; VariantContextWriter writer = null; try { List<String> distinctSequenceNames = new ArrayList<String>(); String sequenceSeqCollName = MongoTemplateManager.getMongoCollectionName(Sequence.class); if (mongoTemplate.collectionExists(sequenceSeqCollName)) { DBCursor markerCursorCopy = markerCursor.copy(); markerCursorCopy.batchSize(nQueryChunkSize); while (markerCursorCopy.hasNext()) { int nLoadedMarkerCountInLoop = 0; boolean fStartingNewChunk = true; while (markerCursorCopy.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) { DBObject exportVariant = markerCursorCopy.next(); String chr = (String) ((DBObject) exportVariant .get(VariantData.FIELDNAME_REFERENCE_POSITION)) .get(ReferencePosition.FIELDNAME_SEQUENCE); if (!distinctSequenceNames.contains(chr)) distinctSequenceNames.add(chr); } } markerCursorCopy.close(); } Collections.sort(distinctSequenceNames, new AlphaNumericStringComparator()); SAMSequenceDictionary dict = createSAMSequenceDictionary(sModule, distinctSequenceNames); writer = new CustomVCFWriter(null, zos, dict, false, false, true); // VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder(); // vcwb.unsetOption(Options.INDEX_ON_THE_FLY); // vcwb.unsetOption(Options.DO_NOT_WRITE_GENOTYPES); // vcwb.setOption(Options.USE_ASYNC_IOINDEX_ON_THE_FLY); // vcwb.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER); // vcwb.setReferenceDictionary(dict); // writer = vcwb.build(); // writer = new AsyncVariantContextWriter(writer, 3000); progress.moveToNextStep(); // done with dictionary DBCursor headerCursor = mongoTemplate .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class)) .find(new BasicDBObject("_id." 
+ VcfHeaderId.FIELDNAME_PROJECT, projectId)); Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>(); boolean fWriteCommandLine = true, fWriteEngineHeaders = true; // default values while (headerCursor.hasNext()) { DBVCFHeader dbVcfHeader = DBVCFHeader.fromDBObject(headerCursor.next()); headerLines.addAll(dbVcfHeader.getHeaderLines()); // Add sequence header lines (not stored in our vcf header collection) BasicDBObject projection = new BasicDBObject(SequenceStats.FIELDNAME_SEQUENCE_LENGTH, true); int nSequenceIndex = 0; for (String sequenceName : distinctSequenceNames) { String sequenceInfoCollName = MongoTemplateManager.getMongoCollectionName(SequenceStats.class); boolean fCollectionExists = mongoTemplate.collectionExists(sequenceInfoCollName); if (fCollectionExists) { DBObject record = mongoTemplate.getCollection(sequenceInfoCollName).findOne( new Query(Criteria.where("_id").is(sequenceName)).getQueryObject(), projection); if (record == null) { LOG.warn("Sequence '" + sequenceName + "' not found in collection " + sequenceInfoCollName); continue; } Map<String, String> sequenceLineData = new LinkedHashMap<String, String>(); sequenceLineData.put("ID", (String) record.get("_id")); sequenceLineData.put("length", ((Number) record.get(SequenceStats.FIELDNAME_SEQUENCE_LENGTH)).toString()); headerLines.add(new VCFContigHeaderLine(sequenceLineData, nSequenceIndex++)); } } fWriteCommandLine = headerCursor.size() == 1 && dbVcfHeader.getWriteCommandLine(); // wouldn't make sense to include command lines for several runs if (!dbVcfHeader.getWriteEngineHeaders()) fWriteEngineHeaders = false; } headerCursor.close(); VCFHeader header = new VCFHeader(headerLines, individualList); header.setWriteCommandLine(fWriteCommandLine); header.setWriteEngineHeaders(fWriteEngineHeaders); writer.writeHeader(header); short nProgress = 0, nPreviousProgress = 0; long nLoadedMarkerCount = 0; HashMap<SampleId, Comparable /*phID*/> phasingIDsBySample = new HashMap<SampleId, Comparable>(); 
while (markerCursor.hasNext()) { if (progress.hasAborted()) return; int nLoadedMarkerCountInLoop = 0; boolean fStartingNewChunk = true; markerCursor.batchSize(nQueryChunkSize); List<Comparable> currentMarkers = new ArrayList<Comparable>(); while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) { DBObject exportVariant = markerCursor.next(); currentMarkers.add((Comparable) exportVariant.get("_id")); nLoadedMarkerCountInLoop++; fStartingNewChunk = false; } LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes( mongoTemplate, sampleIDs, currentMarkers, true, null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes for (VariantData variant : variantsAndRuns.keySet()) { VariantContext vc = variant.toVariantContext(variantsAndRuns.get(variant), !ObjectId.isValid(variant.getId().toString()), sampleIDToIndividualIdMap, phasingIDsBySample, nMinimumGenotypeQuality, nMinimumReadDepth, warningFileWriter, markerSynonyms == null ? 
variant.getId() : markerSynonyms.get(variant.getId())); try { writer.add(vc); } catch (Throwable t) { Exception e = new Exception("Unable to convert to VariantContext: " + variant.getId(), t); LOG.debug("error", e); throw e; } if (nLoadedMarkerCountInLoop > currentMarkers.size()) LOG.error("Bug: writing variant number " + nLoadedMarkerCountInLoop + " (only " + currentMarkers.size() + " variants expected)"); } nLoadedMarkerCount += nLoadedMarkerCountInLoop; nProgress = (short) (nLoadedMarkerCount * 100 / markerCount); if (nProgress > nPreviousProgress) { progress.setCurrentStepProgress(nProgress); nPreviousProgress = nProgress; } } progress.setCurrentStepProgress((short) 100); } catch (Exception e) { LOG.error("Error exporting", e); progress.setError(e.getMessage()); return; } finally { warningFileWriter.close(); if (warningFile.length() > 0) { zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt")); int nWarningCount = 0; BufferedReader in = new BufferedReader(new FileReader(warningFile)); String sLine; while ((sLine = in.readLine()) != null) { zos.write((sLine + "\n").getBytes()); nWarningCount++; } LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount); in.close(); } warningFile.delete(); if (writer != null) try { writer.close(); } catch (Throwable ignored) { } } }
From source file:fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License:Open Source License
/**
 * Builds the variant-detail page model: resolves the project, maps each sample index to its
 * individual, loads the variant, and collects per-run genotype rows plus VCF header metadata
 * (additional-info column names and their descriptions) for display.
 *
 * An empty "individuals" parameter selects all individuals (every map value set to true).
 *
 * NOTE(review): headerCursor is only closed inside the "size() > 0 &amp;&amp; headerCols.isEmpty()"
 * branch, so the cursor leaks on every other run iteration — it should be closed
 * unconditionally (e.g. in a finally block).
 *
 * @param sModule the module (database) name
 * @param projectId the project id
 * @param variantId the variant id
 * @param selectedIndividuals semicolon-separated individuals; empty string means all
 * @return the model and view for the variant detail page
 * @throws Exception the exception
 */
@RequestMapping(value = variantDetailsURL, method = RequestMethod.GET) protected ModelAndView setupDetailPage(@RequestParam("module") String sModule, @RequestParam("project") int projectId, @RequestParam("variantId") String variantId, @RequestParam("individuals") String selectedIndividuals) throws Exception { ModelAndView mav = new ModelAndView(); MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule); GenotypingProject project = mongoTemplate.findById(projectId, GenotypingProject.class); mav.addObject("project", project); List<String> selectedIndividualList = Arrays.asList(selectedIndividuals.split(";")); HashMap<Integer, String> sampleToIndividualMap = new LinkedHashMap<Integer, String>(); HashMap<String, Boolean> individualMap = new LinkedHashMap<String, Boolean>(); for (String ind : listIndividualsInAlphaNumericOrder(sModule, projectId)) { for (Integer sampleIndex : project.getIndividualSampleIndexes(ind)) sampleToIndividualMap.put(sampleIndex, ind); individualMap.put(ind, selectedIndividuals.length() == 0 || selectedIndividualList.contains(ind)); } mav.addObject("individualMap", individualMap); HashMap<Integer, List<String>> sampleIDsByProject = new HashMap<Integer, List<String>>(); sampleIDsByProject.put(projectId, selectedIndividualList); VariantData var = mongoTemplate.findById(variantId, VariantData.class); mav.addObject("variantType", var.getType()); mav.addObject("refPos", var.getReferencePosition()); Map<String /* run */, Map<String /* individual */, List<Comparable /* cell value */>>> dataByRun = new TreeMap<String, Map<String, List<Comparable>>>( new AlphaNumericComparator()); Map<String /* run */, Map<String /* info field */, Object>> additionalInfoByRun = new TreeMap<String, 
Map<String, Object>>( new AlphaNumericComparator()); Map<String /* run */, Map<String /* info field */, VCFInfoHeaderLine>> additionalInfoDescByRun = new HashMap<String, Map<String, VCFInfoHeaderLine>>(); List<String> headerCols = new ArrayList<String>(); List<String> headerColDescs = new ArrayList<String>(); List<Criteria> crits = new ArrayList<Criteria>(); crits.add(Criteria.where("_id." + VariantRunDataId.FIELDNAME_PROJECT_ID).is(projectId)); crits.add(Criteria.where("_id." + VariantRunDataId.FIELDNAME_VARIANT_ID).is(var.getId())); List<VariantRunData> runs = mongoTemplate.find( new Query(new Criteria().andOperator(crits.toArray(new Criteria[crits.size()]))), VariantRunData.class); for (VariantRunData run : runs) { DBVCFHeader vcfHeader = null; BasicDBList andList = new BasicDBList(); andList.add(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId)); andList.add(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_RUN, run.getRunName())); DBCursor headerCursor = mongoTemplate .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class)) .find(new BasicDBObject("$and", andList)); if (headerCursor.size() > 0 && headerCols.isEmpty()) { vcfHeader = DBVCFHeader.fromDBObject(headerCursor.next()); headerCursor.close(); } Map<String /* individual */, List<Comparable /* cell value */>> genotypeRows = new TreeMap<String, List<Comparable>>( new AlphaNumericComparator()); additionalInfoByRun.put(run.getRunName(), run.getAdditionalInfo()); if (vcfHeader != null) additionalInfoDescByRun.put(run.getRunName(), vcfHeader.getmInfoMetaData()); dataByRun.put(run.getRunName(), genotypeRows); for (Integer sample : run.getSampleGenotypes().keySet()) { SampleGenotype sg = run.getSampleGenotypes().get(sample); List<Comparable> genotypeRow = new ArrayList<Comparable>(); genotypeRows.put(sampleToIndividualMap.get(sample), genotypeRow); genotypeRow.add(sg.getCode()); for (String gtInfo : sg.getAdditionalInfo().keySet()) { if (!headerCols.contains(gtInfo) /* 
exclude some fields that we don't want to show */ && !gtInfo .equals(VariantData.GT_FIELD_PHASED_GT) && !gtInfo.equals(VariantData.GT_FIELD_PHASED_ID) && !gtInfo.equals(VariantRunData.FIELDNAME_ADDITIONAL_INFO_EFFECT_GENE) && !gtInfo.equals(VariantRunData.FIELDNAME_ADDITIONAL_INFO_EFFECT_NAME)) { headerCols.add(gtInfo); headerColDescs.add(vcfHeader != null ? ((VCFFormatHeaderLine) vcfHeader.getmFormatMetaData().get(gtInfo)) .getDescription() : ""); } if (!headerCols.contains(gtInfo)) continue; int cellIndex = headerCols.indexOf(gtInfo); while (genotypeRow.size() < cellIndex + 2) genotypeRow.add(null); genotypeRow.set(cellIndex + 1, sg.getAdditionalInfo().get(gtInfo)); } } } mav.addObject("headerAdditionalInfo", headerCols); mav.addObject("headerAdditionalInfoDesc", headerColDescs); mav.addObject("runAdditionalInfo", additionalInfoByRun); mav.addObject("runAdditionalInfoDesc", additionalInfoDescByRun); mav.addObject("dataByRun", dataByRun); mav.addObject("knownAlleles", var.getKnownAlleleList()); return mav; }
From source file:fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License:Open Source License
/**
 * Streams a filtered variant export in the requested format, either directly to the HTTP
 * response (as a zip download) or to a server-side file whose URL is written back to the
 * client when fKeepExportOnServer is set.
 *
 * Uses a no-timeout DBCursor over either the full VariantData collection or the temporary
 * per-token variant collection (when a filtered subset was previously materialized), then
 * delegates to the matching individual-oriented or marker-oriented export handler.
 *
 * NOTE(review): the output stream `os` is never closed in this method — presumably the
 * servlet container closes the response stream, but the FileOutputStream taken in the
 * fKeepExportOnServer branch should be closed explicitly; verify.
 * NOTE(review): markerCursor is closed in the finally block, but the copies handed to
 * createExportFiles (markerCursor.copy()) are the handlers' responsibility to close.
 *
 * @param request the request
 * @param response the response
 * @param sModule the module (database) name
 * @param fKeepExportOnServer whether to keep the export file on the server
 * @param sExportFormat the export format
 * @param exportID the export id ("...|token")
 * @param projId the project id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals (empty string means all)
 * @param gtPattern the genotype pattern code
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data ratio
 * @param minmaf the minimum minor-allele frequency (optional)
 * @param maxmaf the maximum minor-allele frequency (optional)
 * @param minposition the minimum position
 * @param maxposition the maximum position
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @throws Exception the exception
 */
@RequestMapping(variantExportDataURL) protected void exportVariants(HttpServletRequest request, HttpServletResponse response, @RequestParam("module") String sModule, @RequestParam("keepExportOnServer") boolean fKeepExportOnServer, @RequestParam("exportFormat") String sExportFormat, @RequestParam("exportID") String exportID, @RequestParam("project") int projId, @RequestParam("variantTypes") String selectedVariantTypes, @RequestParam("sequences") String selectedSequences, @RequestParam(value = "individuals", required = false) String selectedIndividuals, @RequestParam("gtPattern") String gtPattern, @RequestParam("genotypeQualityThreshold") int genotypeQualityThreshold, @RequestParam("readDepthThreshold") int readDepthThreshold, @RequestParam("missingData") double missingData, @RequestParam(value = "minmaf", required = false) Float minmaf, @RequestParam(value = "maxmaf", required = false) Float maxmaf, @RequestParam("minposition") Long minposition, @RequestParam("maxposition") Long maxposition, @RequestParam("alleleCount") String alleleCount, 
@RequestParam("geneName") String geneName, @RequestParam("variantEffects") String variantEffects) throws Exception { // exportID = URLDecoder.decode(exportID, "UTF-8"); String token = exportID.substring(1 + exportID.indexOf('|')); ProgressIndicator progress = ProgressIndicator.get(token); if (progress == null) { progress = new ProgressIndicator(token, new String[] { "Identifying matching variants" }); ProgressIndicator.registerProgressIndicator(progress); } long before = System.currentTimeMillis(); final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule); List<String> selectedIndividualList = selectedIndividuals.length() == 0 ? getIndividualsInDbOrder(sModule, projId) /* no selection means all selected */ : Arrays.asList(selectedIndividuals.split(";")); long count = countVariants(request, sModule, projId, selectedVariantTypes, selectedSequences, selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData, minmaf, maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects, "" /* if we pass exportID then the progress indicator is going to be replaced by another, and we don't need it for counting since we cache count values */); DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, token, false); long nTempVarCount = mongoTemplate.count(new Query(), tmpVarColl.getName()); boolean fWorkingOnFullDataset = mongoTemplate.count(null, VariantData.class) == count; if (!fWorkingOnFullDataset && nTempVarCount == 0) { progress.setError(MESSAGE_TEMP_RECORDS_NOT_FOUND); return; } // use a cursor to avoid using too much memory DBObject query = count == nTempVarCount ? null : new BasicDBObject(VariantData.FIELDNAME_VERSION, new BasicDBObject("$exists", true)); String sequenceField = VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE; String startField = VariantData.FIELDNAME_REFERENCE_POSITION + "." 
+ ReferencePosition.FIELDNAME_START_SITE; BasicDBObject sort = new BasicDBObject("_id", 1); /* necessary for MgdbDao.getSampleGenotypes to work properly */ DBObject projection = new BasicDBObject(); projection.put(sequenceField, 1); projection.put(startField, 1); DBCursor markerCursor = mongoTemplate.getCollection( !fWorkingOnFullDataset ? tmpVarColl.getName() : mongoTemplate.getCollectionName(VariantData.class)) .find(query, projection).sort(sort); markerCursor.addOption(Bytes.QUERYOPTION_NOTIMEOUT); try { AbstractIndividualOrientedExportHandler individualOrientedExportHandler = AbstractIndividualOrientedExportHandler .getIndividualOrientedExportHandlers().get(sExportFormat); AbstractMarkerOrientedExportHandler markerOrientedExportHandler = AbstractMarkerOrientedExportHandler .getMarkerOrientedExportHandlers().get(sExportFormat); GenotypingProject project = mongoTemplate.findById(projId, GenotypingProject.class); String filename = sModule + "_" + project.getName() + "_" + new SimpleDateFormat("yyyy-MM-dd").format(new Date()) + "_" + count + "variants_" + sExportFormat + "." + (individualOrientedExportHandler != null ? individualOrientedExportHandler : markerOrientedExportHandler).getExportFileExtension(); OutputStream os; LOG.info((fKeepExportOnServer ? 
"On-server" : "Direct-download") + " export requested: " + token); if (fKeepExportOnServer) { String relativeOutputFolder = File.separator + FRONTEND_URL + File.separator + TMP_OUTPUT_FOLDER + File.separator + token.replaceAll("\\|", "_") + File.separator; File outputLocation = new File( request.getSession().getServletContext().getRealPath(relativeOutputFolder)); if (!outputLocation.exists() && !outputLocation.mkdirs()) throw new Exception("Unable to create folder: " + outputLocation); os = new FileOutputStream(new File(outputLocation.getAbsolutePath() + File.separator + filename)); response.setContentType("text/plain"); } else { os = response.getOutputStream(); response.setContentType("application/zip"); response.setHeader("Content-disposition", "inline; filename=" + filename); } ArrayList<SampleId> sampleIDs = new ArrayList<SampleId>(); for (String individual : selectedIndividualList) for (Integer individualSampleIndex : project.getIndividualSampleIndexes(individual)) sampleIDs.add(new SampleId(projId, individualSampleIndex)); if (fKeepExportOnServer) { String relativeOutputFolder = FRONTEND_URL + File.separator + TMP_OUTPUT_FOLDER + File.separator + token.replaceAll("\\|", "_") + File.separator; String relativeOutputFolderUrl = request.getContextPath() + "/" + relativeOutputFolder.replace(File.separator, "/"); String exportURL = relativeOutputFolderUrl + filename; LOG.debug("On-server export file for export " + token + ": " + exportURL); response.getWriter().write(exportURL); response.flushBuffer(); } // else // { // // The two next lines are an ugly hack that makes the client believe transfer has started. 
Otherwise we may end-up with a client-side timeout (search for network.http.response.timeout for more details) // response.getOutputStream().print(" "); // response.getOutputStream().flush(); // } HashMap<String, Integer> annotationFieldThresholds = new HashMap<String, Integer>(); annotationFieldThresholds.put(VCFConstants.GENOTYPE_QUALITY_KEY, genotypeQualityThreshold); annotationFieldThresholds.put(VCFConstants.DEPTH_KEY, readDepthThreshold); if (individualOrientedExportHandler != null) { progress.addStep("Reading and re-organizing genotypes"); // initial step will consist in organizing genotypes by individual rather than by marker progress.moveToNextStep(); // done with identifying variants TreeMap<String, File> exportFiles = individualOrientedExportHandler.createExportFiles(sModule, markerCursor.copy(), sampleIDs, new ArrayList<SampleId>(), token, annotationFieldThresholds, new HashMap<String, Integer>(), project.getSampleIdToIndividualMap(selectedIndividualList), progress); if (!progress.hasAborted()) { for (String step : individualOrientedExportHandler.getStepList()) progress.addStep(step); progress.moveToNextStep(); individualOrientedExportHandler.exportData(os, sModule, exportFiles.values(), true, progress, markerCursor, null, null); } } else if (markerOrientedExportHandler != null) { for (String step : markerOrientedExportHandler.getStepList()) progress.addStep(step); progress.moveToNextStep(); // done with identifying variants markerOrientedExportHandler.exportData(os, sModule, sampleIDs, new ArrayList<SampleId>(), progress, markerCursor, null, annotationFieldThresholds, new HashMap<String, Integer>(), project.getSampleIdToIndividualMap(selectedIndividualList), null); LOG.debug("done with exportData"); } else throw new Exception("No export handler found for format " + sExportFormat); if (!progress.hasAborted()) { LOG.info("doVariantExport took " + (System.currentTimeMillis() - before) / 1000d + "s to process " + count + " variants and " + 
selectedIndividualList.size() + " individuals"); progress.markAsComplete(); } } catch (Throwable t) { LOG.error("Error exporting data", t); progress.setError("Error exporting data: " + t.getClass().getSimpleName() + (t.getMessage() != null ? " - " + t.getMessage() : "")); return; } finally { markerCursor.close(); } }
From source file:fr.cnes.sitools.datasource.mongodb.business.SitoolsMongoDBDataSource.java
License:Open Source License
/** * Method to close the result set/* w w w . j ava2s . c om*/ * * @param cursor * the {@link DBCursor} to close */ private void closeCursor(DBCursor cursor) { if (cursor != null) { cursor.close(); } }
From source file:fr.gouv.vitam.cases.DbRequest.java
License:Open Source License
/**
 * Executes the request against the Domain collection and builds a level-1 result.
 *
 * Rejects ES-only requests and requests with no requestModel. In simulate mode it logs the
 * condition and returns a synthetic ("false") result. Otherwise every matching Domain id is
 * added to the new result — filtered through previous.getCurrentDaip() when useStart is
 * true — and the NBCHILD counts are summed into nbSubNodes. When not starting, the result
 * must pass the ancestor check against the previous result, else null is returned; min/max
 * levels are then recomputed (valid here since a Domain path has length 1).
 *
 * NOTE(review): cursor.close() is called after the loop but not in a finally block — an
 * exception while iterating would leak the cursor.
 *
 * @param request the parsed request (must carry a MongoDB-evaluable requestModel)
 * @param previous the result of the previous step, used for start filtering / ancestry
 * @param useStart whether to restrict matches to ids already present in previous
 * @return the level-1 result, or null when the ancestor check fails
 */
private final ResultInterface getRequestDomain(final TypeRequest request, final ResultInterface previous, final boolean useStart) throws InvalidExecOperationException, InstantiationException, IllegalAccessException { // must be MD if (request.isOnlyES) { throw new InvalidExecOperationException("Expression is not valid for Domain"); }/*from w ww. j a v a2 s .c o m*/ if (request.requestModel == null) { throw new InvalidExecOperationException( "Expression is not valid for Domain since no Request is available"); } final String srequest = request.requestModel.toString(); final BasicDBObject condition = (BasicDBObject) JSON.parse(srequest); final ResultInterface newResult = CassandraAccess.createOneResult(); newResult.setMinLevel(1); newResult.setMaxLevel(1); if (simulate) { LOGGER.info("ReqDomain: {}\n\t{}", condition, ID_NBCHILD); return createFalseResult(null, 1); } LOGGER.debug("ReqDomain: {}\n\t{}", condition, ID_NBCHILD); if (GlobalDatas.PRINT_REQUEST) { LOGGER.warn("ReqDomain: {}\n\t{}", condition, ID_NBCHILD); } final DBCursor cursor = mdAccess.find(mdAccess.domains, condition, ID_NBCHILD); long tempCount = 0; while (cursor.hasNext()) { final Domain dom = (Domain) cursor.next(); final String mid = dom.getId(); if (useStart) { if (previous.getCurrentDaip().contains(mid)) { newResult.getCurrentDaip().add(mid); tempCount += dom.getLong(Domain.NBCHILD); } } else { newResult.getCurrentDaip().add(mid); tempCount += dom.getLong(Domain.NBCHILD); } } cursor.close(); newResult.setNbSubNodes(tempCount); // filter on Ancestor if (!useStart && !previous.checkAncestor(mdAccess, newResult)) { LOGGER.error("No ancestor"); return null; } // Compute of MinMax if valid since path = 1 length (root) newResult.updateMinMax(); if (GlobalDatas.PRINT_REQUEST) { newResult.putBeforeSave(); LOGGER.warn("Dom: {}", newResult); } return newResult; }
From source file:fr.gouv.vitam.cases.DbRequest.java
License:Open Source License
/**
 * Executes a one-level-down DAip request using metadata (MD) only.
 *
 * Builds an $in clause from previous.getCurrentDaip() — against DAip.ID when useStart is
 * true, else against the Domain2DAip or DAip2DAip child link field depending on the
 * previous level — then merges in the request's condition. Matching DAip ids are collected
 * (filtered through previous when useStart) and NBCHILD counts summed. When not starting,
 * the ancestor check must pass or null is returned. Min/max levels are simply incremented
 * from the previous result (no updateMinMax: ids here are node UUIDs, not valid paths).
 *
 * NOTE(review): cursor.close() is not inside a finally block — an exception while iterating
 * would leak the cursor.
 *
 * @param request the parsed request (must carry a requestModel)
 * @param previous the previous step's result supplying the candidate parent ids
 * @param useStart whether to restrict matches to ids already present in previous
 * @return the next-level result, or null when the ancestor check fails
 */
private final ResultInterface getRequest1LevelMaipFromMD(final TypeRequest request, final ResultInterface previous, final boolean useStart) throws InvalidExecOperationException, InstantiationException, IllegalAccessException { BasicDBObject query = null;//w w w. j a v a 2 s . c om if (request.requestModel == null) { throw new InvalidExecOperationException( "Expression is not valid for Daip Level 1 with MD only since no MD request is available"); } if (useStart) { query = getInClauseForField(DAip.ID, previous.getCurrentDaip()); } else { if (previous.getMinLevel() == 1) { query = getInClauseForField(CassandraAccess.VitamLinks.Domain2DAip.field2to1, previous.getCurrentDaip()); } else { query = getInClauseForField(CassandraAccess.VitamLinks.DAip2DAip.field2to1, previous.getCurrentDaip()); } } final String srequest = request.requestModel.toString(); final BasicDBObject condition = (BasicDBObject) JSON.parse(srequest); query.putAll((BSONObject) condition); final ResultInterface subresult = CassandraAccess.createOneResult(); if (simulate) { LOGGER.info("Req1LevelMD: {}", query); return createFalseResult(previous, 1); } LOGGER.debug("Req1LevelMD: {}", query); if (GlobalDatas.PRINT_REQUEST) { LOGGER.warn("Req1LevelMD: {}", query); } final DBCursor cursor = mdAccess.find(mdAccess.daips, query, ID_NBCHILD); long tempCount = 0; while (cursor.hasNext()) { final DAip maip = (DAip) cursor.next(); final String mid = maip.getId(); if (useStart) { if (previous.getCurrentDaip().contains(mid)) { subresult.getCurrentDaip().add(mid); tempCount += maip.getLong(Domain.NBCHILD); } } else { subresult.getCurrentDaip().add(mid); tempCount += maip.getLong(Domain.NBCHILD); } } cursor.close(); subresult.setNbSubNodes(tempCount); // filter on Ancestor if (!useStart && !previous.checkAncestor(mdAccess, subresult)) { LOGGER.error("No ancestor for " + query + "\n" + previous.getCurrentDaip() + " not in " + subresult.getCurrentDaip()); return null; } // Not updateMinMax since result is not "valid" 
path but node UUID and not needed subresult.setMinLevel(previous.getMinLevel() + 1); subresult.setMaxLevel(previous.getMaxLevel() + 1); if (GlobalDatas.PRINT_REQUEST) { subresult.putBeforeSave(); LOGGER.warn("MetaAip2: {}", subresult); } return subresult; }
From source file:fr.gouv.vitam.cases.DbRequest.java
License:Open Source License
/**
 * Executes a negative-relative-depth DAip request (walking UP the tree) using MD only.
 *
 * For each id in the previous result, loads the DAip and scans its DomDepth map for
 * ancestors exactly |relativedepth| levels away, collecting those ancestor ids into a set.
 * The query is then an $in clause on DAip.ID over that set, merged with the request's
 * condition. Matching DAips are loaded, NBCHILD summed, and min/max levels tracked from
 * each node's max depth (no updateMinMax: ids are node UUIDs, not valid paths).
 *
 * Rejected outright when useStart is true (cannot walk a negative path from a start set)
 * or when no requestModel is present; simulate mode returns a synthetic result.
 *
 * NOTE(review): cursor.close() is not inside a finally block — an exception while iterating
 * would leak the cursor.
 *
 * @param request the parsed request; request.relativedepth is negative here
 * @param previous the previous step's result supplying the starting ids
 * @param useStart must be false; a negative path cannot start the traversal
 * @return the ancestor-level result (never filtered by checkAncestor in this direction)
 */
private final ResultInterface getRequestNegativeRelativeDepthFromMD(final TypeRequest request, final ResultInterface previous, final boolean useStart) throws InvalidExecOperationException, InstantiationException, IllegalAccessException { BasicDBObject query = null;//from w ww . jav a 2s . c o m if (request.requestModel == null) { throw new InvalidExecOperationException("Expression is not valid for Daip Level " + request.relativedepth + " with MD only since no MD request is available"); } if (useStart) { throw new InvalidExecOperationException("Cannot make a negative path when starting up"); } if (simulate) { LOGGER.info("Req-xLevelMD"); return createFalseResult(previous, 1); } int distance = -request.relativedepth; Set<String> subset = new HashSet<String>(); for (String prev : previous.getCurrentDaip()) { DAip dprev = DAip.findOne(mdAccess, prev); Map<String, Integer> parents = dprev.getDomDepth(); for (Entry<String, Integer> elt : parents.entrySet()) { if (elt.getValue() == distance) { subset.add(elt.getKey()); } } } // Use ID and not graph dependencies query = getInClauseForField(DAip.ID, subset); final String srequest = request.requestModel.toString(); final BasicDBObject condition = (BasicDBObject) JSON.parse(srequest); query.putAll((BSONObject) condition); final ResultInterface subresult = CassandraAccess.createOneResult(); LOGGER.debug("Req-xLevelMD: {}", query); if (GlobalDatas.PRINT_REQUEST) { LOGGER.warn("Req-xLevelMD: {}", query); } final DBCursor cursor = mdAccess.find(mdAccess.daips, query, ID_NBCHILD); long tempCount = 0; subresult.setMinLevel(previous.getMaxLevel()); subresult.setMaxLevel(0); while (cursor.hasNext()) { final DAip maip = (DAip) cursor.next(); final String mid = maip.getId(); subresult.getCurrentDaip().add(mid); maip.load(mdAccess); tempCount += maip.getLong(DAip.NBCHILD); // Not updateMinMax since result is not "valid" path but node UUID and not needed int max = maip.getMaxDepth(); if (subresult.getMaxLevel() < max) { 
subresult.setMaxLevel(max); } if (subresult.getMinLevel() > max) { subresult.setMinLevel(max); } } cursor.close(); subresult.setNbSubNodes(tempCount); if (GlobalDatas.PRINT_REQUEST) { subresult.putBeforeSave(); LOGGER.warn("MetaAip2: {}", subresult); } return subresult; }