List of usage examples for the java.util.zip.ZipOutputStream constructor
public ZipOutputStream(OutputStream out)
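This constructor wraps any OutputStream (a file stream, a servlet response stream, a ByteArrayOutputStream, ...) and writes ZIP-formatted data to it, as the examples below illustrate. First, a minimal self-contained sketch of the typical putNextEntry / write / closeEntry cycle; it is not taken from the examples that follow, and the output file name and entry name are purely illustrative.

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class ZipOutputStreamBasicUsage {
    public static void main(String[] args) throws IOException {
        // Hypothetical output location; any OutputStream works (file, servlet response, byte array...).
        try (ZipOutputStream zos = new ZipOutputStream(
                new BufferedOutputStream(new FileOutputStream("example.zip")))) {
            zos.putNextEntry(new ZipEntry("hello.txt")); // start a new archive entry
            zos.write("Hello, zip!".getBytes(StandardCharsets.UTF_8));
            zos.closeEntry();                            // finish the entry before starting the next one
        } // closing the stream writes the central directory and closes the underlying stream
    }
}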
From source file:fr.cirad.mgdb.exporting.individualoriented.DARwinExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles,
        boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor,
        Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles) throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    GenotypingProject aProject = mongoTemplate.findOne(
            new Query(Criteria.where(GenotypingProject.FIELDNAME_PLOIDY_LEVEL).exists(true)),
            GenotypingProject.class);
    if (aProject == null)
        LOG.warn("Unable to find a project containing ploidy level information! Assuming ploidy level is 2.");

    int ploidy = aProject == null ? 2 : aProject.getPloidyLevel();

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size() + "individuals";

    StringBuffer donFileContents = new StringBuffer(
            "@DARwin 5.0 - DON -" + LINE_SEPARATOR + individualExportFiles.size() + "\t" + 1 + LINE_SEPARATOR
                    + "N" + "\t" + "individual" + LINE_SEPARATOR);

    int count = 0;
    String missingGenotype = "";
    for (int j = 0; j < ploidy; j++)
        missingGenotype += "\tN";

    zos.putNextEntry(new ZipEntry(exportName + ".var"));
    zos.write(("@DARwin 5.0 - ALLELIC - " + ploidy + LINE_SEPARATOR + individualExportFiles.size() + "\t"
            + markerCount * ploidy + LINE_SEPARATOR + "N").getBytes());

    DBCursor markerCursorCopy = markerCursor.copy(); // dunno how expensive this is, but seems safer than keeping all IDs in memory at any time

    short nProgress = 0, nPreviousProgress = 0;
    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    markerCursorCopy.batchSize(nChunkSize);

    int nMarkerIndex = 0;
    while (markerCursorCopy.hasNext()) {
        DBObject exportVariant = markerCursorCopy.next();
        Comparable markerId = (Comparable) exportVariant.get("_id");

        if (markerSynonyms != null) {
            Comparable syn = markerSynonyms.get(markerId);
            if (syn != null)
                markerId = syn;
        }
        for (int j = 0; j < ploidy; j++)
            zos.write(("\t" + markerId).getBytes());
    }

    TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>();
    ArrayList<String> distinctAlleles = new ArrayList<String>(); // the index of each allele will be used as its code

    int i = 0;
    for (File f : individualExportFiles) {
        BufferedReader in = new BufferedReader(new FileReader(f));
        try {
            String individualId, line = in.readLine(); // read sample id
            if (line != null)
                individualId = line;
            else
                throw new Exception("Unable to read first line of temp export file " + f.getName());

            donFileContents.append(++count + "\t" + individualId + LINE_SEPARATOR);
            zos.write((LINE_SEPARATOR + count).getBytes());
            nMarkerIndex = 0;

            while ((line = in.readLine()) != null) {
                List<String> genotypes = MgdbDao.split(line, "|");
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                for (String genotype : genotypes) {
                    if (genotype.length() == 0)
                        continue; /* skip missing genotypes */

                    int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                    if (gtCount > highestGenotypeCount) {
                        highestGenotypeCount = gtCount;
                        mostFrequentGenotype = genotype;
                    }
                    genotypeCounts.put(genotype, gtCount);
                }

                if (genotypeCounts.size() > 1) {
                    warningFileWriter.write("- Dissimilar genotypes found for variant __" + nMarkerIndex
                            + "__, individual " + individualId + ". Exporting most frequent: "
                            + mostFrequentGenotype + "\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String codedGenotype = "";
                if (mostFrequentGenotype != null)
                    for (String allele : mostFrequentGenotype.split(" ")) {
                        if (!distinctAlleles.contains(allele))
                            distinctAlleles.add(allele);
                        codedGenotype += "\t" + distinctAlleles.indexOf(allele);
                    }
                else
                    codedGenotype = missingGenotype.replaceAll("N", "-1"); // missing data is coded as -1
                zos.write(codedGenotype.getBytes());

                nMarkerIndex++;
            }
        } catch (Exception e) {
            LOG.error("Error exporting data", e);
            progress.setError("Error exporting data: " + e.getClass().getSimpleName()
                    + (e.getMessage() != null ? " - " + e.getMessage() : ""));
            return;
        } finally {
            in.close();
        }

        if (progress.hasAborted())
            return;

        nProgress = (short) (++i * 100 / individualExportFiles.size());
        if (nProgress > nPreviousProgress) {
            // LOG.debug("============= doDARwinExport (" + i + "): " + nProgress + "% =============");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }

        if (!f.delete()) {
            f.deleteOnExit();
            LOG.info("Unable to delete tmp export file " + f.getAbsolutePath());
        }
    }

    zos.putNextEntry(new ZipEntry(exportName + ".don"));
    zos.write(donFileContents.toString().getBytes());

    // now read variant names for those that induced warnings
    nMarkerIndex = 0;
    markerCursor.batchSize(nChunkSize);
    while (markerCursor.hasNext()) {
        DBObject exportVariant = markerCursor.next();
        if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) {
            Comparable markerId = (Comparable) exportVariant.get("_id");
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(markerId);
                if (syn != null)
                    markerId = syn;
            }
            for (int j = 0; j < ploidy; j++)
                zos.write(("\t" + markerId).getBytes());
            problematicMarkerIndexToNameMap.put(nMarkerIndex, markerId);
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet())
                sLine = sLine.replaceAll("__" + aMarkerIndex + "__",
                        problematicMarkerIndexToNameMap.get(aMarkerIndex).toString());
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}
From source file:com.adobe.communities.ugc.migration.legacyExport.GenericExportServlet.java
@Override
protected void doGet(final SlingHttpServletRequest request, final SlingHttpServletResponse response)
        throws ServletException, IOException {
    if (!request.getRequestParameterMap().containsKey("path")) {
        throw new ServletException("No path specified for export. Exiting.");
    }
    final String path = StringUtils.stripEnd(request.getRequestParameter("path").getString(), "/");
    final Resource resource = request.getResourceResolver().getResource(path);
    if (resource == null) {
        throw new ServletException("Could not find a valid resource for export");
    }
    entries = new HashMap<String, Boolean>();
    entriesToSkip = new HashMap<String, Boolean>();
    File outFile = null;
    try {
        outFile = File.createTempFile(UUID.randomUUID().toString(), ".zip");
        if (!outFile.canWrite()) {
            throw new ServletException("Cannot write to specified output file");
        }
        response.setContentType("application/octet-stream");
        final String headerKey = "Content-Disposition";
        final String headerValue = "attachment; filename=\"export.zip\"";
        response.setHeader(headerKey, headerValue);
        FileOutputStream fos = new FileOutputStream(outFile);
        BufferedOutputStream bos = new BufferedOutputStream(fos);
        zip = new ZipOutputStream(bos);
        OutputStream outStream = null;
        InputStream inStream = null;
        try {
            exportContent(resource, path);
            if (entries.size() > 0) {
                exportCommentSystems(entries, entriesToSkip, resource, path);
            }
            IOUtils.closeQuietly(zip);
            IOUtils.closeQuietly(bos);
            IOUtils.closeQuietly(fos);
            // obtains response's output stream
            outStream = response.getOutputStream();
            inStream = new FileInputStream(outFile);
            // copy from file to output
            IOUtils.copy(inStream, outStream);
        } catch (final IOException e) {
            throw new ServletException(e);
        } catch (final Exception e) {
            throw new ServletException(e);
        } finally {
            IOUtils.closeQuietly(zip);
            IOUtils.closeQuietly(bos);
            IOUtils.closeQuietly(fos);
            IOUtils.closeQuietly(inStream);
            IOUtils.closeQuietly(outStream);
        }
    } finally {
        if (outFile != null) {
            outFile.delete();
        }
    }
}
From source file:edu.umd.cs.submitServer.servlets.UploadSubmission.java
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
    long now = System.currentTimeMillis();
    Timestamp submissionTimestamp = new Timestamp(now);

    // these are set by filters or previous servlets
    Project project = (Project) request.getAttribute(PROJECT);
    StudentRegistration studentRegistration = (StudentRegistration) request.getAttribute(STUDENT_REGISTRATION);
    MultipartRequest multipartRequest = (MultipartRequest) request.getAttribute(MULTIPART_REQUEST);
    boolean webBasedUpload = ((Boolean) request.getAttribute("webBasedUpload")).booleanValue();
    String clientTool = multipartRequest.getCheckedParameter("submitClientTool");
    String clientVersion = multipartRequest.getOptionalCheckedParameter("submitClientVersion");
    String cvsTimestamp = multipartRequest.getOptionalCheckedParameter("cvstagTimestamp");

    Collection<FileItem> files = multipartRequest.getFileItems();

    Kind kind;
    byte[] zipOutput = null; // zipped version of bytesForUpload
    boolean fixedZip = false;
    try {
        if (files.size() > 1) {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            ZipOutputStream zos = new ZipOutputStream(bos);
            for (FileItem item : files) {
                String name = item.getName();
                if (name == null || name.length() == 0)
                    continue;
                byte[] bytes = item.get();
                ZipEntry zentry = new ZipEntry(name);
                zentry.setSize(bytes.length);
                zentry.setTime(now);
                zos.putNextEntry(zentry);
                zos.write(bytes);
                zos.closeEntry();
            }
            zos.flush();
            zos.close();
            zipOutput = bos.toByteArray();
            kind = Kind.MULTIFILE_UPLOAD;
        } else {
            FileItem fileItem = multipartRequest.getFileItem();
            if (fileItem == null) {
                response.sendError(HttpServletResponse.SC_BAD_REQUEST,
                        "There was a problem processing your submission. "
                                + "No files were found in your submission");
                return;
            }
            // get size in bytes
            long sizeInBytes = fileItem.getSize();
            if (sizeInBytes == 0 || sizeInBytes > Integer.MAX_VALUE) {
                response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Trying upload file of size " + sizeInBytes);
                return;
            }
            // copy the fileItem into a byte array
            byte[] bytesForUpload = fileItem.get();
            String fileName = fileItem.getName();

            boolean isSpecialSingleFile = OfficeFileName.matcher(fileName).matches();

            FormatDescription desc = FormatIdentification.identify(bytesForUpload);
            if (!isSpecialSingleFile && desc != null && desc.getMimeType().equals("application/zip")) {
                fixedZip = FixZip.hasProblem(bytesForUpload);
                kind = Kind.ZIP_UPLOAD;
                if (fixedZip) {
                    bytesForUpload = FixZip.fixProblem(bytesForUpload,
                            studentRegistration.getStudentRegistrationPK());
                    kind = Kind.FIXED_ZIP_UPLOAD;
                }
                zipOutput = bytesForUpload;
            } else {
                // ==========================================================================================
                // [NAT] [Buffer to ZIP Part]
                // Check the type of the upload and convert to zip format if possible
                // NOTE: I use both MagicMatch and FormatDescription (above) because MagicMatch was having
                // some trouble identifying all zips

                String mime = URLConnection.getFileNameMap().getContentTypeFor(fileName);
                if (!isSpecialSingleFile && mime == null)
                    try {
                        MagicMatch match = Magic.getMagicMatch(bytesForUpload, true);
                        if (match != null)
                            mime = match.getMimeType();
                    } catch (Exception e) {
                        // leave mime as null
                    }

                if (!isSpecialSingleFile && "application/zip".equalsIgnoreCase(mime)) {
                    zipOutput = bytesForUpload;
                    kind = Kind.ZIP_UPLOAD2;
                } else {
                    InputStream ins = new ByteArrayInputStream(bytesForUpload);
                    if ("application/x-gzip".equalsIgnoreCase(mime)) {
                        ins = new GZIPInputStream(ins);
                    }
                    ByteArrayOutputStream bos = new ByteArrayOutputStream();
                    ZipOutputStream zos = new ZipOutputStream(bos);
                    if (!isSpecialSingleFile && ("application/x-gzip".equalsIgnoreCase(mime)
                            || "application/x-tar".equalsIgnoreCase(mime))) {
                        kind = Kind.TAR_UPLOAD;
                        TarInputStream tins = new TarInputStream(ins);
                        TarEntry tarEntry = null;
                        while ((tarEntry = tins.getNextEntry()) != null) {
                            zos.putNextEntry(new ZipEntry(tarEntry.getName()));
                            tins.copyEntryContents(zos);
                            zos.closeEntry();
                        }
                        tins.close();
                    } else {
                        // Non-archive file type
                        if (isSpecialSingleFile)
                            kind = Kind.SPECIAL_ZIP_FILE;
                        else
                            kind = Kind.SINGLE_FILE;
                        // Write bytes to a zip file
                        ZipEntry zentry = new ZipEntry(fileName);
                        zos.putNextEntry(zentry);
                        zos.write(bytesForUpload);
                        zos.closeEntry();
                    }
                    zos.flush();
                    zos.close();
                    zipOutput = bos.toByteArray();
                }
                // [END Buffer to ZIP Part]
                // ==========================================================================================
            }
        }
    } catch (NullPointerException e) {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST,
                "There was a problem processing your submission. "
                        + "You should submit files that are either zipped or jarred");
        return;
    } finally {
        for (FileItem fItem : files)
            fItem.delete();
    }

    if (webBasedUpload) {
        clientTool = "web";
        clientVersion = kind.toString();
    }

    Submission submission = uploadSubmission(project, studentRegistration, zipOutput, request, submissionTimestamp,
            clientTool, clientVersion, cvsTimestamp, getDatabaseProps(), getSubmitServerServletLog());

    request.setAttribute("submission", submission);

    if (!webBasedUpload) {
        response.setContentType("text/plain");
        PrintWriter out = response.getWriter();
        out.println("Successful submission #" + submission.getSubmissionNumber() + " received for project "
                + project.getProjectNumber());
        out.flush();
        out.close();
        return;
    }

    boolean instructorUpload = ((Boolean) request.getAttribute("instructorViewOfStudent")).booleanValue();
    // boolean isCanonicalSubmission="true".equals(request.getParameter("isCanonicalSubmission"));
    // set the successful submission as a request attribute
    String redirectUrl;

    if (fixedZip) {
        redirectUrl = request.getContextPath() + "/view/fixedSubmissionUpload.jsp?submissionPK="
                + submission.getSubmissionPK();
    }
    if (project.getCanonicalStudentRegistrationPK() == studentRegistration.getStudentRegistrationPK()) {
        redirectUrl = request.getContextPath() + "/view/instructor/projectUtilities.jsp?projectPK="
                + project.getProjectPK();
    } else if (instructorUpload) {
        redirectUrl = request.getContextPath() + "/view/instructor/project.jsp?projectPK="
                + project.getProjectPK();
    } else {
        redirectUrl = request.getContextPath() + "/view/project.jsp?projectPK=" + project.getProjectPK();
    }
    response.sendRedirect(redirectUrl);
}
From source file:com.stimulus.archiva.presentation.ExportBean.java
@Override
protected StreamInfo getStreamInfo(ActionMapping mapping, ActionForm form, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    SearchBean searchBean = (SearchBean) form;
    String outputDir = Config.getFileSystem().getViewPath() + File.separatorChar;
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
    String zipFileName = "export-" + sdf.format(new Date()) + ".zip";
    File zipFile = new File(outputDir + zipFileName);
    String agent = request.getHeader("USER-AGENT");
    if (null != agent && -1 != agent.indexOf("MSIE")) {
        String codedfilename = URLEncoder.encode(zipFileName, "UTF8");
        response.setContentType("application/x-download");
        response.setHeader("Content-Disposition", "attachment;filename=" + codedfilename);
    } else if (null != agent && -1 != agent.indexOf("Mozilla")) {
        String codedfilename = MimeUtility.encodeText(zipFileName, "UTF8", "B");
        response.setContentType("application/x-download");
        response.setHeader("Content-Disposition", "attachment;filename=" + codedfilename);
    } else {
        response.setHeader("Content-Disposition", "attachment;filename=" + zipFileName);
    }
    logger.debug("size of searchResult = " + searchBean.getSearchResults().size());
    // MessageBean.viewMessage
    List<File> files = new ArrayList<File>();
    for (SearchResultBean searchResult : searchBean.getSearchResults()) {
        if (searchResult.getSelected()) {
            Email email = MessageService.getMessageByID(searchResult.getVolumeID(), searchResult.getUniqueID(),
                    false);
            HttpServletRequest hsr = ActionContext.getActionContext().getRequest();
            String baseURL = hsr.getRequestURL().substring(0,
                    hsr.getRequestURL().lastIndexOf(hsr.getServletPath()));
            MessageExtraction messageExtraction = MessageService.extractMessage(email, baseURL, true); // can take a while to extract message
            // MessageBean mbean = new MessageBean();
            // mbean.setMessageID(searchResult.getUniqueID());
            // mbean.setVolumeID(searchResult.getVolumeID());
            // writer.println(searchResult.toString());
            // writer.println(messageExtraction.getFileName());
            File fileToAdd = new File(outputDir, messageExtraction.getFileName());
            if (!files.contains(fileToAdd)) {
                files.add(fileToAdd);
            }
        }
    }
    ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipFile));
    try {
        byte[] buf = new byte[1024];
        for (File f : files) {
            ZipEntry ze = new ZipEntry(f.getName());
            logger.debug("Adding file " + f.getName());
            zos.putNextEntry(ze);
            InputStream is = new BufferedInputStream(new FileInputStream(f));
            for (;;) {
                int len = is.read(buf);
                if (len < 0)
                    break;
                zos.write(buf, 0, len);
            }
            is.close();
            Config.getFileSystem().getTempFiles().markForDeletion(f);
        }
    } finally {
        zos.close();
    }
    logger.debug("download zipped emails {fileName='" + zipFileName + "'}");
    String contentType = "application/zip";
    Config.getFileSystem().getTempFiles().markForDeletion(zipFile);
    return new FileStreamInfo(contentType, zipFile);
}
From source file:io.apicurio.hub.api.codegen.OpenApi2Thorntail.java
/**
 * Generate the Thorntail project.
 * @throws IOException
 */
public ByteArrayOutputStream generate() throws IOException {
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    CodegenInfo info = getInfoFromApiDoc();
    try (ZipOutputStream zos = new ZipOutputStream(output)) {
        if (!this.updateOnly) {
            zos.putNextEntry(new ZipEntry("pom.xml"));
            zos.write(generatePomXml(info).getBytes());
            zos.closeEntry();

            zos.putNextEntry(new ZipEntry("Dockerfile"));
            zos.write(generateDockerfile().getBytes());
            zos.closeEntry();

            zos.putNextEntry(new ZipEntry("openshift-template.yml"));
            zos.write(generateOpenshiftTemplate().getBytes());
            zos.closeEntry();

            zos.putNextEntry(new ZipEntry("src/main/resources/META-INF/microprofile-config.properties"));
            zos.write(generateMicroprofileConfigProperties().getBytes());
            zos.closeEntry();
        }

        zos.putNextEntry(new ZipEntry("src/main/resources/META-INF/openapi.json"));
        zos.write(this.openApiDoc.getBytes());
        zos.closeEntry();

        if (!this.updateOnly) {
            zos.putNextEntry(
                    new ZipEntry(javaPackageToZipPath(this.settings.javaPackage) + "JaxRsApplication.java"));
            zos.write(generateJaxRsApplication().getBytes());
            zos.closeEntry();
        }

        for (CodegenJavaInterface iface : info.getInterfaces()) {
            String javaInterface = generateJavaInterface(iface);
            zos.putNextEntry(new ZipEntry(javaPackageToZipPath(iface.getPackage()) + iface.getName() + ".java"));
            zos.write(javaInterface.getBytes());
            zos.closeEntry();
        }

        IndexedCodeWriter codeWriter = new IndexedCodeWriter();
        for (CodegenJavaBean bean : info.getBeans()) {
            generateJavaBean(bean, info, codeWriter);
        }
        for (String key : codeWriter.getKeys()) {
            zos.putNextEntry(new ZipEntry(javaClassToZipPath(key)));
            zos.write(codeWriter.get(key).getBytes());
            zos.closeEntry();
        }
    }
    return output;
}
From source file:com.diffplug.gradle.ZipMisc.java
/**
 * Creates a single-entry zip file.
 *
 * @param input an uncompressed file
 * @param pathWithinArchive the path within the archive
 * @param output the new zip file it will be compressed into
 */
public static void zip(File input, String pathWithinArchive, File output) throws IOException {
    try (ZipOutputStream zipStream = new ZipOutputStream(
            new BufferedOutputStream(new FileOutputStream(output)))) {
        zipStream.setMethod(ZipOutputStream.DEFLATED);
        zipStream.setLevel(9);
        zipStream.putNextEntry(new ZipEntry(pathWithinArchive));
        try (BufferedInputStream inputStream = new BufferedInputStream(new FileInputStream(input))) {
            copy(inputStream, zipStream);
        }
    }
}
From source file:fr.cirad.mgdb.exporting.markeroriented.HapMapExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    ArrayList<String> individualList = new ArrayList<String>();
    for (int i = 0; i < sampleIDs.size(); i++) {
        Individual individual = individuals.get(i);
        if (!individualList.contains(individual.getId())) {
            individualList.add(individual.getId());
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".hapmap"));
    String header = "rs#" + "\t" + "alleles" + "\t" + "chrom" + "\t" + "pos" + "\t" + "strand" + "\t"
            + "assembly#" + "\t" + "center" + "\t" + "protLSID" + "\t" + "assayLSID" + "\t" + "panelLSID"
            + "\t" + "QCcode";
    zos.write(header.getBytes());
    for (int i = 0; i < individualList.size(); i++) {
        zos.write(("\t" + individualList.get(i)).getBytes());
    }
    zos.write((LINE_SEPARATOR).getBytes());

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    short nProgress = 0, nPreviousProgress = 0;
    long nLoadedMarkerCount = 0;

    while (markerCursor == null || markerCursor.hasNext()) {
        int nLoadedMarkerCountInLoop = 0;
        Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
        boolean fStartingNewChunk = true;
        markerCursor.batchSize(nChunkSize);
        while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
            DBObject exportVariant = markerCursor.next();
            DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
            markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                    refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                            + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
            nLoadedMarkerCountInLoop++;
            fStartingNewChunk = false;
        }

        List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
        LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                mongoTemplate, sampleIDs, currentMarkers, true,
                null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes

        for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
        {
            Comparable variantId = variant.getId();
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(variantId);
                if (syn != null)
                    variantId = syn;
            }
            boolean fIsSNP = variant.getType().equals(Type.SNP.toString());
            byte[] missingGenotype = ("\t" + "NN").getBytes();
            String[] chromAndPos = markerChromosomalPositions.get(variant.getId()).split(":");
            zos.write(((variantId == null ? variant.getId() : variantId) + "\t"
                    + StringUtils.join(variant.getKnownAlleleList(), "/") + "\t" + chromAndPos[0] + "\t"
                    + Long.parseLong(chromAndPos[1]) + "\t" + "+").getBytes());
            for (int j = 0; j < 6; j++)
                zos.write(("\t" + "NA").getBytes());

            Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
            Collection<VariantRunData> runs = variantsAndRuns.get(variant);
            if (runs != null)
                for (VariantRunData run : runs)
                    for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                        SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                        String gtCode = run.getSampleGenotypes().get(sampleIndex).getCode();
                        String individualId = individuals
                                .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                .getId();
                        List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                        if (storedIndividualGenotypes == null) {
                            storedIndividualGenotypes = new ArrayList<String>();
                            individualGenotypes.put(individualId, storedIndividualGenotypes);
                        }
                        storedIndividualGenotypes.add(gtCode);
                        gqValueForSampleId.put(individualId,
                                (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ));
                        dpValueForSampleId.put(individualId,
                                (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP));
                    }

            int writtenGenotypeCount = 0;
            for (String individualId : individualList /* we use this list because it has the proper ordering */) {
                int individualIndex = individualList.indexOf(individualId);
                while (writtenGenotypeCount < individualIndex - 1) {
                    zos.write(missingGenotype);
                    writtenGenotypeCount++;
                }

                List<String> genotypes = individualGenotypes.get(individualId);
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                if (genotypes != null)
                    for (String genotype : genotypes) {
                        if (genotype.length() == 0)
                            continue; /* skip missing genotypes */

                        Integer gqValue = gqValueForSampleId.get(individualId);
                        if (gqValue != null && gqValue < nMinimumGenotypeQuality)
                            continue; /* skip this sample because its GQ is under the threshold */

                        Integer dpValue = dpValueForSampleId.get(individualId);
                        if (dpValue != null && dpValue < nMinimumReadDepth)
                            continue; /* skip this sample because its DP is under the threshold */

                        int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                        if (gtCount > highestGenotypeCount) {
                            highestGenotypeCount = gtCount;
                            mostFrequentGenotype = genotype;
                        }
                        genotypeCounts.put(genotype, gtCount);
                    }

                byte[] exportedGT = mostFrequentGenotype == null ? missingGenotype
                        : ("\t" + StringUtils.join(variant.getAllelesFromGenotypeCode(mostFrequentGenotype),
                                fIsSNP ? "" : "/")).getBytes();
                zos.write(exportedGT);
                writtenGenotypeCount++;

                if (genotypeCounts.size() > 1)
                    warningFileWriter.write("- Dissimilar genotypes found for variant "
                            + (variantId == null ? variant.getId() : variantId) + ", individual " + individualId
                            + ". Exporting most frequent: " + new String(exportedGT) + "\n");
            }

            while (writtenGenotypeCount < individualList.size()) {
                zos.write(missingGenotype);
                writtenGenotypeCount++;
            }
            zos.write((LINE_SEPARATOR).getBytes());
        }

        if (progress.hasAborted())
            return;

        nLoadedMarkerCount += nLoadedMarkerCountInLoop;
        nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
        if (nProgress > nPreviousProgress) {
            // if (nProgress%5 == 0)
            //     LOG.info("========================= exportData: " + nProgress + "% =========================" + (System.currentTimeMillis() - before)/1000 + "s");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}
From source file:com.heliosdecompiler.helios.gui.controller.FileTreeController.java
@FXML
public void initialize() {
    this.rootItem = new TreeItem<>(new TreeNode("[root]"));
    this.root.setRoot(this.rootItem);
    this.root.setCellFactory(new TreeCellFactory<>(node -> {
        if (node.getParent() == null) {
            ContextMenu export = new ContextMenu();

            MenuItem exportItem = new MenuItem("Export");
            export.setOnAction(e -> {
                File file = messageHandler.chooseFile().withInitialDirectory(new File("."))
                        .withTitle(Message.GENERIC_CHOOSE_EXPORT_LOCATION_JAR.format())
                        .withExtensionFilter(new FileFilter(Message.FILETYPE_JAVA_ARCHIVE.format(), "*.jar"), true)
                        .promptSave();

                OpenedFile openedFile = (OpenedFile) node.getMetadata().get(OpenedFile.OPENED_FILE);
                Map<String, byte[]> clone = new HashMap<>(openedFile.getContents());

                backgroundTaskHelper.submit(
                        new BackgroundTask(Message.TASK_SAVING_FILE.format(node.getDisplayName()), true, () -> {
                            try {
                                if (!file.exists()) {
                                    if (!file.createNewFile()) {
                                        throw new IOException("Could not create export file");
                                    }
                                }

                                try (ZipOutputStream zipOutputStream = new ZipOutputStream(
                                        new FileOutputStream(file))) {
                                    for (Map.Entry<String, byte[]> ent : clone.entrySet()) {
                                        ZipEntry zipEntry = new ZipEntry(ent.getKey());
                                        zipOutputStream.putNextEntry(zipEntry);
                                        zipOutputStream.write(ent.getValue());
                                        zipOutputStream.closeEntry();
                                    }
                                }

                                messageHandler.handleMessage(Message.GENERIC_EXPORTED.format());
                            } catch (IOException ex) {
                                messageHandler.handleException(Message.ERROR_IOEXCEPTION_OCCURRED.format(), ex);
                            }
                        }));
            });

            export.getItems().add(exportItem);
            return export;
        }
        return null;
    }));

    root.addEventHandler(KeyEvent.KEY_RELEASED, event -> {
        if (event.getCode() == KeyCode.ENTER) {
            TreeItem<TreeNode> selected = this.root.getSelectionModel().getSelectedItem();
            if (selected != null) {
                if (selected.getChildren().size() != 0) {
                    selected.setExpanded(!selected.isExpanded());
                } else {
                    getParentController().getAllFilesViewerController().handleClick(selected.getValue());
                }
            }
        }
    });

    Tooltip tooltip = new Tooltip();
    StringBuilder search = new StringBuilder();
    List<TreeItem<TreeNode>> searchContext = new ArrayList<>();
    AtomicInteger searchIndex = new AtomicInteger();

    root.focusedProperty().addListener((observable, oldValue, newValue) -> {
        if (!newValue) {
            tooltip.hide();
            search.setLength(0);
        }
    });

    root.boundsInLocalProperty().addListener((observable, oldValue, newValue) -> {
        Bounds bounds = root.localToScreen(newValue);
        tooltip.setAnchorX(bounds.getMinX());
        tooltip.setAnchorY(bounds.getMinY());
    });

    root.addEventHandler(KeyEvent.KEY_PRESSED, event -> {
        if (tooltip.isShowing() && event.getCode() == KeyCode.UP) {
            if (searchIndex.decrementAndGet() < 0) {
                searchIndex.set(searchContext.size() - 1);
            }
        } else if (tooltip.isShowing() && event.getCode() == KeyCode.DOWN) {
            if (searchIndex.incrementAndGet() >= searchContext.size()) {
                searchIndex.set(0);
            }
        } else {
            return;
        }
        event.consume();

        root.scrollTo(root.getRow(searchContext.get(searchIndex.get())));
        root.getSelectionModel().select(searchContext.get(searchIndex.get()));
    });

    root.addEventHandler(KeyEvent.KEY_TYPED, event -> {
        if (event.getCharacter().charAt(0) == '\b') {
            if (search.length() > 0) {
                search.setLength(search.length() - 1);
            }
        } else if (event.getCharacter().charAt(0) == '\u001B') { // esc
            tooltip.hide();
            search.setLength(0);
            return;
        } else if (search.length() > 0
                || (search.length() == 0 && StringUtils.isAlphanumeric(event.getCharacter()))) {
            search.append(event.getCharacter());
            if (!tooltip.isShowing()) {
                tooltip.show(root.getScene().getWindow());
            }
        }

        if (!tooltip.isShowing())
            return;

        String str = search.toString();
        tooltip.setText("Search for: " + str);

        searchContext.clear();

        ArrayDeque<TreeItem<TreeNode>> deque = new ArrayDeque<>();
        deque.addAll(rootItem.getChildren());

        while (!deque.isEmpty()) {
            TreeItem<TreeNode> item = deque.poll();
            if (item.getValue().getDisplayName().contains(str)) {
                searchContext.add(item);
            }
            if (item.isExpanded() && item.getChildren().size() > 0)
                deque.addAll(item.getChildren());
        }

        searchIndex.set(0);
        if (searchContext.size() > 0) {
            root.scrollTo(root.getRow(searchContext.get(0)));
            root.getSelectionModel().select(searchContext.get(0));
        }
    });

    openedFileController.loadedFiles().addListener((MapChangeListener<String, OpenedFile>) change -> {
        if (change.getValueAdded() != null) {
            updateTree(change.getValueAdded());
        }

        if (change.getValueRemoved() != null) {
            this.rootItem.getChildren()
                    .removeIf(ti -> ti.getValue().equals(change.getValueRemoved().getRoot()));
        }
    });
}
From source file:ZipTransformTest.java
public void testStreamTransformer() throws IOException {
    final String name = "foo";
    final byte[] contents = "bar".getBytes();
    final byte[] transformed = "cbs".getBytes();

    File file1 = File.createTempFile("temp", null);
    File file2 = File.createTempFile("temp", null);
    try {
        // Create the ZIP file
        ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(file1));
        try {
            zos.putNextEntry(new ZipEntry(name));
            zos.write(contents);
            zos.closeEntry();
        } finally {
            IOUtils.closeQuietly(zos);
        }

        // Transform the ZIP file
        ZipUtil.transformEntry(file1, name, new StreamZipEntryTransformer() {
            protected void transform(ZipEntry zipEntry, InputStream in, OutputStream out) throws IOException {
                int b;
                while ((b = in.read()) != -1)
                    out.write(b + 1);
            }
        }, file2);

        // Test the ZipUtil
        byte[] actual = ZipUtil.unpackEntry(file2, name);
        assertNotNull(actual);
        assertEquals(new String(transformed), new String(actual));
    } finally {
        FileUtils.deleteQuietly(file1);
        FileUtils.deleteQuietly(file2);
    }
}
From source file:be.fedict.eid.applet.service.signer.odf.AbstractODFSignatureService.java
private void outputSignedOpenDocument(byte[] signatureData) throws IOException {
    LOG.debug("output signed open document");
    OutputStream signedOdfOutputStream = getSignedOpenDocumentOutputStream();
    if (null == signedOdfOutputStream) {
        throw new NullPointerException("signedOpenDocumentOutputStream is null");
    }
    /*
     * Copy the original ODF content to the signed ODF package.
     */
    ZipOutputStream zipOutputStream = new ZipOutputStream(signedOdfOutputStream);
    ZipInputStream zipInputStream = new ZipInputStream(this.getOpenDocumentURL().openStream());
    ZipEntry zipEntry;
    while (null != (zipEntry = zipInputStream.getNextEntry())) {
        if (!zipEntry.getName().equals(ODFUtil.SIGNATURE_FILE)) {
            zipOutputStream.putNextEntry(zipEntry);
            IOUtils.copy(zipInputStream, zipOutputStream);
        }
    }
    zipInputStream.close();
    /*
     * Add the ODF XML signature file to the signed ODF package.
     */
    zipEntry = new ZipEntry(ODFUtil.SIGNATURE_FILE);
    zipOutputStream.putNextEntry(zipEntry);
    IOUtils.write(signatureData, zipOutputStream);
    zipOutputStream.close();
}