List of usage examples for java.util.zip ZipOutputStream write
public synchronized void write(byte[] b, int off, int len) throws IOException
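This method writes len bytes from b, starting at offset off, into the data of the current ZIP entry, i.e. the entry most recently opened with putNextEntry. As a quick orientation before the project examples below, here is a minimal, self-contained sketch of the pattern they all share: open a ZipOutputStream, start an entry, copy bytes through a buffer with write(byte[], int, int), then close the entry and the stream. The file names hello.txt and example.zip are placeholders chosen for illustration, not taken from any of the sources listed.

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipOutputStream;

    public class ZipWriteExample {
        public static void main(String[] args) throws IOException {
            // Placeholder paths, for illustration only.
            String inputFile = "hello.txt";
            String zipFile = "example.zip";

            try (FileInputStream in = new FileInputStream(inputFile);
                    ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipFile))) {
                // An entry must be opened with putNextEntry before any write() call.
                zos.putNextEntry(new ZipEntry(inputFile));

                byte[] buffer = new byte[4096];
                int len;
                // write(byte[], int, int) appends the bytes just read to the current entry.
                while ((len = in.read(buffer)) != -1) {
                    zos.write(buffer, 0, len);
                }
                zos.closeEntry();
            }
        }
    }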
From source file:it.cnr.icar.eric.server.query.CompressContentQueryFilterPlugin.java
private void internalAddRepositoryItemToZipFile(RepositoryItem ri, ZipOutputStream zos, String zipEntryValue)
        throws IOException {
    if (ri != null) {
        int BUFFER = 2048;
        byte data[] = new byte[BUFFER];
        BufferedInputStream origin = null;
        try {
            InputStream is = ri.getDataHandler().getInputStream();
            origin = new BufferedInputStream(is, BUFFER);
            ZipEntry entry = new ZipEntry(zipEntryValue);
            zos.putNextEntry(entry);
            int count;
            while ((count = origin.read(data, 0, BUFFER)) != -1) {
                zos.write(data, 0, count);
            }
        } finally {
            if (origin != null) {
                try {
                    origin.close();
                } catch (Throwable t) {
                    origin = null;
                }
            }
        }
    }
}
From source file:org.eclairjs.nashorn.Utils.java
private static void addToZip(String path, String srcFile, ZipOutputStream zipOut, String[] filesToInclude)
        throws IOException {
    Logger logger = Logger.getLogger(Utils.class);
    int DEFAULT_BUFFER_SIZE = 4096;
    FileFilter filter = new FileFilter() {
        public boolean accept(File file) {
            if (Arrays.asList(filesToInclude).contains(file.getName()) || file.isDirectory()) {
                return true;
            } else {
                // logger.debug("Skipping not including in zipfile: " + file.getName());
            }
            return false;
        }
    };
    File file = new File(srcFile);
    String filePath = "".equals(path) ? file.getName() : path + "/" + file.getName();
    if (file.isDirectory()) {
        for (File childFile : file.listFiles(filter)) {
            addToZip(filePath, srcFile + "/" + childFile.getName(), zipOut, filesToInclude);
        }
    } else {
        logger.debug("Adding to zipfile: " + filePath);
        zipOut.putNextEntry(new ZipEntry(filePath));
        FileInputStream in = new FileInputStream(srcFile);
        byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
        int len;
        while ((len = in.read(buffer)) != -1) {
            zipOut.write(buffer, 0, len);
        }
        in.close();
    }
}
From source file:com.taobao.android.tpatch.utils.JarSplitUtils.java
/**
 * Removes the given classes from a jar file.
 *
 * @param inJar the jar file to filter
 * @param removeClasses the class names to remove
 */
public static void removeFilesFromJar(File inJar, List<String> removeClasses) throws IOException {
    if (null == removeClasses || removeClasses.isEmpty()) {
        return;
    }
    File outJar = new File(inJar.getParentFile(), inJar.getName() + ".tmp");
    File outParentFolder = outJar.getParentFile();
    if (!outParentFolder.exists()) {
        outParentFolder.mkdirs();
    }
    FileOutputStream fos = new FileOutputStream(outJar);
    ZipOutputStream jos = new ZipOutputStream(fos);
    final byte[] buffer = new byte[8192];
    FileInputStream fis = new FileInputStream(inJar);
    ZipInputStream zis = new ZipInputStream(fis);
    try {
        // loop on the entries of the jar file package and put them in the final jar
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
            // do not take directories or anything inside a potential META-INF folder.
            if (entry.isDirectory() || !entry.getName().endsWith(".class")) {
                continue;
            }
            String name = entry.getName();
            String className = getClassName(name);
            if (removeClasses.contains(className)) {
                continue;
            }
            JarEntry newEntry;
            // Preserve the STORED method of the input entry.
            if (entry.getMethod() == JarEntry.STORED) {
                newEntry = new JarEntry(entry);
            } else {
                // Create a new entry so that the compressed len is recomputed.
                newEntry = new JarEntry(name);
            }
            // add the entry to the jar archive
            jos.putNextEntry(newEntry);
            // read the content of the entry from the input stream, and write it into the archive.
            int count;
            while ((count = zis.read(buffer)) != -1) {
                jos.write(buffer, 0, count);
            }
            // close the entries for this file
            jos.closeEntry();
            zis.closeEntry();
        }
    } finally {
        zis.close();
    }
    fis.close();
    jos.close();
    FileUtils.deleteQuietly(inJar);
    FileUtils.moveFile(outJar, inJar);
}
From source file:org.apache.roller.weblogger.ui.struts2.editor.WeblogExport.java
/**
 * Returns an output stream to the client of all uploaded resource files as
 * a ZIP archive.
 */
public void exportResources() {
    SimpleDateFormat dateFormat = new SimpleDateFormat("MMddyyyy'T'HHmmss");

    StringBuilder fileName = new StringBuilder();
    fileName.append(getActionWeblog().getHandle());
    fileName.append("-resources-");
    fileName.append(dateFormat.format(System.currentTimeMillis()));
    fileName.append(".zip");

    if (!response.isCommitted()) {
        response.reset();
        response.setContentType("application/zip");
        response.setHeader("Content-Disposition", "attachment; filename=\"" + fileName.toString() + "\"");

        try {
            MediaFileManager fmgr = WebloggerFactory.getWeblogger().getMediaFileManager();
            List<MediaFile> resources = new ArrayList<MediaFile>();

            // Load the contents of any sub-directories
            for (MediaFileDirectory mdir : fmgr.getMediaFileDirectories(getActionWeblog())) {
                loadResources(resources, mdir);
            }

            // Load the files at the root of the specific upload directory
            loadResources(resources, null);

            // Create a buffer for reading the files
            byte[] buffer = new byte[1024];

            ServletOutputStream servletOutput = response.getOutputStream();
            ZipOutputStream zipOutput = new ZipOutputStream(servletOutput);

            for (MediaFile resource : resources) {
                InputStream input = resource.getInputStream();

                // Add a new ZIP entry to output stream
                zipOutput.putNextEntry(new ZipEntry(resource.getPath()));

                int length;
                while ((length = input.read(buffer)) > 0) {
                    zipOutput.write(buffer, 0, length);
                }

                // Cleanup the entry
                input.close();
                zipOutput.closeEntry();
            }

            // Cleanup the output stream
            zipOutput.flush();
            zipOutput.close();
        } catch (Exception e) {
            log.error("Error exporting resources: " + e.getMessage());
        }
    }
}
From source file:fr.cirad.mgdb.exporting.markeroriented.EigenstratExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    // long before = System.currentTimeMillis();
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    File snpFile = null;
    try {
        snpFile = File.createTempFile("snpFile", "");
        FileWriter snpFileWriter = new FileWriter(snpFile);

        ZipOutputStream zos = new ZipOutputStream(outputStream);
        if (ByteArrayOutputStream.class.isAssignableFrom(outputStream.getClass()))
            zos.setLevel(ZipOutputStream.STORED);

        if (readyToExportFiles != null)
            for (String readyToExportFile : readyToExportFiles.keySet()) {
                zos.putNextEntry(new ZipEntry(readyToExportFile));
                InputStream inputStream = readyToExportFiles.get(readyToExportFile);
                byte[] dataBlock = new byte[1024];
                int count = inputStream.read(dataBlock, 0, 1024);
                while (count != -1) {
                    zos.write(dataBlock, 0, count);
                    count = inputStream.read(dataBlock, 0, 1024);
                }
            }

        MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        int markerCount = markerCursor.count();
        List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
        ArrayList<String> individualList = new ArrayList<String>();
        StringBuffer indFileContents = new StringBuffer();

        for (int i = 0; i < sampleIDs.size(); i++) {
            Individual individual = individuals.get(i);
            if (!individualList.contains(individual.getId())) {
                individualList.add(individual.getId());
                indFileContents.append(individual.getId() + "\t"
                        + getIndividualGenderCode(sModule, individual.getId()) + "\t"
                        + (individual.getPopulation() == null ? "." : individual.getPopulation())
                        + LINE_SEPARATOR);
            }
        }

        String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
        zos.putNextEntry(new ZipEntry(exportName + ".ind"));
        zos.write(indFileContents.toString().getBytes());

        zos.putNextEntry(new ZipEntry(exportName + ".eigenstratgeno"));
        int avgObjSize = (Integer) mongoTemplate
                .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats()
                .get("avgObjSize");
        int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;

        while (markerCursor.hasNext()) {
            int nLoadedMarkerCountInLoop = 0;
            Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nChunkSize);
            while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
                markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                        refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                                + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
            // query mongo db for matching genotypes
            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /* new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE)) */);

            // read data and write results into temporary files (one per sample)
            for (VariantData variant : variantsAndRuns.keySet()) {
                Comparable variantId = variant.getId();
                List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
                if (chromAndPos.size() == 0)
                    LOG.warn("Chromosomal position not found for marker " + variantId);
                // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
                if (markerSynonyms != null) {
                    Comparable syn = markerSynonyms.get(variantId);
                    if (syn != null)
                        variantId = syn;
                }
                snpFileWriter.write(variantId + "\t" + (chromAndPos.size() == 0 ? "0" : chromAndPos.get(0))
                        + "\t" + 0 + "\t" + (chromAndPos.size() == 0 ? 0l : Long.parseLong(chromAndPos.get(1)))
                        + LINE_SEPARATOR);

                Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
                Collection<VariantRunData> runs = variantsAndRuns.get(variant);
                if (runs != null)
                    for (VariantRunData run : runs)
                        for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                            SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                            String individualId = individuals
                                    .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                    .getId();

                            Integer gq = null;
                            try {
                                gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                            } catch (Exception ignored) {
                            }
                            if (gq != null && gq < nMinimumGenotypeQuality)
                                continue;

                            Integer dp = null;
                            try {
                                dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                            } catch (Exception ignored) {
                            }
                            if (dp != null && dp < nMinimumReadDepth)
                                continue;

                            String gtCode = sampleGenotype.getCode();
                            List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                            if (storedIndividualGenotypes == null) {
                                storedIndividualGenotypes = new ArrayList<String>();
                                individualGenotypes.put(individualId, storedIndividualGenotypes);
                            }
                            storedIndividualGenotypes.add(gtCode);
                        }

                /* we use this list because it has the proper ordering */
                for (int j = 0; j < individualList.size(); j++) {
                    String individualId = individualList.get(j);
                    List<String> genotypes = individualGenotypes.get(individualId);
                    // will help us to keep track of missing genotypes
                    HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>();
                    int highestGenotypeCount = 0;
                    String mostFrequentGenotype = null;
                    if (genotypes != null)
                        for (String genotype : genotypes) {
                            if (genotype.length() == 0)
                                continue; /* skip missing genotypes */

                            int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                            if (gtCount > highestGenotypeCount) {
                                highestGenotypeCount = gtCount;
                                mostFrequentGenotype = genotype;
                            }
                            genotypeCounts.put(genotype, gtCount);
                        }

                    List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                            : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);

                    int nOutputCode = 0;
                    if (mostFrequentGenotype == null)
                        nOutputCode = 9;
                    else
                        for (String all : Helper.split(mostFrequentGenotype, "/"))
                            if ("0".equals(all))
                                nOutputCode++;

                    if (j == 0 && variant.getKnownAlleleList().size() > 2)
                        warningFileWriter.write("- Variant " + variant.getId()
                                + " is multi-allelic. Make sure Eigenstrat genotype encoding specifications are suitable for you.\n");

                    zos.write(("" + nOutputCode).getBytes());

                    if (genotypeCounts.size() > 1 || alleles.size() > 2) {
                        if (genotypeCounts.size() > 1)
                            warningFileWriter.write("- Dissimilar genotypes found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting most frequent: " + nOutputCode + "\n");
                        if (alleles.size() > 2)
                            warningFileWriter.write("- More than 2 alleles found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting only the first 2 alleles.\n");
                    }
                }
                zos.write((LINE_SEPARATOR).getBytes());
            }

            if (progress.hasAborted())
                return;

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                // if (nProgress%5 == 0)
                //     LOG.info("============= exportData: " + nProgress + "% =============" + (System.currentTimeMillis() - before)/1000 + "s");
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }

        snpFileWriter.close();
        zos.putNextEntry(new ZipEntry(exportName + ".snp"));
        BufferedReader in = new BufferedReader(new FileReader(snpFile));
        String sLine;
        while ((sLine = in.readLine()) != null)
            zos.write((sLine + "\n").getBytes());
        in.close();

        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            in = new BufferedReader(new FileReader(warningFile));
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();

        zos.close();
        progress.setCurrentStepProgress((short) 100);
    } finally {
        if (snpFile != null && snpFile.exists())
            snpFile.delete();
    }
}
From source file:interactivespaces.workbench.project.java.BndOsgiContainerBundleCreator.java
/**
 * Write out the contents of the folder to the distribution file.
 *
 * @param directory
 *          folder being written to the build
 * @param buf
 *          a buffer for caching info
 * @param jarOutputStream
 *          the stream where the jar is being written
 * @param parentPath
 *          path up to this point
 *
 * @throws IOException
 *           for IO access errors
 */
private void writeJarFile(File directory, byte[] buf, ZipOutputStream jarOutputStream, String parentPath)
        throws IOException {
    File[] files = directory.listFiles();
    if (files == null || files.length == 0) {
        log.warn("No source files found in " + directory.getAbsolutePath());
        return;
    }

    for (File file : files) {
        if (file.isDirectory()) {
            writeJarFile(file, buf, jarOutputStream, parentPath + file.getName() + "/");
        } else {
            FileInputStream in = null;
            try {
                in = new FileInputStream(file);

                // Add ZIP entry to output stream.
                jarOutputStream.putNextEntry(new JarEntry(parentPath + file.getName()));

                // Transfer bytes from the file to the ZIP file
                int len;
                while ((len = in.read(buf)) > 0) {
                    jarOutputStream.write(buf, 0, len);
                }

                // Complete the entry
                jarOutputStream.closeEntry();
            } finally {
                fileSupport.close(in, false);
            }
        }
    }
}
From source file:au.org.ala.bhl.service.DocumentCacheService.java
public void compressPages(ItemDescriptor itemDesc) {
    File itemDir = new File(getItemDirectoryPath(itemDesc.getInternetArchiveId()));
    File file = getPageArchiveFile(itemDesc);
    if (file.exists()) {
        log("Deleting existing archive file: %s", file.getAbsolutePath());
        file.delete();
    }

    try {
        File[] candidates = itemDir.listFiles();
        int pageCount = 0;
        ZipOutputStream out = null;
        for (File candidate : candidates) {
            Matcher m = PAGE_FILE_REGEX.matcher(candidate.getName());
            if (m.matches()) {
                if (out == null) {
                    out = new ZipOutputStream(new FileOutputStream(file));
                }
                pageCount++;
                FileInputStream in = new FileInputStream(candidate);
                out.putNextEntry(new ZipEntry(candidate.getName()));
                byte[] buf = new byte[2048];
                int len;
                while ((len = in.read(buf)) > 0) {
                    out.write(buf, 0, len);
                }
                out.closeEntry();
                in.close();
                candidate.delete();
            }
        }
        if (out != null) {
            out.close();
            log("%d pages add to pages.zip for item %s", pageCount, itemDesc);
        } else {
            log("No pages for item %s", itemDesc);
        }
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}
From source file:com.ephesoft.dcma.util.FileUtils.java
/**
 * This method zips the contents of the directory specified into a zip file whose name is provided.
 *
 * @param dir2zip {@link String}
 * @param zout {@link ZipOutputStream}
 * @param dir2zipName {@link String}
 * @throws IOException in case of error
 */
public static void zipDirectory(String dir2zip, ZipOutputStream zout, String dir2zipName) throws IOException {
    File srcDir = new File(dir2zip);
    List<String> fileList = listDirectory(srcDir);
    for (String fileName : fileList) {
        File file = new File(srcDir.getParent(), fileName);
        String zipName = fileName;
        if (File.separatorChar != FORWARD_SLASH) {
            zipName = fileName.replace(File.separatorChar, FORWARD_SLASH);
        }
        zipName = zipName.substring(
                zipName.indexOf(dir2zipName + BACKWARD_SLASH) + 1 + (dir2zipName + BACKWARD_SLASH).length());
        ZipEntry zipEntry;
        if (file.isFile()) {
            zipEntry = new ZipEntry(zipName);
            zipEntry.setTime(file.lastModified());
            zout.putNextEntry(zipEntry);
            FileInputStream fin = new FileInputStream(file);
            byte[] buffer = new byte[UtilConstants.BUFFER_CONST];
            for (int n; (n = fin.read(buffer)) > 0;) {
                zout.write(buffer, 0, n);
            }
            if (fin != null) {
                fin.close();
            }
        } else {
            zipEntry = new ZipEntry(zipName + FORWARD_SLASH);
            zipEntry.setTime(file.lastModified());
            zout.putNextEntry(zipEntry);
        }
    }
    if (zout != null) {
        zout.close();
    }
}
From source file:fi.mikuz.boarder.util.FileProcessor.java
void zipAddDir(File dirObj, ZipOutputStream out) throws IOException {
    File[] files = dirObj.listFiles();
    byte[] tmpBuf = new byte[1024];

    for (int i = 0; i < files.length; i++) {
        if (files[i].isDirectory()) {
            zipAddDir(files[i], out);
            continue;
        }
        FileInputStream in = new FileInputStream(files[i].getAbsolutePath());
        System.out.println(" Adding: " + files[i].getAbsolutePath().substring(mBoardDirLength));
        out.putNextEntry(new ZipEntry(files[i].getAbsolutePath().substring(mBoardDirLength)));
        int len;
        while ((len = in.read(tmpBuf)) > 0) {
            out.write(tmpBuf, 0, len);
        }
        out.closeEntry();
        in.close();
    }
}