List of usage examples for java.util.zip ZipEntry ZipEntry
public ZipEntry(ZipEntry e)
From source file:com.googlecode.dex2jar.v3.Dex2jar.java
/**
 * Ensures a directory entry (and all of its ancestors) exists in the zip
 * being written, emitting each directory at most once.
 *
 * @param dir directory path without trailing slash, '/'-separated
 * @param zos zip stream to receive the directory entries
 * @throws IOException if an entry cannot be written
 */
private void check(String dir, ZipOutputStream zos) throws IOException {
    // Already emitted for this archive? Nothing to do.
    if (dirs.contains(dir)) {
        return;
    }
    dirs.add(dir);
    // Recurse so ancestor directory entries appear before this one.
    int slash = dir.lastIndexOf('/');
    if (slash > 0) {
        check(dir.substring(0, slash), zos);
    }
    // Zip format marks directory entries with a trailing slash.
    zos.putNextEntry(new ZipEntry(dir + "/"));
    zos.closeEntry();
}
From source file:com.rover12421.shaka.apktool.lib.AndrolibResourcesAj.java
/** * ??//from ww w.j av a 2s . co m */ private void fuckNotDefinedRes_clearAddRes(File apkFile) throws IOException, ShakaException { if (notDefinedRes.size() <= 0) { return; } File tempFile = File.createTempFile(apkFile.getName(), null); tempFile.delete(); tempFile.deleteOnExit(); boolean renameOk = apkFile.renameTo(tempFile); if (!renameOk) { throw new ShakaException( "could not rename the file " + apkFile.getAbsolutePath() + " to " + tempFile.getAbsolutePath()); } try (ZipInputStream zin = new ZipInputStream(new FileInputStream(tempFile)); ZipOutputStream zout = new ZipOutputStream(new FileOutputStream(apkFile))) { ZipEntry entry = zin.getNextEntry(); while (entry != null) { String name = entry.getName(); boolean toBeDeleted = false; for (String f : notDefinedRes) { if (f.equals(name)) { toBeDeleted = true; LogHelper.warning("Delete temp res : " + f); break; } } if (!toBeDeleted) { // Add ZIP entry to output stream. zout.putNextEntry(new ZipEntry(name)); // Transfer bytes from the ZIP file to the output file IOUtils.copy(zin, zout); } entry = zin.getNextEntry(); } } tempFile.delete(); notDefinedRes.clear(); }
From source file:com.mgmtp.jfunk.common.util.ExtendedFile.java
/**
 * Recursively adds {@code file} to {@code zipOut}: directories are descended
 * into (children added under {@code prefix + dirName + '/'}), regular files
 * become one zip entry each.
 *
 * @param prefix entry-name prefix, empty or ending with '/'
 * @param file   file or directory to add
 * @param zipOut target stream; left open for further entries
 * @throws IOException if the file cannot be read or the entry written
 */
private void zip(String prefix, final File file, final ZipOutputStream zipOut) throws IOException {
    if (file.isDirectory()) {
        prefix = prefix + file.getName() + '/';
        // Fix: listFiles() returns null for unreadable dirs / I/O errors — guard NPE.
        File[] children = file.listFiles();
        if (children != null) {
            for (File child : children) {
                zip(prefix, child, zipOut);
            }
        }
    } else {
        // try-with-resources closes the input even when the copy fails
        // (replaces IOUtils.closeQuietly / IOUtils.copy with stdlib equivalents).
        try (FileInputStream in = new FileInputStream(file)) {
            zipOut.putNextEntry(new ZipEntry(prefix + file.getName()));
            byte[] buffer = new byte[8192];
            for (int n; (n = in.read(buffer)) != -1; ) {
                zipOut.write(buffer, 0, n);
            }
        } finally {
            // Preserve original behavior: flush and close the current entry
            // (closeEntry is a no-op when no entry is open).
            zipOut.flush();
            zipOut.closeEntry();
        }
    }
}
From source file:com.intuit.tank.service.impl.v1.agent.AgentServiceV1.java
/**
 * Writes one zip entry named {@code name} containing all bytes of {@code in}.
 * The input stream is always closed (quietly); the zip stream stays open so
 * further entries can be added.
 *
 * @param name entry name inside the archive
 * @param in   source bytes; consumed and closed by this call
 * @param zip  target zip stream
 * @throws Exception if the entry cannot be written
 */
private void addFileToZip(String name, InputStream in, ZipOutputStream zip) throws Exception {
    try {
        zip.putNextEntry(new ZipEntry(name));
        // Stdlib copy loop replaces the commons-io IOUtils.copy dependency.
        byte[] buffer = new byte[8192];
        for (int n; (n = in.read(buffer)) != -1; ) {
            zip.write(buffer, 0, n);
        }
        // Fix: explicitly finish the entry instead of relying on the next
        // putNextEntry/finish call to close it implicitly.
        zip.closeEntry();
    } finally {
        try {
            in.close();
        } catch (IOException ignored) {
            // best-effort close, matching the original IOUtils.closeQuietly semantics
        }
    }
}
From source file:eu.esdihumboldt.hale.io.appschema.writer.AppSchemaMappingUploader.java
private byte[] writeContent(String mappingFileName, ContentType contentType, ProgressIndicator progress, IOReporter reporter) throws IOException { try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) { if (contentType.equals(DataStoreFile.ZIP_CONTENT_TYPE)) { try (ZipOutputStream zos = new ZipOutputStream(bos)) { if (includeTargetSchema) { // add target schema to zip addTargetSchemaToZip(zos, null, progress, reporter); }//from w w w . j ava 2 s . co m // main mapping configuration file zos.putNextEntry(new ZipEntry(mappingFileName + ".appschema")); generator.writeMappingConf(zos); zos.closeEntry(); if (generator.getGeneratedMapping().requiresMultipleFiles()) { zos.putNextEntry(new ZipEntry(AppSchemaIO.INCLUDED_TYPES_MAPPING_FILE)); generator.writeIncludedTypesMappingConf(zos); zos.closeEntry(); } } } else { generator.writeMappingConf(bos); } return bos.toByteArray(); } }
From source file:fr.cirad.mgdb.exporting.markeroriented.EigenstratExportHandler.java
/**
 * Streams an Eigenstrat-format export as a zip archive onto {@code outputStream}.
 * Entries written, in order: any caller-supplied ready-to-export files, the
 * {@code .ind} individuals file, the {@code .eigenstratgeno} genotype matrix
 * (one row per variant, one digit per individual), the {@code .snp} variant
 * positions file, and — only when warnings were produced — a
 * {@code -REMARKS.txt} file.
 *
 * @param outputStream            destination for the zip archive (closed at the end)
 * @param sModule                 database/module name
 * @param sampleIDs               samples to export, one per column
 * @param progress                progress indicator; also polled for abort requests
 * @param markerCursor            cursor over the variants to export
 * @param markerSynonyms          optional id -> synonym substitution for variant names
 * @param nMinimumGenotypeQuality genotypes with GQ below this are skipped
 * @param nMinimumReadDepth       genotypes with DP below this are skipped
 * @param readyToExportFiles      extra files to copy verbatim into the archive
 * @throws Exception on any database or I/O failure
 */
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    // long before = System.currentTimeMillis();
    // Warnings are buffered in a temp file and only zipped if non-empty.
    // NOTE(review): warningFileWriter is not closed if an exception escapes the
    // try block below — only snpFile is cleaned up in the finally.
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    File snpFile = null;
    try {
        // The .snp content is written to a temp file first because the
        // .eigenstratgeno entry must be streamed before it in the zip.
        snpFile = File.createTempFile("snpFile", "");
        FileWriter snpFileWriter = new FileWriter(snpFile);
        ZipOutputStream zos = new ZipOutputStream(outputStream);
        // In-memory destination: drop the compression level — presumably to
        // trade size for speed when the caller buffers the result; confirm.
        if (ByteArrayOutputStream.class.isAssignableFrom(outputStream.getClass()))
            zos.setLevel(ZipOutputStream.STORED);
        // Copy any pre-built files verbatim into the archive first.
        if (readyToExportFiles != null)
            for (String readyToExportFile : readyToExportFiles.keySet()) {
                zos.putNextEntry(new ZipEntry(readyToExportFile));
                InputStream inputStream = readyToExportFiles.get(readyToExportFile);
                byte[] dataBlock = new byte[1024];
                int count = inputStream.read(dataBlock, 0, 1024);
                while (count != -1) {
                    zos.write(dataBlock, 0, count);
                    count = inputStream.read(dataBlock, 0, 1024);
                }
            }
        MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        int markerCount = markerCursor.count();
        List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
        // Build the .ind file contents: one line per distinct individual
        // (id, gender code, population or "." when unknown).
        ArrayList<String> individualList = new ArrayList<String>();
        StringBuffer indFileContents = new StringBuffer();
        for (int i = 0; i < sampleIDs.size(); i++) {
            Individual individual = individuals.get(i);
            if (!individualList.contains(individual.getId())) {
                individualList.add(individual.getId());
                indFileContents.append(individual.getId() + "\t"
                        + getIndividualGenderCode(sModule, individual.getId()) + "\t"
                        + (individual.getPopulation() == null ? "." : individual.getPopulation())
                        + LINE_SEPARATOR);
            }
        }
        String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
        zos.putNextEntry(new ZipEntry(exportName + ".ind"));
        zos.write(indFileContents.toString().getBytes());
        zos.putNextEntry(new ZipEntry(exportName + ".eigenstratgeno"));
        // Size each query chunk so a batch stays near nMaxChunkSizeInMb,
        // based on the collection's average document size.
        int avgObjSize = (Integer) mongoTemplate
                .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats()
                .get("avgObjSize");
        int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        while (markerCursor.hasNext()) {
            int nLoadedMarkerCountInLoop = 0;
            Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nChunkSize);
            // Pull up to nChunkSize variant ids + "sequence:start" positions.
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
                markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                        refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                                + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }
            List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
            {
                Comparable variantId = variant.getId();
                List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
                if (chromAndPos.size() == 0)
                    LOG.warn("Chromosomal position not found for marker " + variantId);
                // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
                // Substitute the variant's synonym, when one is provided.
                if (markerSynonyms != null) {
                    Comparable syn = markerSynonyms.get(variantId);
                    if (syn != null)
                        variantId = syn;
                }
                // One .snp line per variant: id, chromosome, 0, position.
                snpFileWriter.write(variantId + "\t" + (chromAndPos.size() == 0 ? "0" : chromAndPos.get(0))
                        + "\t" + 0 + "\t"
                        + (chromAndPos.size() == 0 ? 0l : Long.parseLong(chromAndPos.get(1)))
                        + LINE_SEPARATOR);
                // Collect each individual's genotype codes across all runs,
                // skipping genotypes below the GQ / DP thresholds.
                Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
                Collection<VariantRunData> runs = variantsAndRuns.get(variant);
                if (runs != null)
                    for (VariantRunData run : runs)
                        for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                            SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                            String individualId = individuals
                                    .get(sampleIDs
                                            .indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                    .getId();
                            Integer gq = null;
                            try {
                                gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                            } catch (Exception ignored) {
                                // GQ missing or non-integer: treat as unfiltered
                            }
                            if (gq != null && gq < nMinimumGenotypeQuality)
                                continue;
                            Integer dp = null;
                            try {
                                dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                            } catch (Exception ignored) {
                                // DP missing or non-integer: treat as unfiltered
                            }
                            if (dp != null && dp < nMinimumReadDepth)
                                continue;
                            String gtCode = sampleGenotype.getCode();
                            List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                            if (storedIndividualGenotypes == null) {
                                storedIndividualGenotypes = new ArrayList<String>();
                                individualGenotypes.put(individualId, storedIndividualGenotypes);
                            }
                            storedIndividualGenotypes.add(gtCode);
                        }
                // Emit one genotype digit per individual for this variant.
                for (int j = 0; j < individualList
                        .size(); j++ /* we use this list because it has the proper ordering*/) {
                    String individualId = individualList.get(j);
                    List<String> genotypes = individualGenotypes.get(individualId);
                    HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                    int highestGenotypeCount = 0;
                    String mostFrequentGenotype = null;
                    // Majority vote across runs: keep the most frequent genotype.
                    if (genotypes != null)
                        for (String genotype : genotypes) {
                            if (genotype.length() == 0)
                                continue; /* skip missing genotypes */
                            int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                            if (gtCount > highestGenotypeCount) {
                                highestGenotypeCount = gtCount;
                                mostFrequentGenotype = genotype;
                            }
                            genotypeCounts.put(genotype, gtCount);
                        }
                    List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                            : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);
                    // Output code: 9 = missing, otherwise the count of "0"
                    // alleles in the chosen genotype.
                    int nOutputCode = 0;
                    if (mostFrequentGenotype == null)
                        nOutputCode = 9;
                    else
                        for (String all : Helper.split(mostFrequentGenotype, "/"))
                            if ("0".equals(all))
                                nOutputCode++;
                    // Warn once per variant (on the first individual) about multi-allelic sites.
                    if (j == 0 && variant.getKnownAlleleList().size() > 2)
                        warningFileWriter.write("- Variant " + variant.getId()
                                + " is multi-allelic. Make sure Eigenstrat genotype encoding specifications are suitable for you.\n");
                    zos.write(("" + nOutputCode).getBytes());
                    if (genotypeCounts.size() > 1 || alleles.size() > 2) {
                        if (genotypeCounts.size() > 1)
                            warningFileWriter.write("- Dissimilar genotypes found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting most frequent: " + nOutputCode + "\n");
                        if (alleles.size() > 2)
                            warningFileWriter.write("- More than 2 alleles found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting only the first 2 alleles.\n");
                    }
                }
                zos.write((LINE_SEPARATOR).getBytes());
            }
            if (progress.hasAborted())
                return;
            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                // if (nProgress%5 == 0)
                //     LOG.info("============= exportData: " + nProgress + "% =============" + (System.currentTimeMillis() - before)/1000 + "s");
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        // Genotype matrix done: now append the buffered .snp file.
        snpFileWriter.close();
        zos.putNextEntry(new ZipEntry(exportName + ".snp"));
        BufferedReader in = new BufferedReader(new FileReader(snpFile));
        String sLine;
        while ((sLine = in.readLine()) != null)
            zos.write((sLine + "\n").getBytes());
        in.close();
        warningFileWriter.close();
        // Only add the remarks entry if any warnings were collected.
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            in = new BufferedReader(new FileReader(warningFile));
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        zos.close();
        progress.setCurrentStepProgress((short) 100);
    } finally {
        if (snpFile != null && snpFile.exists())
            snpFile.delete();
    }
}
From source file:com.aurel.track.exchange.track.exporter.TrackExportBL.java
public static ZipOutputStream exportWorkItemsWithAttachments(List<ReportBean> reportBeanList, Integer personID, OutputStream outputStream) { ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream); Document document = exportWorkItems(reportBeanList, personID); ZipEntry dataEntry = new ZipEntry(ExchangeFieldNames.EXCHANGE_ZIP_ENTRY); try {//from w w w .j ava 2s . co m zipOutputStream.putNextEntry(dataEntry); } catch (IOException e) { LOGGER.error("Adding the XML data to the zip failed with " + e.getMessage()); LOGGER.debug(ExceptionUtils.getStackTrace(e)); } ReportBeansToXML.convertToXml(zipOutputStream, document); //zipOutputStream.closeEntry(); //get the attachments for each workItem (if exist) if (reportBeanList != null) { for (ReportBean reportBean : reportBeanList) { TWorkItemBean workItemBean = reportBean.getWorkItemBean(); Integer workItemID = workItemBean.getObjectID(); String workItemAttachmentsDirectory = AttachBL.getFullDirName(null, workItemID); File file = new File(workItemAttachmentsDirectory); if (file.exists() && file.isDirectory()) { ReportBL.zipFiles(file, zipOutputStream, file.getAbsolutePath()); } } } try { zipOutputStream.close(); } catch (IOException e) { LOGGER.warn("Closing the zip failed with " + e.getMessage()); LOGGER.debug(ExceptionUtils.getStackTrace(e)); } return zipOutputStream; }
From source file:com.genericworkflownodes.knime.nodes.io.outputfile.OutputFileNodeModel.java
/** * {@inheritDoc}/* w w w . j ava 2 s . c o m*/ */ @Override protected void saveInternals(final File internDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException { ZipOutputStream out = new ZipOutputStream(new FileOutputStream(new File(internDir, "loadeddata"))); ZipEntry entry = new ZipEntry("rawdata.bin"); out.putNextEntry(entry); out.write(data.getBytes()); out.close(); }
From source file:game.com.HandleDownloadFolderServlet.java
private void outputZipStream(ZipOutputStream output, File file) throws FileNotFoundException, IOException { for (File currentFile : file.listFiles()) { InputStream input = null; byte[] buffer = new byte[DEFAULT_BUFFER_SIZE]; try {//from w w w. j a v a2s. co m if (currentFile.isDirectory()) { outputZipStream(output, currentFile); } else { input = new BufferedInputStream(new FileInputStream(currentFile), DEFAULT_BUFFER_SIZE); output.putNextEntry(new ZipEntry(currentFile.getName())); for (int length = 0; (length = input.read(buffer)) > 0;) { output.write(buffer, 0, length); } output.closeEntry(); } } finally { if (input != null) { try { input.close(); } catch (Exception logOrIgnore) { logger.error(logOrIgnore.getMessage(), logOrIgnore); } } } } }
From source file:com.anritsu.mcrepositorymanager.utils.Packing.java
public boolean addPackageToArchive(McPackage p) { boolean result = false; for (String link : p.getDownloadLinks()) { try {// w w w . j av a2s .c om String dowloadLink[] = link.split("/"); String fileName = dowloadLink[dowloadLink.length - 1]; System.out.println("Archiving " + fileName + ": " + link); status.getArchivedPackages().add(fileName); byte[] buf = new byte[1024]; int len; FileInputStream in = new FileInputStream(FOLDER_PATH + fileName); ZipEntry e = new ZipEntry(fileName); out.putNextEntry(e); while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } out.closeEntry(); in.close(); LOGGER.log(Level.INFO, link + " succesfully archived!"); result = true; } catch (Exception ex) { Logger.getLogger(Packing.class.getName()).log(Level.SEVERE, null, ex); result = false; } } return result; }