List of usage examples for the java.util.zip.ZipEntry copy constructor
public ZipEntry(ZipEntry e) — creates a new zip entry with fields (name, timestamps, method, sizes, comment, extra data) taken from the specified entry.
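Note that most of the snippets below actually build fresh entries with the ZipEntry(String name) constructor; the ZipEntry(ZipEntry) copy constructor is typically used when transferring entries between archives while preserving their metadata. As a minimal sketch (not taken from any of the projects below; copyArchive is a hypothetical helper name, and InputStream.transferTo assumes JDK 9+):

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.Enumeration;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipFile;
    import java.util.zip.ZipOutputStream;

    // Copies every entry of src into dst. new ZipEntry(e) carries over the name,
    // timestamps, method, comment and extra fields of the original entry.
    public static void copyArchive(String src, String dst) throws IOException {
        try (ZipFile zipIn = new ZipFile(src);
             ZipOutputStream zipOut = new ZipOutputStream(Files.newOutputStream(Paths.get(dst)))) {
            Enumeration<? extends ZipEntry> entries = zipIn.entries();
            while (entries.hasMoreElements()) {
                ZipEntry e = entries.nextElement();
                ZipEntry copy = new ZipEntry(e);   // the ZipEntry(ZipEntry) copy constructor
                copy.setCompressedSize(-1);        // let the stream recompute it; a stale value
                                                   // can cause an "invalid entry compressed size" ZipException
                zipOut.putNextEntry(copy);
                if (!e.isDirectory()) {
                    try (InputStream in = zipIn.getInputStream(e)) {
                        in.transferTo(zipOut);
                    }
                }
                zipOut.closeEntry();
            }
        }
    }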
From source file:edu.dfci.cccb.mev.dataset.rest.controllers.WorkspaceController.java
private void zipAnnotations(String name, String dimension, ZipOutputStream zout)
        throws DatasetNotFoundException, IOException {
    Dataset dataset = workspace.get(name);
    long projectId = projectManager.getProjectID(dataset.name() + dimension);
    if (projectId > -1) {
        File annotations = new TemporaryFile();
        GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(annotations));
        TarOutputStream tos = new TarOutputStream(gos);
        projectManager.exportProject(projectId, tos);
        tos.flush();
        tos.close();
        zout.putNextEntry(new ZipEntry(String.format("annotations_%s.tar.gz", dimension)));
        IOUtils.copy(new FileInputStream(annotations), zout);
        zout.closeEntry();
    }
}
From source file:moskitt4me.repositoryClient.core.util.RepositoryClientUtil.java
public static void addFileToZip(File file, ZipOutputStream zos) throws Exception {
    zos.putNextEntry(new ZipEntry(file.getName()));
    BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
    long bytesRead = 0;
    byte[] bytesIn = new byte[1024];
    int read = 0;
    while ((read = bis.read(bytesIn)) != -1) {
        zos.write(bytesIn, 0, read);
        bytesRead += read;
    }
    zos.closeEntry();
    bis.close();
}
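The helper above can leak the BufferedInputStream if a read fails, and the bytesRead counter is never used. A try-with-resources variant (an illustrative sketch, not part of the moskitt4me sources; it assumes JDK 9+ for InputStream.transferTo) would be:

    import java.io.*;
    import java.util.zip.*;

    public static void addFileToZip(File file, ZipOutputStream zos) throws IOException {
        zos.putNextEntry(new ZipEntry(file.getName()));
        try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
            in.transferTo(zos); // JDK 9+; replaces the manual 1 KB buffer loop
        }
        zos.closeEntry(); // the caller keeps ownership of zos and closes it itself
    }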
From source file:fr.cirad.mgdb.exporting.markeroriented.VcfExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    Integer projectId = null;
    for (SampleId spId : sampleIDs) {
        if (projectId == null)
            projectId = spId.getProject();
        else if (projectId != spId.getProject()) {
            projectId = 0;
            break; // more than one project is involved: no header will be written
        }
    }

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);
    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    LinkedHashMap<SampleId, String> sampleIDToIndividualIdMap = new LinkedHashMap<SampleId, String>();
    ArrayList<String> individualList = new ArrayList<String>();
    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    for (int i = 0; i < sampleIDs.size(); i++) {
        String individualId = individuals.get(i).getId();
        sampleIDToIndividualIdMap.put(sampleIDs.get(i), individualId);
        if (!individualList.contains(individualId)) {
            individualList.add(individualId);
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".vcf"));

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nQueryChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;

    VariantContextWriter writer = null;
    try {
        List<String> distinctSequenceNames = new ArrayList<String>();

        String sequenceSeqCollName = MongoTemplateManager.getMongoCollectionName(Sequence.class);
        if (mongoTemplate.collectionExists(sequenceSeqCollName)) {
            DBCursor markerCursorCopy = markerCursor.copy();
            markerCursorCopy.batchSize(nQueryChunkSize);
            while (markerCursorCopy.hasNext()) {
                int nLoadedMarkerCountInLoop = 0;
                boolean fStartingNewChunk = true;
                while (markerCursorCopy.hasNext()
                        && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                    DBObject exportVariant = markerCursorCopy.next();
                    String chr = (String) ((DBObject) exportVariant
                            .get(VariantData.FIELDNAME_REFERENCE_POSITION))
                                    .get(ReferencePosition.FIELDNAME_SEQUENCE);
                    if (!distinctSequenceNames.contains(chr))
                        distinctSequenceNames.add(chr);
                }
            }
            markerCursorCopy.close();
        }

        Collections.sort(distinctSequenceNames, new AlphaNumericStringComparator());

        SAMSequenceDictionary dict = createSAMSequenceDictionary(sModule, distinctSequenceNames);
        writer = new CustomVCFWriter(null, zos, dict, false, false, true);
        // VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
        // vcwb.unsetOption(Options.INDEX_ON_THE_FLY);
        // vcwb.unsetOption(Options.DO_NOT_WRITE_GENOTYPES);
        // vcwb.setOption(Options.USE_ASYNC_IO);
        // vcwb.setOption(Options.INDEX_ON_THE_FLY);
        // vcwb.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
        // vcwb.setReferenceDictionary(dict);
        // writer = vcwb.build();
        // writer = new AsyncVariantContextWriter(writer, 3000);

        progress.moveToNextStep(); // done with dictionary

        DBCursor headerCursor = mongoTemplate
                .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class))
                .find(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId));
        Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>();
        boolean fWriteCommandLine = true, fWriteEngineHeaders = true; // default values
        while (headerCursor.hasNext()) {
            DBVCFHeader dbVcfHeader = DBVCFHeader.fromDBObject(headerCursor.next());
            headerLines.addAll(dbVcfHeader.getHeaderLines());

            // Add sequence header lines (not stored in our vcf header collection)
            BasicDBObject projection = new BasicDBObject(SequenceStats.FIELDNAME_SEQUENCE_LENGTH, true);
            int nSequenceIndex = 0;
            for (String sequenceName : distinctSequenceNames) {
                String sequenceInfoCollName = MongoTemplateManager.getMongoCollectionName(SequenceStats.class);
                boolean fCollectionExists = mongoTemplate.collectionExists(sequenceInfoCollName);
                if (fCollectionExists) {
                    DBObject record = mongoTemplate.getCollection(sequenceInfoCollName).findOne(
                            new Query(Criteria.where("_id").is(sequenceName)).getQueryObject(), projection);
                    if (record == null) {
                        LOG.warn("Sequence '" + sequenceName + "' not found in collection "
                                + sequenceInfoCollName);
                        continue;
                    }

                    Map<String, String> sequenceLineData = new LinkedHashMap<String, String>();
                    sequenceLineData.put("ID", (String) record.get("_id"));
                    sequenceLineData.put("length",
                            ((Number) record.get(SequenceStats.FIELDNAME_SEQUENCE_LENGTH)).toString());
                    headerLines.add(new VCFContigHeaderLine(sequenceLineData, nSequenceIndex++));
                }
            }
            fWriteCommandLine = headerCursor.size() == 1 && dbVcfHeader.getWriteCommandLine();
            // wouldn't make sense to include command lines for several runs
            if (!dbVcfHeader.getWriteEngineHeaders())
                fWriteEngineHeaders = false;
        }
        headerCursor.close();

        VCFHeader header = new VCFHeader(headerLines, individualList);
        header.setWriteCommandLine(fWriteCommandLine);
        header.setWriteEngineHeaders(fWriteEngineHeaders);
        writer.writeHeader(header);

        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        HashMap<SampleId, Comparable /* phID */> phasingIDsBySample = new HashMap<SampleId, Comparable>();
        while (markerCursor.hasNext()) {
            if (progress.hasAborted())
                return;

            int nLoadedMarkerCountInLoop = 0;
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nQueryChunkSize);
            List<Comparable> currentMarkers = new ArrayList<Comparable>();
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                currentMarkers.add((Comparable) exportVariant.get("_id"));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /* new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE)) */
            ); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) {
                VariantContext vc = variant.toVariantContext(variantsAndRuns.get(variant),
                        !ObjectId.isValid(variant.getId().toString()), sampleIDToIndividualIdMap,
                        phasingIDsBySample, nMinimumGenotypeQuality, nMinimumReadDepth, warningFileWriter,
                        markerSynonyms == null ? variant.getId() : markerSynonyms.get(variant.getId()));
                try {
                    writer.add(vc);
                } catch (Throwable t) {
                    Exception e = new Exception("Unable to convert to VariantContext: " + variant.getId(), t);
                    LOG.debug("error", e);
                    throw e;
                }
                if (nLoadedMarkerCountInLoop > currentMarkers.size())
                    LOG.error("Bug: writing variant number " + nLoadedMarkerCountInLoop + " (only "
                            + currentMarkers.size() + " variants expected)");
            }

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        progress.setCurrentStepProgress((short) 100);
    } catch (Exception e) {
        LOG.error("Error exporting", e);
        progress.setError(e.getMessage());
        return;
    } finally {
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            BufferedReader in = new BufferedReader(new FileReader(warningFile));
            String sLine;
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        if (writer != null)
            try {
                writer.close();
            } catch (Throwable ignored) {
            }
    }
}
From source file:de.hybris.platform.impex.jalo.ImpExMediasImportTest.java
/**
 * Calls the <code>importData</code> method of the given handler with a zip-file path in different formats.
 *
 * @param handler
 *            handler which will be used for the test
 * @param media
 *            media where the data will be imported to
 */
private void mediaImportFromZip(final MediaDataHandler handler, final Media media) {
    File testFile = null;
    try {
        testFile = File.createTempFile("mediaImportTest", ".zip");
        final ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(testFile));
        zos.putNextEntry(new ZipEntry(new File("files", "dummy.txt").getPath()));
        zos.putNextEntry(new ZipEntry(new File("files", "test.txt").getPath()));
        final PrintWriter printer = new PrintWriter(zos);
        printer.print("testest");
        printer.flush();
        zos.flush();
        printer.close();
        zos.close();
    } catch (final IOException e) {
        e.printStackTrace();
        fail(e.getMessage());
    }

    final String unixPathRel = ImpExConstants.Syntax.ZIP_BASED_FILE_PATH_PREFIX
            + FilenameUtils.separatorsToUnix(testFile.getPath()) + "&files/test.txt";
    final String unixPathAbs = ImpExConstants.Syntax.ZIP_BASED_FILE_PATH_PREFIX
            + FilenameUtils.separatorsToUnix(testFile.getAbsolutePath()) + "&files/test.txt";
    final String winPathRel = ImpExConstants.Syntax.ZIP_BASED_FILE_PATH_PREFIX
            + FilenameUtils.separatorsToWindows(testFile.getPath()) + "&files\\test.txt";
    final String winPathAbs = ImpExConstants.Syntax.ZIP_BASED_FILE_PATH_PREFIX
            + FilenameUtils.separatorsToWindows(testFile.getAbsolutePath()) + "&files\\test.txt";

    try {
        mediaImport(handler, media, unixPathRel, "testest");
        mediaImport(handler, media, unixPathAbs, "testest");
        mediaImport(handler, media, winPathRel, "testest");
        mediaImport(handler, media, winPathAbs, "testest");
    } catch (final Exception e) {
        fail(e.getMessage());
    }

    if (!testFile.delete()) {
        fail("Can not delete temp file: " + testFile.getPath());
    }
}
From source file:cz.zcu.kiv.eegdatabase.wui.core.experiments.ExperimentDownloadProvider.java
@Transactional
public FileDTO generatePackageFile(ExperimentPackage pckg, MetadataCommand mc, License license,
        List<Experiment> selectList, Person loggedUser, DownloadPackageManager manager) {

    ZipOutputStream zipOutputStream = null;
    FileOutputStream fileOutputStream = null;
    File tempZipFile = null;
    ZipInputStream in = null;
    File file = null;

    try {
        FileDTO dto = new FileDTO();
        dto.setFileName(pckg.getName().replaceAll("\\s", "_") + ".zip");

        // create temp zip file
        tempZipFile = File.createTempFile("experimentDownload_", ".zip");
        // open stream to temp zip file
        fileOutputStream = new FileOutputStream(tempZipFile);
        // prepare zip stream
        zipOutputStream = new ZipOutputStream(fileOutputStream);

        for (Experiment tmp : selectList) {
            Experiment exp = service.getExperimentForDetail(tmp.getExperimentId());
            String experimentDirPrefix = "";

            // create a directory for each experiment
            String scenarioName = exp.getScenario().getTitle();
            if (scenarioName != null) {
                experimentDirPrefix = "Experiment_" + exp.getExperimentId() + "_"
                        + scenarioName.replaceAll("\\s", "_") + "/";
            } else
                experimentDirPrefix = "Experiment_data_" + exp.getExperimentId() + "/";

            // generate temp zip file with experiment
            byte[] licenseFile = licenseService.getLicenseAttachmentContent(license.getLicenseId());
            file = zipGenerator.generate(exp, mc, exp.getDataFiles(), licenseFile,
                    license.getAttachmentFileName());

            in = new ZipInputStream(new FileInputStream(file));
            ZipEntry entryIn = null;

            // copy the unzipped experiment into the package zip file.
            // NOTE: it is easier to copy the content of one zip into another than to
            // recreate the directory structure via java.io.File.
            while ((entryIn = in.getNextEntry()) != null) {
                zipOutputStream.putNextEntry(new ZipEntry(experimentDirPrefix + entryIn.getName()));
                IOUtils.copyLarge(in, zipOutputStream);
                zipOutputStream.closeEntry();
            }

            // mark all temp files for the package for delete on exit
            FileUtils.deleteOnExitQuietly(file);
            IOUtils.closeQuietly(in);
            FileUtils.deleteQuietly(file);

            createHistoryRecordAboutDownload(exp, loggedUser);

            synchronized (this) {
                manager.setNumberOfDownloadedExperiments(manager.getNumberOfDownloadedExperiments() + 1);
            }
        }
        dto.setFile(tempZipFile);

        // no problem detected - close all streams and mark the file for delete on exit;
        // the file is deleted after the download action
        FileUtils.deleteOnExitQuietly(tempZipFile);
        IOUtils.closeQuietly(zipOutputStream);
        IOUtils.closeQuietly(fileOutputStream);
        return dto;
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        // problem detected - close all streams, mark files for delete on exit and try to delete them
        IOUtils.closeQuietly(zipOutputStream);
        IOUtils.closeQuietly(fileOutputStream);
        FileUtils.deleteOnExitQuietly(tempZipFile);
        FileUtils.deleteOnExitQuietly(file);
        FileUtils.deleteQuietly(tempZipFile);
        FileUtils.deleteQuietly(file);
        return null;
    }
}
From source file:com.replaymod.replaystudio.io.ReplayOutputStream.java
/**
 * Starts a new entry in this replay zip file.
 * The previous entry is therefore closed.
 *
 * @param name Name of the new entry
 */
public void nextEntry(String name) throws IOException {
    if (zipOut != null) {
        zipOut.closeEntry();
        zipOut.putNextEntry(new ZipEntry(name));
    } else {
        throw new UnsupportedOperationException("Cannot start new entry when writing raw replay output.");
    }
}
From source file:com.thinkbiganalytics.feedmgr.service.ExportImportTemplateService.java
private byte[] zip(RegisteredTemplate template, String nifiTemplateXml, List<String> reusableTemplateXmls) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ZipOutputStream zos = new ZipOutputStream(baos)) {
        ZipEntry entry = new ZipEntry(NIFI_TEMPLATE_XML_FILE);
        zos.putNextEntry(entry);
        zos.write(nifiTemplateXml.getBytes());
        zos.closeEntry();

        int reusableTemplateNumber = 0;
        for (String reusableTemplateXml : reusableTemplateXmls) {
            entry = new ZipEntry(String.format("%s_%s.xml", NIFI_CONNECTING_REUSABLE_TEMPLATE_XML_FILE,
                    reusableTemplateNumber++));
            zos.putNextEntry(entry);
            zos.write(reusableTemplateXml.getBytes());
            zos.closeEntry();
        }

        entry = new ZipEntry(TEMPLATE_JSON_FILE);
        zos.putNextEntry(entry);
        String json = ObjectMapperSerializer.serialize(template);
        zos.write(json.getBytes());
        zos.closeEntry();
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
    return baos.toByteArray();
}
From source file:net.morematerials.manager.UpdateManager.java
private void updateSmp(File file) throws Exception {
    ZipFile smpFile = new ZipFile(file);
    Enumeration<? extends ZipEntry> entries = smpFile.entries();
    String smpName = file.getName().substring(0, file.getName().lastIndexOf("."));

    // First we need to know which files are in this .smp file, because the old format
    // uses magic filename matching.
    ArrayList<String> containedFiles = new ArrayList<String>();
    HashMap<String, YamlConfiguration> materials = new HashMap<String, YamlConfiguration>();

    // Now we walk through the file once and store every file.
    ZipEntry entry;
    YamlConfiguration yaml;
    while (entries.hasMoreElements()) {
        entry = entries.nextElement();
        // Only if it is a .yml file
        if (entry.getName().endsWith(".yml")) {
            // Load the .yml file
            yaml = new YamlConfiguration();
            yaml.load(smpFile.getInputStream(entry));
            // Texture is required for the new package format.
            if (!yaml.contains("Texture")) {
                materials.put(entry.getName().substring(0, entry.getName().lastIndexOf(".")), yaml);
            } else {
                containedFiles.add(entry.getName());
            }
        } else {
            containedFiles.add(entry.getName());
        }
    }

    // If this map contains any entry, we need to convert something.
    if (materials.size() > 0) {
        this.plugin.getUtilsManager().log("Deprecated .smp found: " + file.getName() + ". Updating...");

        // We need a temporary directory to update the .smp in.
        this.tempDir.mkdir();

        // First extract all untouched assets:
        for (String filename : containedFiles) {
            InputStream in = smpFile.getInputStream(smpFile.getEntry(filename));
            OutputStream out = new FileOutputStream(new File(this.tempDir, filename));
            int read;
            byte[] bytes = new byte[1024];
            while ((read = in.read(bytes)) != -1) {
                out.write(bytes, 0, read);
            }
            out.flush();
            out.close();
            in.close();
        }

        // Now convert each .yml file in this archive.
        YamlConfiguration oldYaml;
        YamlConfiguration newYaml;
        for (String materialName : materials.keySet()) {
            oldYaml = materials.get(materialName);
            newYaml = new YamlConfiguration();

            // Required "Type" which is Block or Item. The old format didn't support Tools anyway.
            newYaml.set("Type", oldYaml.getString("Type"));
            // Title is now required and falls back to the filename.
            newYaml.set("Title", oldYaml.getString("Title", materialName));

            // Now call the converter methods.
            if (newYaml.getString("Type").equals("Block")) {
                this.convertBlock(oldYaml, newYaml, materialName, containedFiles);
                this.convertBlockHandlers(oldYaml, newYaml);
            } else if (newYaml.getString("Type").equals("Item")) {
                this.convertItem(oldYaml, newYaml, materialName, containedFiles);
                this.convertItemHandlers(oldYaml, newYaml);
            }

            // Copy over recipes - nothing changed here!
            if (oldYaml.contains("Recipes")) {
                newYaml.set("Recipes", oldYaml.getList("Recipes"));
            }

            // Finally store the new .yml file.
            String yamlString = newYaml.saveToString();
            BufferedWriter out = new BufferedWriter(
                    new FileWriter(new File(this.tempDir, materialName + ".yml")));
            out.write(this.fixYamlProblems(yamlString));
            out.close();

            // Also update the itemmap entry!
            for (Integer i = 0; i < this.itemMap.size(); i++) {
                String oldMaterial = this.itemMap.get(i).replaceAll("^[0-9]+:MoreMaterials.", "");
                if (oldMaterial.equals(newYaml.getString("Title"))) {
                    this.itemMap.set(i, this.itemMap.get(i).replaceAll("^([0-9]+:MoreMaterials.).+$",
                            "$1" + smpName + "." + materialName));
                    break;
                }
            }

            // And we need to tell SpoutPlugin that this material must be renamed!
            SpoutManager.getMaterialManager().renameMaterialKey(this.plugin, newYaml.getString("Title"),
                    smpName + "." + materialName);
        }

        // First remove the old .smp file.
        smpFile.close();
        file.delete();

        // Then repack the new .smp file.
        ZipOutputStream out = new ZipOutputStream(new FileOutputStream(file));
        for (File entryFile : this.tempDir.listFiles()) {
            FileInputStream in = new FileInputStream(entryFile);
            out.putNextEntry(new ZipEntry(entryFile.getName()));
            Integer len;
            byte[] buf = new byte[1024];
            while ((len = in.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
            out.closeEntry();
            in.close();
        }
        out.close();

        // At last remove the temp directory.
        FileUtils.deleteDirectory(this.tempDir);
    } else {
        // At last, close the file handle.
        smpFile.close();
    }
}
From source file:org.cloudfoundry.tools.io.zip.ZipArchiveTest.java
@Test
public void shouldReloadIfChanged() throws Exception {
    File file = this.zip.getFile("a/b/c.txt");
    assertThat(file.getContent().asString(), is("c"));
    ZipOutputStream zipOutputStream = new ZipOutputStream(new FileOutputStream(this.zipFile));
    try {
        zipOutputStream.putNextEntry(new ZipEntry("/a/b/c.txt"));
        zipOutputStream.write("c2".getBytes());
    } finally {
        zipOutputStream.close();
    }
    assertThat(file.getContent().asString(), is("c2"));
}
From source file:de.uniwue.info6.database.jaxb.ScenarioExporter.java
/**
 * @param files
 * @param zipfile
 * @return
 */
private File zip(List<File> files, File zipfile) {
    byte[] buf = new byte[1024];
    try {
        ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zipfile));
        for (int i = 0; i < files.size(); i++) {
            FileInputStream in = new FileInputStream(files.get(i));
            out.putNextEntry(new ZipEntry(files.get(i).getName()));
            int len;
            while ((len = in.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
            out.closeEntry();
            in.close();
        }
        out.close();

        for (File file : files) {
            if (file.isFile() && file.exists() && file.canWrite()) {
                file.delete();
            }
        }
        return zipfile;
    } catch (Exception e) {
        LOGGER.error("FAILED TO ZIP FILES", e);
    }
    return null;
}