List of usage examples for java.util.zip ZipFile close
public void close() throws IOException
From source file:org.eclipse.orion.internal.server.servlets.xfer.ClientImport.java
/** * Unzips the transferred file. Returns <code>true</code> if the unzip was * successful, and <code>false</code> otherwise. In case of failure, this method * handles setting an appropriate response. *///from w w w.ja va2 s. c om private boolean completeUnzip(HttpServletRequest req, HttpServletResponse resp) throws ServletException { IPath destPath = new Path(getPath()); boolean force = false; List<String> filesFailed = new ArrayList<String>(); if (req.getParameter("force") != null) { force = req.getParameter("force").equals("true"); } List<String> excludedFiles = new ArrayList<String>(); if (req.getParameter(ProtocolConstants.PARAM_EXCLUDE) != null) { excludedFiles = Arrays.asList(req.getParameter(ProtocolConstants.PARAM_EXCLUDE).split(",")); } try { ZipFile source = new ZipFile(new File(getStorageDirectory(), FILE_DATA)); IFileStore destinationRoot = NewFileServlet.getFileStore(req, destPath); Enumeration<? extends ZipEntry> entries = source.entries(); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); IFileStore destination = destinationRoot.getChild(entry.getName()); if (!destinationRoot.isParentOf(destination) || hasExcludedParent(destination, destinationRoot, excludedFiles)) { //file should not be imported continue; } if (entry.isDirectory()) destination.mkdir(EFS.NONE, null); else { if (!force && destination.fetchInfo().exists()) { filesFailed.add(entry.getName()); continue; } destination.getParent().mkdir(EFS.NONE, null); // this filter will throw an IOException if a zip entry is larger than 100MB FilterInputStream maxBytesReadInputStream = new FilterInputStream( source.getInputStream(entry)) { private static final int maxBytes = 0x6400000; // 100MB private int totalBytes; private void addByteCount(int count) throws IOException { totalBytes += count; if (totalBytes > maxBytes) { throw new IOException("Zip file entry too large"); } } @Override public int read() throws IOException { int c = super.read(); if (c != -1) { 
addByteCount(1); } return c; } @Override public int read(byte[] b, int off, int len) throws IOException { int read = super.read(b, off, len); if (read != -1) { addByteCount(read); } return read; } }; boolean fileWritten = false; try { IOUtilities.pipe(maxBytesReadInputStream, destination.openOutputStream(EFS.NONE, null), false, true); fileWritten = true; } finally { if (!fileWritten) { try { destination.delete(EFS.NONE, null); } catch (CoreException ce) { // best effort } } } } } source.close(); if (!filesFailed.isEmpty()) { String failedFilesList = ""; for (String file : filesFailed) { if (failedFilesList.length() > 0) { failedFilesList += ", "; } failedFilesList += file; } String msg = NLS.bind( "Failed to transfer all files to {0}, the following files could not be overwritten {1}", destPath.toString(), failedFilesList); JSONObject jsonData = new JSONObject(); jsonData.put("ExistingFiles", filesFailed); statusHandler.handleRequest(req, resp, new ServerStatus(IStatus.ERROR, HttpServletResponse.SC_BAD_REQUEST, msg, jsonData, null)); return false; } } catch (ZipException e) { //zip exception implies client sent us invalid input String msg = NLS.bind("Failed to complete file transfer on {0}", destPath.toString()); statusHandler.handleRequest(req, resp, new ServerStatus(IStatus.ERROR, HttpServletResponse.SC_BAD_REQUEST, msg, e)); return false; } catch (Exception e) { //other failures should be considered server errors String msg = NLS.bind("Failed to complete file transfer on {0}", destPath.toString()); statusHandler.handleRequest(req, resp, new ServerStatus(IStatus.ERROR, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, msg, e)); return false; } return true; }
From source file:com.alcatel_lucent.nz.wnmsextract.reader.FileUtilities.java
public void decompressZip(File inputZipPath, File zipPath) { int BUFFER = 2048; List<File> zipFiles = new ArrayList<File>(); try {/*from ww w . j av a 2 s. c om*/ zipPath.mkdir(); } catch (SecurityException e) { jlog.fatal("Security exception when creating " + zipPath.getName()); } ZipFile zipFile = null; boolean isZip = true; // Open Zip file for reading (should be in temppath) try { zipFile = new ZipFile(inputZipPath, ZipFile.OPEN_READ); } catch (IOException e) { jlog.fatal("IO exception in " + inputZipPath.getName()); } // Create an enumeration of the entries in the zip file Enumeration<? extends ZipEntry> zipFileEntries = zipFile.entries(); if (isZip) { // Process each entry while (zipFileEntries.hasMoreElements()) { // Get a zip file entry ZipEntry entry = zipFileEntries.nextElement(); String currentEntry = entry.getName(); File destFile = null; // destFile should be pointing to temppath\%date%\ try { destFile = new File(zipPath.getAbsolutePath(), currentEntry); destFile = new File(zipPath.getAbsolutePath(), destFile.getName()); } catch (NullPointerException e) { jlog.fatal("File not found" + destFile.getName()); } // If the entry is a .zip add it to the list so that it can be extracted if (currentEntry.endsWith(".zip")) { zipFiles.add(destFile); } try { // Extract file if not a directory if (!entry.isDirectory()) { // Stream the zip entry BufferedInputStream is = new BufferedInputStream(zipFile.getInputStream(entry)); int currentByte; // establish buffer for writing file byte data[] = new byte[BUFFER]; FileOutputStream fos = null; // Write the current file to disk try { fos = new FileOutputStream(destFile); } catch (FileNotFoundException e) { jlog.fatal("File not found " + destFile.getName()); } catch (SecurityException e) { jlog.fatal("Access denied to " + destFile.getName()); } BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER); // read and write until last byte is encountered while ((currentByte = is.read(data, 0, BUFFER)) != -1) { 
dest.write(data, 0, currentByte); } dest.flush(); dest.close(); is.close(); } } catch (IOException ioe) { jlog.fatal("IO exception in " + zipFile.getName()); } } try { zipFile.close(); } catch (IOException e) { jlog.fatal("IO exception when closing " + zipFile.getName()); } } // Recursively decompress the list of zip files for (File f : zipFiles) { decompressZip(f, zipPath); } return; }
From source file:org.bdval.BDVModel.java
/**
 * Loads a BDVal model from disk. BDVal models are generated with the
 * {@link org.bdval.DiscoverAndValidate} tools (BDVal).
 *
 * Two on-disk layouts are supported: a zip archive (newer) and a set of
 * loose binary files sharing {@code modelFilenamePrefix} (older). Consensus
 * models delegate loading to a {@link ConsensusBDVModel} built from the
 * juror prefixes listed in the properties.
 *
 * @param options specific options to use when loading the model
 * @throws IOException if there is a problem accessing the model
 * @throws ClassNotFoundException if the type of the model is not recognized
 */
public void load(final DAVOptions options) throws IOException, ClassNotFoundException {
    final boolean zipExists = new File(zipFilename).exists();
    if (LOG.isDebugEnabled()) {
        LOG.debug("model zip file exists: " + BooleanUtils.toStringYesNo(zipExists));
    }
    properties.clear();
    properties.setDelimiterParsingDisabled(true);
    // check to see if a zip file exists - if it doesn't we assume it's an old binary format
    if (zipModel && zipExists) {
        LOG.info("Reading model from filename: " + zipFilename);
        final ZipFile zipFile = new ZipFile(zipFilename);
        try {
            final ZipEntry propertyEntry = zipFile.getEntry(FilenameUtils.getName(modelPropertiesFilename));
            // load properties
            properties.clear();
            properties.addAll(loadProperties(zipFile.getInputStream(propertyEntry), options));
            // the platform is more than one entry in the zip, so here we pass the whole zip
            trainingPlatform = options.trainingPlatform = loadPlatform(zipFile);
            if (isConsensusModel()) {
                // collect juror prefixes "bdval.consensus.model.0", ".1", ... until a gap
                int index = 0;
                final ObjectList<String> modelJurorFilePrefixes = new ObjectArrayList<String>();
                String nextFilename;
                while ((nextFilename = (String) properties
                        .getProperty("bdval.consensus.model." + Integer.toString(index))) != null) {
                    modelJurorFilePrefixes.add(nextFilename);
                    index++;
                }
                // delegate the real work to the consensus implementation and stop here
                delegate = new ConsensusBDVModel(modelFilenamePrefix,
                        modelJurorFilePrefixes.toArray(new String[modelJurorFilePrefixes.size()]));
                delegate.load(options);
                setGeneList(convertTrainingPlatformToGeneList(options));
                return;
            } else {
                // non-consensus: read per-probeset scaling maps from the archive
                probesetScaleMeanMap = options.probesetScaleMeanMap = loadMeansMap(
                        zipFile.getInputStream(zipFile.getEntry(FilenameUtils.getName(meansMapFilename))));
                probesetScaleRangeMap = options.probesetScaleRangeMap = loadRangeMap(
                        zipFile.getInputStream(zipFile.getEntry(FilenameUtils.getName(rangeMapFilename))));
                setGeneList(convertTrainingPlatformToGeneList(options));
            }
            final String modelParameters = properties.getString("training.classifier.parameters");
            LOG.info("Loading model " + modelFilename);
            final InputStream modelStream = zipFile
                    .getInputStream(zipFile.getEntry(FilenameUtils.getName(modelFilename)));
            helper = ClassificationModel.load(modelStream, modelParameters);
            LOG.info("Model loaded.");
            options.classiferClass = helper.classifier.getClass();
            // we don't have a way to inspect the saved model for parameters used during training:
            options.classifierParameters = ClassificationModel.splitModelParameters(modelParameters);
        } finally {
            try {
                zipFile.close();
            } catch (IOException e) { // NOPMD
                // ignore since there is not much we can do anyway
            }
        }
    } else {
        // legacy layout: individual files next to modelFilenamePrefix
        final File propertyFile = new File(modelFilenamePrefix + "." + ModelFileExtension.props.toString());
        LOG.debug("Loading properties from " + propertyFile.getAbsolutePath());
        // NOTE(review): this local shadows the instance field "properties" —
        // only the consensus-prefix lookups below use it; verify intended.
        final Properties properties = loadProperties(FileUtils.openInputStream(propertyFile), options);
        trainingPlatform = options.trainingPlatform = (GEOPlatformIndexed) BinIO.loadObject(platformFilename);
        if (isConsensusModel()) {
            int index = 0;
            final ObjectList<String> modelJurorFilePrefixes = new ObjectArrayList<String>();
            String nextFilename = null;
            while ((nextFilename = (String) properties
                    .getProperty("bdval.consensus.model." + Integer.toString(index))) != null) {
                modelJurorFilePrefixes.add(nextFilename);
                index++;
            }
            delegate = new ConsensusBDVModel(modelFilenamePrefix,
                    modelJurorFilePrefixes.toArray(new String[modelJurorFilePrefixes.size()]));
            delegate.load(options);
            setGeneList(convertTrainingPlatformToGeneList(options));
            return;
        } else {
            // legacy scaling maps are serialized fastutil maps on disk
            probesetScaleMeanMap = options.probesetScaleMeanMap = (Object2DoubleMap<MutableString>) BinIO
                    .loadObject(modelFilenamePrefix + ".means");
            if (LOG.isDebugEnabled()) {
                LOG.debug("Number of entries in means map = " + probesetScaleMeanMap.size());
            }
            probesetScaleRangeMap = options.probesetScaleRangeMap = (Object2DoubleMap<MutableString>) BinIO
                    .loadObject(modelFilenamePrefix + ".ranges");
            if (LOG.isDebugEnabled()) {
                LOG.debug("Number of entries in range map = " + probesetScaleRangeMap.size());
            }
            setGeneList(convertTrainingPlatformToGeneList(options));
        }
        final String modelParameters = properties.getString("training.classifier.parameters");
        LOG.info("Loading model " + modelFilename);
        helper = ClassificationModel.load(modelFilename, modelParameters);
        LOG.info("Model loaded.");
        options.classiferClass = helper.classifier.getClass();
        // we don't have a way to inspect the saved model for parameters used during training:
        options.classifierParameters = ClassificationModel.splitModelParameters(modelParameters);
    }
}
From source file:org.sonatype.flexmojos.rsl.InstallerMojo.java
/**
 * Ensures an RSL (runtime shared library) variant of {@code artifact} exists:
 * if {@code rslArtifact} resolves from a repository it is simply copied to the
 * output directory; otherwise the RSL is produced from the library SWF,
 * optionally optimized, and then deployed or installed.
 *
 * @param artifact    the library artifact the RSL is derived from
 * @param rslArtifact the RSL artifact to resolve or create
 * @throws MojoExecutionException on copy, resolution, or production failure
 */
protected void processDependency(Artifact artifact, Artifact rslArtifact) throws MojoExecutionException {
    try {
        // lookup RSL artifact
        resolver.resolve(rslArtifact, remoteRepositories, localRepository);
        getLog().debug("Artifact RSL found: " + rslArtifact);
        File outputFile = new File(outputDirectory, getFormattedFileName(rslArtifact));
        try {
            FileUtils.copyFile(rslArtifact.getFile(), outputFile);
        } catch (IOException ioe) {
            throw new MojoExecutionException(ioe.getMessage(), ioe);
        }
    } catch (AbstractArtifactResolutionException aare) {
        // RSL not available then create it
        ZipFile archive = null;
        InputStream input = null;
        OutputStream output = null;
        File originalFile = null;
        try {
            // prefer the "original" classified artifact if a previous run produced one
            Artifact originalArtifact = artifactFactory.createArtifactWithClassifier(artifact.getGroupId(),
                    artifact.getArtifactId(), artifact.getVersion(), artifact.getType(), originalClassifier);
            try {
                resolver.resolve(originalArtifact, remoteRepositories, localRepository);
                artifact = originalArtifact;
                getLog().debug("Original artifact found: the RSL will be produced against the original one");
            } catch (Exception e) {
                getLog().debug(
                        "Original artifact not found: assuming the RSL production has never been executed before");
                resolver.resolve(artifact, remoteRepositories, localRepository);
            }
            // pull library.swf out of the SWC archive
            archive = newZipFile(artifact.getFile());
            input = readLibrarySwf(artifact.getFile(), archive);
            File outputFile = new File(outputDirectory, getFormattedFileName(rslArtifact));
            output = new FileOutputStream(outputFile);
            if (optimizeRsls) {
                // keep a backup copy of the unoptimized artifact before optimizing
                originalFile = new File(project.getBuild().getOutputDirectory(),
                        originalArtifact.getFile().getName());
                FileUtils.copyFile(artifact.getFile(), originalFile);
                getLog().info("Attempting to optimize: " + artifact);
                long initialSize = artifact.getFile().length() / 1024;
                optimize(input, output);
                long optimizedSize = outputFile.length() / 1024;
                getLog().info("\t\tsize reduced from " + initialSize + "kB to " + optimizedSize + "kB");
                updateDigest(outputFile, artifact.getFile());
            }
            if (deploy) {
                // remote deployment path: optionally push the backup and digest-updated swc too
                ArtifactRepository deploymentRepository = project.getDistributionManagementArtifactRepository();
                if (optimizeRsls) {
                    if (backup) {
                        deployer.deploy(originalFile, originalArtifact, deploymentRepository, localRepository);
                    }
                    deployer.deploy(artifact.getFile(), artifact, deploymentRepository, localRepository);
                }
                deployer.deploy(outputFile, rslArtifact, deploymentRepository, localRepository);
            } else {
                // local install path; install the backup only if it isn't already resolvable
                if (backup && optimizeRsls && !originalArtifact.isResolved()) {
                    try {
                        resolver.resolve(originalArtifact, remoteRepositories, localRepository);
                    } catch (Exception e) {
                        installer.install(originalFile, originalArtifact, localRepository);
                    }
                }
                installer.install(outputFile, rslArtifact, localRepository);
            }
        } catch (Exception e) {
            throw new MojoExecutionException(e.getMessage(), e);
        } finally {
            IOUtil.close(input);
            IOUtil.close(output);
            if (archive != null) {
                try {
                    archive.close();
                    // the backup copy is only a staging file; remove it once done
                    if (originalFile != null) {
                        originalFile.delete();
                    }
                } catch (IOException e) {
                    // ignore
                }
            }
        }
    }
}
From source file:org.geosdi.geoplatform.services.GPPublisherBasicServiceImpl.java
/** * ************//from ww w . ja v a 2s . com * * @param file the ZIP file from where extracting the info * @return the information of the shapefile this method extracts from a zip * file containing the shape files, the name, the CRS and the geometry types */ private List<LayerInfo> getInfoFromCompressedFile(String userName, File file, String tempUserDir, String tempUserZipDir, String tempUserTifDir, String workspace) throws ResourceNotFoundFault { logger.debug("Call to getInfoFromCompressedShape"); System.setProperty("org.geotools.referencing.forceXY", "true"); List<String> shpEntryNameList = Lists.<String>newArrayList(); List<String> tifEntryNameList = Lists.<String>newArrayList(); List<ZipEntry> sldEntryList = Lists.<ZipEntry>newArrayList(); List<ZipEntry> prjEntryList = Lists.<ZipEntry>newArrayList(); List<LayerInfo> infoShapeList = Lists.<LayerInfo>newArrayList(); ZipFile zipSrc = null; try { // decomprime il contenuto di file nella cartella <tmp>/geoportal/shp zipSrc = new ZipFile(file); Enumeration<? 
extends ZipEntry> entries = zipSrc.entries(); String destinationDir; while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); String entryName = entry.getName(); File newFile = new File(entryName); if (newFile.isDirectory()) { continue; } // entryName = entryName.replaceAll("/", "_"); // logger.info("\n ********** INFO:"+entryName); int lastIndex = entryName.lastIndexOf('/'); entryName = entryName.substring(lastIndex + 1).toLowerCase(); destinationDir = tempUserDir; if (GPSharedUtils.isEmpty(entryName) || entryName.startsWith(".") || entryName.equalsIgnoreCase("__MACOSX")) { continue; } else if (entryName.endsWith(".tif") || entryName.endsWith(".tiff")) { logger.info("INFO: Found geotiff file " + entryName); tifEntryNameList.add(entryName); destinationDir = tempUserTifDir; } else if (entryName.endsWith(".shp")) { logger.info("INFO: Found shape file " + entryName); shpEntryNameList.add(entryName); } else if (entryName.endsWith(".sld")) { logger.info("Adding sld to entry list: " + entryName); sldEntryList.add(entry); continue; } else if (entryName.endsWith(".prj")) { logger.info("Adding prj to entry list: " + entryName); prjEntryList.add(entry); continue; } else if (entryName.endsWith(".tfw")) { destinationDir = tempUserTifDir; } PublishUtility.extractEntryToFile(entry, zipSrc, destinationDir); } //Verificare presenza file sld associato a geotiff oppure a shp file this.putEntryInTheRightDir(sldEntryList, zipSrc, tempUserTifDir, tempUserDir, tifEntryNameList); this.putEntryInTheRightDir(prjEntryList, zipSrc, tempUserTifDir, tempUserDir, tifEntryNameList); // fine decompressione } catch (Exception e) { logger.error("ERROR: " + e); throw new IllegalArgumentException("ERROR: " + e); } finally { try { zipSrc.close(); } catch (IOException ex) { } } infoShapeList .addAll(this.analyzeShpList(shpEntryNameList, userName, tempUserDir, tempUserZipDir, workspace)); infoShapeList.addAll(this.analyzeTifList(tifEntryNameList, userName, tempUserTifDir, workspace)); 
// svuota la cartella degli shape <tmp>/geoportal/UserDir File directory = new File(tempUserDir); File[] files = directory.listFiles(); for (File f : files) { f.delete(); } return infoShapeList; }
From source file:processing.app.Editor.java
protected String findClassInZipFile(String base, File file) { // Class file to search for String classFileName = "/" + base + ".class"; ZipFile zipFile = null; try {//from w w w .j a v a 2 s . c om zipFile = new ZipFile(file); Enumeration<?> entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry entry = (ZipEntry) entries.nextElement(); if (!entry.isDirectory()) { String name = entry.getName(); //System.out.println("entry: " + name); if (name.endsWith(classFileName)) { //int slash = name.lastIndexOf('/'); //String packageName = (slash == -1) ? "" : name.substring(0, slash); // Remove .class and convert slashes to periods. return name.substring(0, name.length() - 6).replace('/', '.'); } } } } catch (IOException e) { //System.err.println("Ignoring " + filename + " (" + e.getMessage() + ")"); e.printStackTrace(); } finally { if (zipFile != null) { try { zipFile.close(); } catch (IOException e) { // noop } } } return null; }
From source file:com.pari.mw.api.execute.reports.template.ReportTemplateRunner.java
/**
 * Executes a report template: downloads the template zip, parses its
 * index.xml and the request xml it references, then kicks off an
 * {@link ExportProgressMonitor} on a background thread to run the export.
 *
 * @param templateId       identifier of the template to run
 * @param serverIfProvider provider for server interfaces used during export
 * @param exportInfo       export destination/format details
 * @param customerId       customer the report is generated for
 * @param customerName     display name of the customer
 * @throws Exception if the template cannot be fetched, parsed, or executed
 */
public void executeReportTemplate(String templateId, ServerIfProvider serverIfProvider, ExportInfo exportInfo,
        int customerId, String customerName) throws Exception {
    ZipFile zipFile = null;
    InputStream indexStream = null;
    InputStream requestStream = null;
    try {
        // fetch the template archive to zipFileName (side effect of getZipFile)
        getZipFile(templateId, serverIfProvider, exportInfo, customerId);
        zipFile = new ZipFile(new File(zipFileName));
        // read the index xml
        indexStream = getInputStream(zipFile, "index.xml");
        if (indexStream != null) {
            templateIndexDef = ReportTemplateIndexDef.loadFromXML(indexStream);
        } else {
            logger.error("Unable to find the template index file.");
            throw new Exception("Unable to find the template index file.");
        }
        // read the request xml named by the index
        if (templateIndexDef != null) {
            String requestFileName = templateIndexDef.getXmlRequestFileName();
            if (requestFileName != null) {
                requestStream = getInputStream(zipFile, requestFileName);
                if (requestStream != null) {
                    templateRequestDef = ReportTemplateRequestDef.loadFromXML(requestStream);
                }
            }
        } else {
            logger.error("Unable to parse/ load the template index file.");
            throw new Exception("Unable to parse/ load the template index file.");
        }
        if (templateRequestDef != null) {
            // seed each declared template variable with its default value
            VariableListDefinition varListDef = templateRequestDef.getVariableList();
            Map<String, VariableDefinition> varMap = null;
            if (varListDef != null) {
                varMap = varListDef.getVariables();
            }
            Map<String, String> varValues = new HashMap<String, String>();
            if (varMap != null) {
                for (String var : varMap.keySet()) {
                    VariableDefinition def = varMap.get(var);
                    if (def != null) {
                        varValues.put(var, def.getDefaultValue());
                    }
                }
            }
            // run the actual export asynchronously; this method returns immediately
            ExportProgressMonitor exportProgressMonitor = new ExportProgressMonitor(templateRequestDef,
                    varValues, getDirectoryName(), ExportFormat.getEnumTypeFromExt(getExportFormat()),
                    serverIfProvider, exportInfo, customerId, customerName, templateId);
            Thread t = new Thread(exportProgressMonitor);
            t.start();
        } else {
            logger.error("Unable to parse/ load the template request file.");
            throw new Exception("Unable to parse/ load the template request file.");
        }
    } catch (Exception e) {
        logger.error("Report template execution failed.", e);
        throw new Exception("Report template execution failed");
    } finally {
        // close streams and the archive; each failure is logged but not fatal
        if (indexStream != null) {
            try {
                indexStream.close();
            } catch (IOException e) {
                logger.warn("Unable to close the InputStream " + indexStream);
            }
        }
        if (requestStream != null) {
            try {
                requestStream.close();
            } catch (IOException e) {
                logger.warn("Unable to close the InputStream " + requestStream);
            }
        }
        if (zipFile != null) {
            try {
                zipFile.close();
            } catch (IOException e) {
                logger.warn("Unable to close the Zip file.");
            }
        }
    }
}
From source file:org.broad.igv.feature.genome.GenomeManager.java
/** * Gets a list of all the locally cached genome archive files that * IGV knows about.// w ww .j a v a 2 s .c o m * * @return LinkedHashSet<GenomeListItem> * @throws IOException * @see GenomeListItem */ private List<GenomeListItem> getCachedGenomeArchiveList() throws IOException { if (cachedGenomeArchiveList == null) { cachedGenomeArchiveList = new LinkedList<GenomeListItem>(); if (!DirectoryManager.getGenomeCacheDirectory().exists()) { return cachedGenomeArchiveList; } File[] files = DirectoryManager.getGenomeCacheDirectory().listFiles(); for (File file : files) { if (file.isDirectory()) { continue; } if (!file.getName().toLowerCase().endsWith(Globals.GENOME_FILE_EXTENSION)) { continue; } ZipFile zipFile = null; FileInputStream fis = null; ZipInputStream zipInputStream = null; try { zipFile = new ZipFile(file); fis = new FileInputStream(file); zipInputStream = new ZipInputStream(new BufferedInputStream(fis)); ZipEntry zipEntry = zipFile.getEntry(Globals.GENOME_ARCHIVE_PROPERTY_FILE_NAME); if (zipEntry == null) { continue; // Should never happen } InputStream inputStream = zipFile.getInputStream(zipEntry); Properties properties = new Properties(); properties.load(inputStream); int version = 0; if (properties.containsKey(Globals.GENOME_ARCHIVE_VERSION_KEY)) { try { version = Integer.parseInt(properties.getProperty(Globals.GENOME_ARCHIVE_VERSION_KEY)); } catch (Exception e) { log.error("Error parsing genome version: " + version, e); } } GenomeListItem item = new GenomeListItem( properties.getProperty(Globals.GENOME_ARCHIVE_NAME_KEY), file.getAbsolutePath(), properties.getProperty(Globals.GENOME_ARCHIVE_ID_KEY)); cachedGenomeArchiveList.add(item); } catch (ZipException ex) { log.error("\nZip error unzipping cached genome.", ex); try { file.delete(); zipInputStream.close(); } catch (Exception e) { //ignore exception when trying to delete file } } catch (IOException ex) { log.warn("\nIO error unzipping cached genome.", ex); try { file.delete(); } catch (Exception e) { 
//ignore exception when trying to delete file } } finally { try { if (zipInputStream != null) { zipInputStream.close(); } if (zipFile != null) { zipFile.close(); } if (fis != null) { fis.close(); } } catch (IOException ex) { log.warn("Error closing genome zip stream!", ex); } } } } return cachedGenomeArchiveList; }
From source file:com.mirth.connect.server.controllers.DefaultExtensionController.java
@Override public InstallationResult extractExtension(InputStream inputStream) { Throwable cause = null;//from w w w. j a va 2 s . c om Set<MetaData> metaDataSet = new HashSet<MetaData>(); File installTempDir = new File(ExtensionController.getExtensionsPath(), "install_temp"); if (!installTempDir.exists()) { installTempDir.mkdir(); } File tempFile = null; FileOutputStream tempFileOutputStream = null; ZipFile zipFile = null; try { /* * create a new temp file (in the install temp dir) to store the zip file contents */ tempFile = File.createTempFile(ServerUUIDGenerator.getUUID(), ".zip", installTempDir); // write the contents of the multipart fileitem to the temp file try { tempFileOutputStream = new FileOutputStream(tempFile); IOUtils.copy(inputStream, tempFileOutputStream); } finally { IOUtils.closeQuietly(tempFileOutputStream); } // create a new zip file from the temp file zipFile = new ZipFile(tempFile); // get a list of all of the entries in the zip file Enumeration<? extends ZipEntry> entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); String entryName = entry.getName(); if (entryName.endsWith("plugin.xml") || entryName.endsWith("destination.xml") || entryName.endsWith("source.xml")) { // parse the extension metadata xml file MetaData extensionMetaData = serializer .deserialize(IOUtils.toString(zipFile.getInputStream(entry)), MetaData.class); metaDataSet.add(extensionMetaData); if (!extensionLoader.isExtensionCompatible(extensionMetaData)) { if (cause == null) { cause = new VersionMismatchException("Extension \"" + entry.getName() + "\" is not compatible with this version of Mirth Connect."); } } } } if (cause == null) { // reset the entries and extract entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); if (entry.isDirectory()) { /* * assume directories are stored parents first then children. * * TODO: this is not robust, just for demonstration purposes. 
*/ File directory = new File(installTempDir, entry.getName()); directory.mkdir(); } else { // otherwise, write the file out to the install temp dir InputStream zipInputStream = zipFile.getInputStream(entry); OutputStream outputStream = new BufferedOutputStream( new FileOutputStream(new File(installTempDir, entry.getName()))); IOUtils.copy(zipInputStream, outputStream); IOUtils.closeQuietly(zipInputStream); IOUtils.closeQuietly(outputStream); } } } } catch (Throwable t) { cause = new ControllerException("Error extracting extension. " + t.toString(), t); } finally { if (zipFile != null) { try { zipFile.close(); } catch (Exception e) { cause = new ControllerException(e); } } // delete the temp file since it is no longer needed FileUtils.deleteQuietly(tempFile); } return new InstallationResult(cause, metaDataSet); }
From source file:edu.umd.cs.marmoset.utilities.ZipExtractor.java
/**
 * Extract the zip file into the given directory, skipping entries with
 * control/angle-bracket characters in their names and entries whose
 * canonical path would escape the target directory (zip-slip defense).
 *
 * @param directory the destination directory for extracted entries
 * @throws IOException on read/write failure
 * @throws ZipExtractorException if a destination directory cannot be created
 */
public void extract(File directory) throws IOException, ZipExtractorException {
    ZipFile z = new ZipFile(zipFile);
    // reject entry names containing control characters or < >
    Pattern badName = Pattern.compile("[\\p{Cntrl}<>]");
    try {
        Enumeration<? extends ZipEntry> entries = z.entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            String entryName = entry.getName();
            // subclass hook: filter which entries get extracted at all
            if (!shouldExtract(entryName))
                continue;
            if (badName.matcher(entryName).find()) {
                if (entry.getSize() > 0)
                    getLog().debug("Skipped entry of length " + entry.getSize() + " with bad file name "
                            + java.net.URLEncoder.encode(entryName, "UTF-8"));
                continue;
            }
            try {
                // Get the filename to extract the entry into.
                // Subclasses may define this to be something other
                // than the entry name.
                String entryFileName = transformFileName(entryName);
                if (!entryFileName.equals(entryName)) {
                    getLog().debug("Transformed zip entry name: " + entryName + " ==> " + entryFileName);
                }
                entriesExtractedFromZipArchive.add(entryFileName);
                File entryOutputFile = new File(directory, entryFileName).getAbsoluteFile();
                File parentDir = entryOutputFile.getParentFile();
                if (!parentDir.exists()) {
                    if (!parentDir.mkdirs()) {
                        throw new ZipExtractorException(
                                "Couldn't make directory for entry output file " + entryOutputFile.getPath());
                    }
                }
                if (!parentDir.isDirectory()) {
                    throw new ZipExtractorException(
                            "Parent directory for entry " + entryOutputFile.getPath() + " is not a directory");
                }
                // Make sure the entry output file lies within the build directory.
                // A malicious zip file might have ".." components in it.
                getLog().trace("entryOutputFile path: " + entryOutputFile.getCanonicalPath());
                if (!entryOutputFile.getCanonicalPath().startsWith(directory.getCanonicalPath() + "/")) {
                    // NOTE(review): the two concatenated strings below lack a
                    // separating space in the logged message — confirm intended.
                    if (!entry.isDirectory())
                        getLog().warn("Zip entry " + entryName + " accesses a path " + entryOutputFile.getPath()
                                + "outside the build directory " + directory.getPath());
                    continue;
                }
                if (entry.isDirectory()) {
                    entryOutputFile.mkdir();
                    continue;
                }
                // Extract the entry
                InputStream entryInputStream = null;
                OutputStream entryOutputStream = null;
                try {
                    entryInputStream = z.getInputStream(entry);
                    entryOutputStream = new BufferedOutputStream(new FileOutputStream(entryOutputFile));
                    CopyUtils.copy(entryInputStream, entryOutputStream);
                } finally {
                    IOUtils.closeQuietly(entryInputStream);
                    IOUtils.closeQuietly(entryOutputStream);
                }
                // Hook for subclasses, to specify when entries are
                // successfully extracted.
                successfulFileExtraction(entryName, entryFileName);
                ++numFilesExtacted;
            } catch (RuntimeException e) {
                getLog().error("Error extracting " + entryName, e);
                throw e;
            }
        }
    } finally {
        z.close();
    }
}