List of usage examples for java.util.zip.ZipFile.close()
public void close() throws IOException
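Since Java 7, ZipFile implements java.io.Closeable, so close() is usually best delegated to try-with-resources: the underlying file handle is released even when reading an entry throws. Most of the examples below call close() explicitly, and several only reach it on the success path. A minimal sketch (the archive path and entry name are placeholders):

import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class ZipFileCloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources calls zipFile.close() automatically, even if an exception is thrown
        try (ZipFile zipFile = new ZipFile("archive.zip")) {
            ZipEntry entry = zipFile.getEntry("readme.txt");
            if (entry != null) {
                try (InputStream in = zipFile.getInputStream(entry)) {
                    System.out.println(entry.getName() + " is " + entry.getSize() + " bytes");
                }
            }
        } // zipFile.close() has been invoked by this point
    }
}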
From source file:it.readbeyond.minstrel.librarian.FormatHandlerAbstractZIP.java
protected void extractCover(File f, Format format, String publicationID) {
    String destinationName = publicationID + "." + format.getName() + ".png";
    String entryName = format.getMetadatum("internalPathCover");
    if ((entryName == null) || (entryName.equals(""))) {
        format.addMetadatum("relativePathThumbnail", "");
        return;
    }
    try {
        ZipFile zipFile = new ZipFile(f, ZipFile.OPEN_READ);
        ZipEntry entry = zipFile.getEntry(entryName);
        File destFile = new File(this.thumbnailDirectoryPath, "orig-" + destinationName);
        String destinationPath = destFile.getAbsolutePath();
        BufferedInputStream is = new BufferedInputStream(zipFile.getInputStream(entry));
        int numberOfBytesRead;
        byte data[] = new byte[BUFFER_SIZE];
        FileOutputStream fos = new FileOutputStream(destFile);
        BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER_SIZE);
        while ((numberOfBytesRead = is.read(data, 0, BUFFER_SIZE)) > -1) {
            dest.write(data, 0, numberOfBytesRead);
        }
        dest.flush();
        dest.close();
        is.close();
        fos.close();

        // create thumbnail
        FileInputStream fis = new FileInputStream(destinationPath);
        Bitmap imageBitmap = BitmapFactory.decodeStream(fis);
        imageBitmap = Bitmap.createScaledBitmap(imageBitmap, this.thumbnailWidth, this.thumbnailHeight, false);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        imageBitmap.compress(Bitmap.CompressFormat.PNG, 100, baos);
        byte[] imageData = baos.toByteArray();

        // write thumbnail to file
        File destFile2 = new File(this.thumbnailDirectoryPath, destinationName);
        String destinationPath2 = destFile2.getAbsolutePath();
        FileOutputStream fos2 = new FileOutputStream(destFile2);
        fos2.write(imageData, 0, imageData.length);
        fos2.flush();
        fos2.close();
        baos.close();

        // close ZIP
        zipFile.close();

        // delete original cover
        destFile.delete();

        // set relativePathThumbnail
        format.addMetadatum("relativePathThumbnail", destinationName);
    } catch (Exception e) {
        // nop
    }
}
From source file:it.readbeyond.minstrel.librarian.FormatHandlerEPUB.java
public Publication parseFile(File file) {
    Publication p = super.parseFile(file);
    Format format = new Format(EPUB_FORMAT);
    pathThumbnailSourceRelativeToOPF = null;
    coverManifestItemID = null;
    String OPFPath = this.getOPFPath(file);
    if (OPFPath != null) {
        try {
            ZipFile zipFile = new ZipFile(file, ZipFile.OPEN_READ);
            ZipEntry OPFEntry = zipFile.getEntry(OPFPath);
            if (OPFEntry != null) {
                InputStream is;

                // first pass: parse metadata
                is = zipFile.getInputStream(OPFEntry);
                parser = Xml.newPullParser();
                parser.setFeature(XmlPullParser.FEATURE_PROCESS_NAMESPACES, true);
                parser.setInput(is, null);
                parser.nextTag();
                this.parsePackage(format, 1);
                is.close();

                // second pass: parse manifest
                is = zipFile.getInputStream(OPFEntry);
                parser = Xml.newPullParser();
                parser.setFeature(XmlPullParser.FEATURE_PROCESS_NAMESPACES, true);
                parser.setInput(is, null);
                parser.nextTag();
                this.parsePackage(format, 2);
                is.close();

                // item is valid
                p.isValid(true);

                // set the cover path, relative to the EPUB container (aka ZIP) root
                if (pathThumbnailSourceRelativeToOPF != null) {
                    // concatenate OPFPath parent directory and pathThumbnailSourceRelativeToOPF
                    File tmp = new File(new File(OPFPath).getParent(), pathThumbnailSourceRelativeToOPF);
                    // remove leading "/"
                    format.addMetadatum("internalPathCover", tmp.getAbsolutePath().substring(1));
                }
            }
            zipFile.close();
        } catch (Exception e) {
            // invalidate item, so it will not be added to library
            p.isValid(false);
        }
    }
    p.addFormat(format);
    // extract cover
    super.extractCover(file, format, p.getID());
    return p;
}
From source file:fr.gouv.finances.dgfip.xemelios.importers.archives.ArchiveImporter.java
public Errors doImport() {
    Errors errors = new Errors();
    try {
        ZipFile zipArchive = new ZipFile(fileToImport);
        ZipEntry manifesteEntry = zipArchive.getEntry(MANIFESTE_FILE_NAME);
        archiveManifeste = getManisfesteFromArchive(zipArchive.getInputStream(manifesteEntry));
        archiveManifeste.getRootElement()
                .addAttribute(new Attribute("archive-name", getArchiveName(fileToImport)));
        zipArchive.close();
        HashMap<String, Object> importProperties = extractPropertiesFromArchiveManifeste(archiveManifeste);
        for (String docType : (String[]) importProperties.get("archiveDocumentTypes")) {
            if (!docType.equals("PJ")
                    && !DataLayerManager.getImplementation().canImportDocument(docType, getUser())) {
                errors.addError(Errors.SEVERITY_WARNING, "Impossible d'importer ce type de document ("
                        + docType + "), la base de données doit d'abord être mise à jour.");
            }
        }
        importedArchiveManifeste = DataLayerManager.getImplementation()
                .getManifesteFromArchive(importProperties.get("archiveName").toString(), getUser());
        definePropertiesFromImportedManifeste(importedArchiveManifeste, importProperties);
        Element historique = null;
        if (importedArchiveManifeste != null) {
            historique = (Element) importedArchiveManifeste
                    .query("/m:manifeste/m:evenements", getNamespaceCtx()).get(0);
            // to get an element without a parent, so it can be added wherever needed
            historique = new Element(historique);
        } else {
            historique = new Element("evenements");
        }
        archiveManifeste.getRootElement().appendChild(historique);
        boolean sectionApplied = false;
        for (SectionModel section : rules.getSections()) {
            SectionModel theSection = section;
            if (theSection.getPredicat().matches(importProperties)) {
                logger.info("Application de la règle " + theSection.getName());
                int globalOverwriteRule = OVERWRITE_RULE_UNSET;
                if (theSection.getActions().isUsePreviousSection()) {
                    // then there is necessarily a previously imported manifest; fetch its section
                    Element sectionElement = (Element) importedArchiveManifeste
                            .query("/m:manifeste/rul:section", getNamespaceCtx()).get(0);
                    if (sectionElement == null)
                        throw new ImportException(new Errors.Error(Errors.SEVERITY_ERROR,
                                "la section " + theSection.getName()
                                        + " impose l'application de la section du précédent import, mais celui-ci n'a pas été trouvé."),
                                null);
                    theSection = new SectionModel(sectionElement);
                    // and remove all of the archive's data
                    HashMap<String, DocumentModel> docsToDrop = new HashMap<String, DocumentModel>();
                    for (String docId : (String[]) importProperties.get("archiveImportedDocumentTypes")) {
                        docsToDrop.put(docId, documentsModel.getDocumentById(docId));
                    }
                    DataLayerManager.getImplementation().removeArchive(docsToDrop,
                            importProperties.get("archiveName").toString(), getUser());
                    Nodes deleteActions = importedArchiveManifeste.query("/m:manifeste/m:on-delete/m:action",
                            getNamespaceCtx());
                    for (int i = 0; i < deleteActions.size(); i++) {
                        Element action = (Element) deleteActions.get(i);
                        doApplyAction(action);
                    }
                    // at this point there is nothing to update in the manifest, all documents have been removed
                } else {
                    if (importedArchiveManifeste != null) {
                        // carry over the history of each document
                        Nodes importedDocuments = importedArchiveManifeste.query("//m:document",
                                getNamespaceCtx());
                        for (int i = 0; i < importedDocuments.size(); i++) {
                            Element importedDoc = (Element) importedDocuments.get(i);
                            Element thisDoc = getElement(
                                    archiveManifeste.query("/manifeste/documents/document[@path='"
                                            + importedDoc.getAttributeValue("path") + "']"));
                            if (thisDoc != null) {
                                String __imported = importedDoc.getAttributeValue("imported");
                                thisDoc.addAttribute(new Attribute("imported", __imported));
                                if ("Oui".equals(__imported)) {
                                    Element result = getElement(
                                            importedDoc.query("m:resultatimport", getNamespaceCtx()));
                                    if (result != null)
                                        thisDoc.appendChild(new Element(result));
                                }
                            }
                        }
                    }
                }
                if (theSection.getOverwriteRule() != null) {
                    if (EtatImporteur.OVERWRITE_RULE_ALWAYS.equals(theSection.getOverwriteRule()))
                        globalOverwriteRule = OVERWRITE_RULE_OVERWRITE;
                    else if (EtatImporteur.OVERWRITE_RULE_NEVER.equals(theSection.getOverwriteRule()))
                        globalOverwriteRule = OVERWRITE_RULE_SKIP;
                }
                // fetch the list of documents to import, to check that they are all available
                List<Element> documentsToImport = theSection.getDocumentsToImport(archiveManifeste,
                        applicationProperties);
                if (documentsToImport.size() > 0) {
                    TreeSet<String> volumesRequired = new TreeSet<String>();
                    for (Element filePlannedToImport : documentsToImport) {
                        volumesRequired.add(filePlannedToImport.getAttributeValue("volume"));
                    }
                    int maxVolumes = Integer.parseInt(volumesRequired.last());
                    File[] volumes = new File[maxVolumes + 1];
                    for (String volume : volumesRequired) {
                        String fichierVolume = archiveManifeste
                                .query("/manifeste/volumes/volume[@num=" + volume + "]/@fichier").get(0)
                                .getValue();
                        File f = new File(fileToImport.getParentFile(), fichierVolume);
                        if (!f.exists()) {
                            errors.addError(Errors.SEVERITY_ERROR, f.getAbsolutePath() + " non trouvé");
                        }
                        volumes[Integer.parseInt(volume)] = f;
                    }
                    if (!errors.containsError()) {
                        logger.info("displayProgress(" + (documentsToImport.size() + 1) + ")");
                        // extension point allowing modifications in the ArchiveImporteur
                        preImport(theSection, archiveManifeste);
                        importServiceProvider.displayProgress(documentsToImport.size() + 1);
                        boolean doImport = false;
                        boolean doDelete = false;
                        // process the actions
                        for (XomDefinitionable dd : theSection.getActions().getActions()) {
                            if (dd instanceof ImportModel) {
                                //ImportModel im = (ImportModel)dd;
                                // the list of files to import has already been determined, so import them
                                for (Element documentToImport : documentsToImport) {
                                    int vol = Integer.parseInt(documentToImport.getAttributeValue("volume"));
                                    try {
                                        FileInfo fileInfo = doImportDocument(documentToImport, volumes[vol],
                                                importProperties, globalOverwriteRule);
                                        if (fileInfo.getInProcessException() != null)
                                            errors.addError(Errors.SEVERITY_ERROR,
                                                    fileInfo.getInProcessException().getMessage());
                                        if (__fileInfo == null)
                                            __fileInfo = fileInfo;
                                        else
                                            __fileInfo.merge(fileInfo);
                                        if (fileInfo.getGlobalCount() == 0) {
                                            // nothing was imported, probably because the overwrite rule
                                            // said not to import, so nothing is modified
                                        } else {
                                            Element result = new Element("resultatimport");
                                            result.addAttribute(new Attribute("Duree",
                                                    DateUtils.durationToString(fileInfo.getDurationImport())));
                                            result.addAttribute(new Attribute("Debut", DateUtils
                                                    .formatXsDateTime(new Date(fileInfo.getDebutImport()))));
                                            result.addAttribute(new Attribute("Fin", DateUtils
                                                    .formatXsDateTime(new Date(fileInfo.getFinImport()))));
                                            // remove the previous import result, if there was one...
                                            Nodes previousResults = documentToImport.query(
                                                    "resultatimport | m:resultatimport", getNamespaceCtx());
                                            for (int i = previousResults.size() - 1; i >= 0; i--) {
                                                Element __res = (Element) previousResults.get(i);
                                                documentToImport.removeChild(__res);
                                            }
                                            documentToImport.insertChild(result, 0);
                                            documentToImport.addAttribute(new Attribute("imported", "Oui"));
                                        }
                                        // apply any actions attached to this document
                                        Nodes actions = archiveManifeste.query("/manifeste/action[@depends-on='"
                                                + documentToImport.getAttributeValue("path") + "']");
                                        for (int i = 0; i < actions.size(); i++) {
                                            Element action = (Element) actions.get(i);
                                            try {
                                                FileInfo actFileInfo = doApplyAction(action);
                                                if (__fileInfo == null)
                                                    __fileInfo = actFileInfo;
                                                else
                                                    __fileInfo.merge(actFileInfo);
                                            } catch (Exception ex) {
                                                logger.error("while applying " + action.toXML(), ex);
                                            }
                                        }
                                    } catch (Exception ex) {
                                        logger.error("while importing "
                                                + documentToImport.getAttributeValue("path"), ex);
                                        documentToImport.addAttribute(
                                                new Attribute("imported", "Erreur: " + ex.getMessage()));
                                    }
                                }
                                doImport = true;
                            } else if (dd instanceof DeleteModel) {
                                importServiceProvider.startLongWait();
                                DeleteModel dm = (DeleteModel) dd;
                                if (dm.getArchive() != null) {
                                    String archiveName = null;
                                    if ("archiveName".equals(dm.getArchive())) {
                                        // replace with the name of the archive
                                        archiveName = importProperties.get("archiveName").toString();
                                        // for now, deleting another archive is not allowed
                                        HashMap<String, DocumentModel> map = new HashMap<String, DocumentModel>();
                                        for (String s : (String[]) importProperties.get("archiveDocumentTypes")) {
                                            map.put(s, documentsModel.getDocumentById(s));
                                        }
                                        DataLayerManager.getImplementation().removeArchive(map, archiveName,
                                                getUser());
                                        Nodes documents = archiveManifeste.query("/manifeste/documents/document");
                                        for (int i = 0; i < documents.size(); i++) {
                                            Element doc = (Element) documents.get(i);
                                            Element resultImport = getElement(doc.query(
                                                    "m:resultatimport | resultatimport", getNamespaceCtx()));
                                            if (resultImport != null)
                                                doc.removeChild(resultImport);
                                            doc.addAttribute(new Attribute("imported", "Non"));
                                        }
                                        // apply all the actions, since all documents have been removed
                                        Nodes actions = archiveManifeste
                                                .query("/manifeste/on-delete/action[@depends-on]");
                                        for (int i = 0; i < actions.size(); i++) {
                                            Element action = (Element) actions.get(i);
                                            try {
                                                FileInfo fileInfo = doApplyAction(action);
                                                if (__fileInfo == null)
                                                    __fileInfo = fileInfo;
                                                else
                                                    __fileInfo.merge(fileInfo);
                                            } catch (Exception ex) {
                                                logger.error("while applying " + action.toXML(), ex);
                                            }
                                        }
                                    } else if (dm.getTypeDoc() != null) {
                                        if (dm.getCollectivite() != null) {
                                            if (dm.getBudget() != null) {
                                                if (dm.getFileName() != null) {
                                                    DataLayerManager.getImplementation().removeDocument(
                                                            documentsModel.getDocumentById(dm.getTypeDoc()),
                                                            new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                            new Pair(dm.getBudget(), dm.getBudget()),
                                                            dm.getFileName(), user);
                                                    Nodes documents = archiveManifeste
                                                            .query("/manifeste/documents/document[@type='"
                                                                    + dm.getTypeDoc() + "' and @buIdCol='"
                                                                    + dm.getCollectivite() + "' and @buCode='"
                                                                    + dm.getBudget() + "' and ends-with(@path,'"
                                                                    + dm.getFileName() + "')]");
                                                    for (int i = 0; i < documents.size(); i++) {
                                                        Element doc = (Element) documents.get(i);
                                                        Element resultImport = getElement(
                                                                doc.query("m:resultatimport | resultatimport",
                                                                        getNamespaceCtx()));
                                                        if (resultImport != null)
                                                            doc.removeChild(resultImport);
                                                        doc.addAttribute(new Attribute("imported", "Non"));
                                                    }
                                                    // apply the document's actions
                                                    Nodes actions = archiveManifeste.query(
                                                            "/manifeste/on-delete/action[ends-with(@depends-on,'"
                                                                    + dm.getFileName() + "')]");
                                                    for (int i = 0; i < actions.size(); i++) {
                                                        Element action = (Element) actions.get(i);
                                                        try {
                                                            FileInfo fileInfo = doApplyAction(action);
                                                            if (__fileInfo == null)
                                                                __fileInfo = fileInfo;
                                                            else
                                                                __fileInfo.merge(fileInfo);
                                                        } catch (Exception ex) {
                                                            logger.error("while applying " + action.toXML(), ex);
                                                        }
                                                    }
                                                } else {
                                                    DataLayerManager.getImplementation().removeBudget(
                                                            documentsModel.getDocumentById(dm.getTypeDoc()),
                                                            new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                            new Pair(dm.getBudget(), dm.getBudget()), user);
                                                    Nodes documents = archiveManifeste
                                                            .query("/manifeste/documents/document[@type='"
                                                                    + dm.getTypeDoc() + "' and @buIdCol='"
                                                                    + dm.getCollectivite() + "' and @buCode='"
                                                                    + dm.getBudget() + "']");
                                                    for (int i = 0; i < documents.size(); i++) {
                                                        Element doc = (Element) documents.get(i);
                                                        Element resultImport = getElement(
                                                                doc.query("m:resultatimport | resultatimport",
                                                                        getNamespaceCtx()));
                                                        if (resultImport != null)
                                                            doc.removeChild(resultImport);
                                                        doc.addAttribute(new Attribute("imported", "Non"));
                                                        // apply the document's actions
                                                        Nodes actions = archiveManifeste.query(
                                                                "/manifeste/on-delete/action[@depends-on='"
                                                                        + doc.getAttributeValue("path") + "']");
                                                        for (int a = 0; a < actions.size(); a++) {
                                                            Element action = (Element) actions.get(a);
                                                            try {
                                                                FileInfo fileInfo = doApplyAction(action);
                                                                if (__fileInfo == null)
                                                                    __fileInfo = fileInfo;
                                                                else
                                                                    __fileInfo.merge(fileInfo);
                                                            } catch (Exception ex) {
                                                                logger.error("while applying " + action.toXML(), ex);
                                                            }
                                                        }
                                                    }
                                                }
                                            } else {
                                                DataLayerManager.getImplementation().removeCollectivite(
                                                        documentsModel.getDocumentById(dm.getTypeDoc()),
                                                        new Pair(dm.getCollectivite(), dm.getCollectivite()), user);
                                                Nodes documents = archiveManifeste
                                                        .query("/manifeste/documents/document[@type='"
                                                                + dm.getTypeDoc() + "' and @buIdCol='"
                                                                + dm.getCollectivite() + "']");
                                                for (int i = 0; i < documents.size(); i++) {
                                                    Element doc = (Element) documents.get(i);
                                                    Element resultImport = getElement(
                                                            doc.query("m:resultatimport | resultatimport",
                                                                    getNamespaceCtx()));
                                                    if (resultImport != null)
                                                        doc.removeChild(resultImport);
                                                    doc.addAttribute(new Attribute("imported", "Non"));
                                                    // apply the document's actions
                                                    Nodes actions = archiveManifeste
                                                            .query("/manifeste/on-delete/action[@depends-on='"
                                                                    + doc.getAttributeValue("path") + "']");
                                                    for (int a = 0; a < actions.size(); a++) {
                                                        Element action = (Element) actions.get(a);
                                                        try {
                                                            FileInfo fileInfo = doApplyAction(action);
                                                            if (__fileInfo == null)
                                                                __fileInfo = fileInfo;
                                                            else
                                                                __fileInfo.merge(fileInfo);
                                                        } catch (Exception ex) {
                                                            logger.error("while applying " + action.toXML(), ex);
                                                        }
                                                    }
                                                }
                                            }
                                        } else {
                                            DataLayerManager.getImplementation().removeDocumentModel(
                                                    documentsModel.getDocumentById(dm.getTypeDoc()), user);
                                            Nodes documents = archiveManifeste
                                                    .query("/manifeste/documents/document[@type='"
                                                            + dm.getTypeDoc() + "']");
                                            for (int i = 0; i < documents.size(); i++) {
                                                Element doc = (Element) documents.get(i);
                                                Element resultImport = getElement(
                                                        doc.query("m:resultatimport | resultatimport",
                                                                getNamespaceCtx()));
                                                if (resultImport != null)
                                                    doc.removeChild(resultImport);
                                                doc.addAttribute(new Attribute("imported", "Non"));
                                                // apply the document's actions
                                                Nodes actions = archiveManifeste
                                                        .query("/manifeste/on-delete/action[@depends-on='"
                                                                + doc.getAttributeValue("path") + "']");
                                                for (int a = 0; a < actions.size(); a++) {
                                                    Element action = (Element) actions.get(a);
                                                    try {
                                                        FileInfo fileInfo = doApplyAction(action);
                                                        if (__fileInfo == null)
                                                            __fileInfo = fileInfo;
                                                        else
                                                            __fileInfo.merge(fileInfo);
                                                    } catch (Exception ex) {
                                                        logger.error("while applying " + action.toXML(), ex);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                                doDelete = true;
                            }
                            importServiceProvider.endLongWait();
                        }
                        if (doImport) {
                            // for compatibility with pre-2011 archives, process all actions that are
                            // not on-delete and have no depends-on
                            Nodes actions = archiveManifeste.query("/manifeste/action[not(@depends-on)]");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                        if (doImport) {
                            // for the edmn patches, apply the actions
                            Nodes actions = archiveManifeste.query("/manifeste/actions/action");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                        if (doDelete) {
                            // for compatibility with pre-2011 archives, process all actions that are
                            // not on-delete and have no depends-on
                            Nodes actions = archiveManifeste
                                    .query("/manifeste/on-delete/action[not(@depends-on)]");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                    }
                    // determine here whether there is any data or not
                    if (archiveManifeste.query("/manifeste/documents/document[@imported='Oui']").size() > 0)
                        archiveManifeste.getRootElement()
                                .addAttribute(new Attribute("added:archive", Constants.ADDED_NS_URI, "Oui"));
                    else
                        archiveManifeste.getRootElement()
                                .addAttribute(new Attribute("added:archive", Constants.ADDED_NS_URI, "Non"));
                    // record the actions that were applied in the manifest,
                    // so we know what to redo later...
                    archiveManifeste.getRootElement().appendChild(theSection.getXomDefinition());
                    sectionApplied = true;
                    break;
                } else {
                    // there was nothing to import, but the section was still applied
                    sectionApplied = true;
                    break;
                }
            }
        }
        if (sectionApplied) {
            // all sections have been processed, but maybe nothing happened...
            boolean somethingHasBeenImported = false;
            Nodes nodes = archiveManifeste.query("//document[@imported]");
            somethingHasBeenImported = nodes.size() > 0;
            // find all documents that were not processed and set @imported='false' on them
            nodes = archiveManifeste.query("//document[not(@imported)]");
            for (int i = 0; i < nodes.size(); i++) {
                Element el = (Element) nodes.get(i);
                el.addAttribute(new Attribute("imported", "Non"));
            }
            archiveManifeste.getRootElement()
                    .addAttribute(new Attribute("imported", Boolean.toString(somethingHasBeenImported)));
            Element result = new Element("resultatimport");
            if (result != null) {
                // you never know... in case of a deletion, for example...
                if (__fileInfo != null) {
                    result.addAttribute(
                            new Attribute("Duree", DateUtils.durationToString(__fileInfo.getDurationImport())));
                    result.addAttribute(new Attribute("Debut",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getDebutImport()))));
                    result.addAttribute(new Attribute("Fin",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getFinImport()))));
                    result.addAttribute(new Attribute("User", getUser().getId()));
                    result.addAttribute(new Attribute("LastModify",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getLastModify()))));
                    result.appendChild(__fileInfo.toXomXml(documentsModel));
                }
            }
            archiveManifeste.getRootElement().appendChild(result);
            // the import history
            Element event = new Element("evenement");
            event.addAttribute(new Attribute("date", DateUtils.formatXsDateTime(new Date())));
            event.addAttribute(new Attribute("user", getUser().getId()));
            event.addAttribute(new Attribute("section", archiveManifeste
                    .query("/manifeste/rul:section/@name", getNamespaceCtx()).get(0).getValue()));
            String version = archiveManifeste.getRootElement().getAttributeValue("version");
            if (version == null || version.length() == 0)
                version = "1";
            event.addAttribute(new Attribute("version-archive", version));
            historique.insertChild(event, 0);
            doImportManifeste(archiveManifeste, importProperties.get("archiveName").toString());
            DataLayerManager.getImplementation()
                    .declareArchiveImported(importProperties.get("archiveName").toString(), user);
            // System.out.println(archiveManifeste.toXML());
        } else {
            errors.addError(Errors.SEVERITY_WARNING,
                    "Cette archive ne peut être importée par aucune des règles de cette configuration.");
        }
    } catch (XPathExpressionException ex) {
        logger.error(ex.getMessage(), ex);
        errors.addError(Errors.SEVERITY_ERROR, ex.getMessage());
    } catch (IOException ioEx) {
        logger.error(fileToImport.getAbsolutePath() + ": " + ioEx.getMessage(), ioEx);
        errors.addError(Errors.SEVERITY_ERROR, ioEx.getMessage());
    } catch (ImportException iEx) {
        Errors.Error error = iEx.error;
        errors.addError(error);
    } catch (DataConfigurationException dcEx) {
        logger.error(dcEx.getMessage(), dcEx);
        errors.addError(Errors.SEVERITY_ERROR, dcEx.getMessage());
    } catch (DataAccessException daEx) {
        logger.error(daEx.getMessage(), daEx);
        errors.addError(Errors.SEVERITY_ERROR, daEx.getMessage());
    } catch (UnauthorizedException ex) {
        logger.error(ex.getMessage(), ex);
        errors.addError(Errors.SEVERITY_ERROR, ex.getMessage());
    } catch (Throwable t) {
        t.printStackTrace();
        errors.addError(Errors.SEVERITY_ERROR, t.getMessage());
    } finally {
        // try { zipArchive.close(); } catch(Exception ex) {}
    }
    return errors;
}
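In the example above, the ZipFile is closed immediately after the manifest entry has been read, and the close() call in the finally block is commented out (zipArchive is declared inside the try block, so it would not even be in scope there). A hedged sketch of the usual pre-Java-7 pattern that guarantees the archive is closed on every path (the variable names follow the example, the body is elided):

ZipFile zipArchive = null;
try {
    zipArchive = new ZipFile(fileToImport);
    // ... read MANIFESTE_FILE_NAME and process the archive ...
} finally {
    if (zipArchive != null) {
        try {
            zipArchive.close();
        } catch (IOException ignored) {
            // nothing useful can be done if closing fails
        }
    }
}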
From source file:org.jvnet.hudson.plugins.thinbackup.backup.BackupSet.java
private boolean initializeFromZipFile() {
    boolean success = true;
    ZipFile zipFile = null;
    try {
        Utils.waitUntilFileCanBeRead(backupSetzipFile);
        zipFile = new ZipFile(backupSetzipFile);
        final Enumeration<? extends ZipEntry> zipEntries = zipFile.entries();
        while (zipEntries.hasMoreElements() && success) {
            final ZipEntry entry = zipEntries.nextElement();
            String tmpName = entry.getName();
            tmpName = tmpName.substring(0, tmpName.indexOf(File.separator));
            if (tmpName.startsWith(BackupType.FULL.toString())) {
                if ((fullBackupName == null) || fullBackupName.equals(tmpName)) {
                    fullBackupName = tmpName;
                } else {
                    LOGGER.warning(String.format(
                            "Backup set '%s' contains multiple full backups and is therefore not valid.",
                            zipFile.getName()));
                    success = false;
                }
            } else if (tmpName.startsWith(BackupType.DIFF.toString()) && !diffBackupsNames.contains(tmpName)) {
                diffBackupsNames.add(tmpName);
            }
        }
    } catch (final IOException e) {
        LOGGER.log(Level.SEVERE,
                String.format("Cannot initialize BackupSet from ZIP file '%s'.", backupSetzipFile.getName()), e);
        success = false;
    } finally {
        try {
            if (zipFile != null) {
                zipFile.close();
            }
        } catch (final IOException e) {
            LOGGER.log(Level.SEVERE,
                    String.format("Cannot close ZIP file '%s'.", backupSetzipFile.getName()), e);
            success = false;
        }
    }
    return success;
}
From source file:bammerbom.ultimatecore.bukkit.UltimateUpdater.java
/**
 * Part of Zip-File-Extractor, modified by Gravity for use with Updater.
 *
 * @param file the location of the file to extract.
 */
private void unzip(String file) {
    final File fSourceZip = new File(file);
    try {
        final String zipPath = file.substring(0, file.length() - 4);
        ZipFile zipFile = new ZipFile(fSourceZip);
        Enumeration<? extends ZipEntry> e = zipFile.entries();
        while (e.hasMoreElements()) {
            ZipEntry entry = e.nextElement();
            File destinationFilePath = new File(zipPath, entry.getName());
            destinationFilePath.getParentFile().mkdirs();
            if (!entry.isDirectory()) {
                final BufferedInputStream bis = new BufferedInputStream(zipFile.getInputStream(entry));
                int b;
                final byte[] buffer = new byte[UltimateUpdater.BYTE_SIZE];
                final FileOutputStream fos = new FileOutputStream(destinationFilePath);
                final BufferedOutputStream bos = new BufferedOutputStream(fos, UltimateUpdater.BYTE_SIZE);
                while ((b = bis.read(buffer, 0, UltimateUpdater.BYTE_SIZE)) != -1) {
                    bos.write(buffer, 0, b);
                }
                bos.flush();
                bos.close();
                bis.close();
                final String name = destinationFilePath.getName();
                if (name.endsWith(".jar") && this.pluginExists(name)) {
                    File output = new File(updateFolder, name);
                    destinationFilePath.renameTo(output);
                }
            }
        }
        zipFile.close();
        // Move any plugin data folders that were included to the right place, Bukkit won't do this for us.
        moveNewZipFiles(zipPath);
    } catch (final IOException e) {
    } finally {
        fSourceZip.delete();
    }
}
From source file:com.redhat.ceylon.compiler.java.test.cmr.CMRTests.java
@Test
public void testMdlSuppressObsoleteClasses() throws IOException {
    File sourceFile = new File(getPackagePath(), "modules/single/SuppressClass.ceylon");
    copy(new File(getPackagePath(), "modules/single/SuppressClass_1.ceylon"), sourceFile);
    CeyloncTaskImpl compilerTask = getCompilerTask("modules/single/module.ceylon",
            "modules/single/SuppressClass.ceylon");
    Boolean success = compilerTask.call();
    assertTrue(success);

    File carFile = getModuleArchive("com.redhat.ceylon.compiler.java.test.cmr.modules.single", "6.6.6");
    assertTrue(carFile.exists());
    ZipFile car = new ZipFile(carFile);

    ZipEntry oneClass = car.getEntry("com/redhat/ceylon/compiler/java/test/cmr/modules/single/One.class");
    assertNotNull(oneClass);
    ZipEntry twoClass = car.getEntry("com/redhat/ceylon/compiler/java/test/cmr/modules/single/Two.class");
    assertNotNull(twoClass);

    car.close();

    copy(new File(getPackagePath(), "modules/single/SuppressClass_2.ceylon"), sourceFile);
    compilerTask = getCompilerTask("modules/single/module.ceylon", "modules/single/SuppressClass.ceylon");
    success = compilerTask.call();
    assertTrue(success);

    carFile = getModuleArchive("com.redhat.ceylon.compiler.java.test.cmr.modules.single", "6.6.6");
    assertTrue(carFile.exists());
    car = new ZipFile(carFile);

    oneClass = car.getEntry("com/redhat/ceylon/compiler/java/test/cmr/modules/single/One.class");
    assertNotNull(oneClass);
    twoClass = car.getEntry("com/redhat/ceylon/compiler/java/test/cmr/modules/single/Two.class");
    assertNull(twoClass);

    car.close();

    sourceFile.delete();
}
From source file:com.web.server.SARDeployer.java
/**
 * This method extracts the SAR archive and configures for the SAR and starts the services
 * @param file
 * @param warDirectoryPath
 * @throws IOException
 */
public void extractSar(File file, String warDirectoryPath) throws IOException {
    ZipFile zip = new ZipFile(file);
    ZipEntry ze = null;
    String fileName = file.getName();
    fileName = fileName.substring(0, fileName.indexOf('.'));
    fileName += "sar";
    String fileDirectory;
    CopyOnWriteArrayList classPath = new CopyOnWriteArrayList();
    Enumeration<? extends ZipEntry> entries = zip.entries();
    int numBytes;
    while (entries.hasMoreElements()) {
        ze = entries.nextElement();
        // //System.out.println("Unzipping " + ze.getName());
        String filePath = deployDirectory + "/" + fileName + "/" + ze.getName();
        if (!ze.isDirectory()) {
            fileDirectory = filePath.substring(0, filePath.lastIndexOf('/'));
        } else {
            fileDirectory = filePath;
        }
        // //System.out.println(fileDirectory);
        createDirectory(fileDirectory);
        if (!ze.isDirectory()) {
            FileOutputStream fout = new FileOutputStream(filePath);
            byte[] inputbyt = new byte[8192];
            InputStream istream = zip.getInputStream(ze);
            while ((numBytes = istream.read(inputbyt, 0, inputbyt.length)) >= 0) {
                fout.write(inputbyt, 0, numBytes);
            }
            fout.close();
            istream.close();
            if (ze.getName().endsWith(".jar")) {
                classPath.add(filePath);
            }
        }
    }
    zip.close();
    URLClassLoader loader = (URLClassLoader) ClassLoader.getSystemClassLoader();
    URL[] urls = loader.getURLs();
    WebClassLoader sarClassLoader = new WebClassLoader(urls);
    for (int index = 0; index < classPath.size(); index++) {
        System.out.println("file:" + classPath.get(index));
        new WebServer().addURL(new URL("file:" + classPath.get(index)), sarClassLoader);
    }
    new WebServer().addURL(new URL("file:" + deployDirectory + "/" + fileName + "/"), sarClassLoader);
    sarsMap.put(fileName, sarClassLoader);
    System.out.println(sarClassLoader.geturlS());
    try {
        Sar sar = (Sar) sardigester.parse(new InputSource(new FileInputStream(
                deployDirectory + "/" + fileName + "/META-INF/" + "mbean-service.xml")));
        CopyOnWriteArrayList mbeans = sar.getMbean();
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        System.out.println(mbs);
        ObjectName objName;
        for (int index = 0; index < mbeans.size(); index++) {
            Mbean mbean = (Mbean) mbeans.get(index);
            System.out.println(mbean.getObjectname());
            System.out.println(mbean.getCls());
            objName = new ObjectName(mbean.getObjectname());
            Class helloWorldService = sarClassLoader.loadClass(mbean.getCls());
            Object obj = helloWorldService.newInstance();
            if (mbs.isRegistered(objName)) {
                mbs.invoke(objName, "stopService", null, null);
                //mbs.invoke(objName, "destroy", null, null);
                mbs.unregisterMBean(objName);
            }
            mbs.registerMBean(obj, objName);
            CopyOnWriteArrayList attrlist = mbean.getMbeanAttribute();
            if (attrlist != null) {
                for (int count = 0; count < attrlist.size(); count++) {
                    MBeanAttribute attr = (MBeanAttribute) attrlist.get(count);
                    Attribute mbeanattribute = new Attribute(attr.getName(), attr.getValue());
                    mbs.setAttribute(objName, mbeanattribute);
                }
            }
            mbs.invoke(objName, "startService", null, null);
        }
    } catch (ClassNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (InstantiationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (SAXException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (MalformedObjectNameException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (InstanceAlreadyExistsException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (MBeanRegistrationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (NotCompliantMBeanException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (InstanceNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (ReflectionException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (MBeanException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (InvalidAttributeValueException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (AttributeNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
From source file:org.jahia.utils.zip.JahiaArchiveFileHandler.java
public Map<String, String> unzip(String path, boolean overwrite, PathFilter pathFilter, String pathPrefix)
        throws JahiaException {
    Map<String, String> unzippedFiles = new TreeMap<String, String>();
    Map<File, Long> timestamps = new HashMap<File, Long>();
    pathFilter = pathFilter != null ? pathFilter : PathFilter.ALL;
    try {
        String destPath = null;
        FileInputStream fis = new FileInputStream(m_FilePath);
        BufferedInputStream bis = new BufferedInputStream(fis);
        ZipInputStream zis = new ZipInputStream(bis);
        ZipFile zf = new ZipFile(m_FilePath);
        ZipEntry ze = null;
        String zeName = null;
        try {
            while ((ze = zis.getNextEntry()) != null) {
                zeName = ze.getName();
                String filePath = genPathFile(zeName);
                destPath = path + File.separator + filePath;
                File fo = new File(destPath);
                if (pathFilter.accept(pathPrefix != null ? pathPrefix + "/" + zeName : zeName)) {
                    String loggedPath = fo.getAbsolutePath();
                    if (basePath != null) {
                        loggedPath = StringUtils.substringAfter(loggedPath, basePath);
                    }
                    unzippedFiles.put(filePath, loggedPath);
                    long lastModified = ze.getTime();
                    if (lastModified > 0) {
                        timestamps.put(fo, lastModified);
                    }
                    if (ze.isDirectory()) {
                        fo.mkdirs();
                    } else if (overwrite || !fo.exists()) {
                        File parent = new File(fo.getParent());
                        parent.mkdirs();
                        FileOutputStream fos = new FileOutputStream(fo);
                        copyStream(zis, fos);
                    }
                }
                zis.closeEntry();
            }
        } finally {
            // Important !!!
            zf.close();
            fis.close();
            zis.close();
            bis.close();
        }
    } catch (IOException ioe) {
        logger.error(" fail unzipping " + ioe.getMessage(), ioe);
        throw new JahiaException(CLASS_NAME, "faile processing unzip", JahiaException.SERVICE_ERROR,
                JahiaException.ERROR_SEVERITY, ioe);
    }
    // preserve last modified time stamps
    for (Map.Entry<File, Long> tst : timestamps.entrySet()) {
        try {
            tst.getKey().setLastModified(tst.getValue());
        } catch (Exception e) {
            logger.warn("Unable to set last mofified date for file {}. Cause: {}", tst.getKey(), e.getMessage());
        }
    }
    return unzippedFiles;
}
From source file:com.dsh105.commodus.data.Updater.java
/**
 * Part of Zip-File-Extractor, modified by Gravity for use with Bukkit
 */
private void unzip(String file) {
    try {
        final File fSourceZip = new File(file);
        final String zipPath = file.substring(0, file.length() - 4);
        ZipFile zipFile = new ZipFile(fSourceZip);
        Enumeration<? extends ZipEntry> e = zipFile.entries();
        while (e.hasMoreElements()) {
            ZipEntry entry = e.nextElement();
            File destinationFilePath = new File(zipPath, entry.getName());
            destinationFilePath.getParentFile().mkdirs();
            if (entry.isDirectory()) {
                continue;
            } else {
                final BufferedInputStream bis = new BufferedInputStream(zipFile.getInputStream(entry));
                int b;
                final byte buffer[] = new byte[Updater.BYTE_SIZE];
                final FileOutputStream fos = new FileOutputStream(destinationFilePath);
                final BufferedOutputStream bos = new BufferedOutputStream(fos, Updater.BYTE_SIZE);
                while ((b = bis.read(buffer, 0, Updater.BYTE_SIZE)) != -1) {
                    bos.write(buffer, 0, b);
                }
                bos.flush();
                bos.close();
                bis.close();
                final String name = destinationFilePath.getName();
                if (name.endsWith(".jar") && this.pluginFile(name)) {
                    destinationFilePath.renameTo(
                            new File(this.plugin.getDataFolder().getParent(), this.updateFolder + "/" + name));
                }
            }
            entry = null;
            destinationFilePath = null;
        }
        e = null;
        zipFile.close();
        zipFile = null;

        // Move any plugin data folders that were included to the right place, Bukkit won't do this for us.
        for (final File dFile : new File(zipPath).listFiles()) {
            if (dFile.isDirectory()) {
                if (this.pluginFile(dFile.getName())) {
                    final File oFile = new File(this.plugin.getDataFolder().getParent(), dFile.getName()); // Get current dir
                    final File[] contents = oFile.listFiles(); // List of existing files in the current dir
                    for (final File cFile : dFile.listFiles()) // Loop through all the files in the new dir
                    {
                        boolean found = false;
                        for (final File xFile : contents) // Loop through contents to see if it exists
                        {
                            if (xFile.getName().equals(cFile.getName())) {
                                found = true;
                                break;
                            }
                        }
                        if (!found) {
                            // Move the new file into the current dir
                            cFile.renameTo(new File(oFile.getCanonicalFile() + "/" + cFile.getName()));
                        } else {
                            // This file already exists, so we don't need it anymore.
                            cFile.delete();
                        }
                    }
                }
            }
            dFile.delete();
        }
        new File(zipPath).delete();
        fSourceZip.delete();
    } catch (final IOException ex) {
        this.plugin.getLogger()
                .warning("The auto-updater tried to unzip a new update file, but was unsuccessful.");
        this.result = Updater.UpdateResult.FAIL_DOWNLOAD;
        ex.printStackTrace();
    }
    new File(file).delete();
}
From source file:it.readbeyond.minstrel.librarian.FormatHandlerCBZ.java
private List<ZipAsset> parseImagePlaylistEntry(String path, String playlistEntry) {
    List<ZipAsset> assets = new ArrayList<ZipAsset>();
    try {
        ZipFile zipFile = new ZipFile(new File(path), ZipFile.OPEN_READ);
        ZipEntry ze = zipFile.getEntry(playlistEntry);
        String text = this.getZipEntryText(zipFile, ze);
        String[] lines = text.split("\n");
        // if we have lines
        if (lines.length > 0) {
            for (int i = 0; i < lines.length; i++) {
                String line = lines[i].trim();
                // do we have a file name?
                if ((line.length() > 0) && (!line.startsWith("#"))) {
                    HashMap<String, String> meta = new HashMap<String, String>();
                    // duration
                    if (i + 1 < lines.length) {
                        String line2 = lines[i + 1].trim();
                        if ((line2.length() > 0) && (!line2.startsWith("#"))) {
                            i += 1;
                            meta.put("duration", line2);
                            // title
                            if (i + 1 < lines.length) {
                                String line3 = lines[i + 1].trim();
                                if ((line3.length() > 0) && (!line3.startsWith("#"))) {
                                    i += 1;
                                    meta.put("title", line3);
                                }
                            }
                        }
                    }
                    // generate entry path
                    File w = new File(new File(playlistEntry).getParent(), line);
                    line = w.getAbsolutePath().substring(1);
                    // add asset
                    assets.add(new ZipAsset(line, meta));
                } else {
                    // either comment or blank line => continue
                }
            }
        }
        // close ZIP
        zipFile.close();
    } catch (Exception e) {
        // nothing
    }
    return assets;
}