Usage examples for java.util.zip.ZipFile#getInputStream(ZipEntry)
public InputStream getInputStream(ZipEntry entry) throws IOException
From source file:com.app.server.SARDeployer.java
/** * This method extracts the SAR archive and configures for the SAR and starts the services * @param file/* w w w. ja v a 2 s . c om*/ * @param warDirectoryPath * @throws IOException */ public void extractSarDeploy(ClassLoader cL, Object... args) throws IOException { CopyOnWriteArrayList classPath = null; File file = null; String fileName = ""; String fileWithPath = ""; if (args[0] instanceof File) { classPath = new CopyOnWriteArrayList(); file = (File) args[0]; fileWithPath = file.getAbsolutePath(); ZipFile zip = new ZipFile(file); ZipEntry ze = null; fileName = file.getName(); fileName = fileName.substring(0, fileName.indexOf('.')); fileName += "sar"; String fileDirectory; Enumeration<? extends ZipEntry> entries = zip.entries(); int numBytes; while (entries.hasMoreElements()) { ze = entries.nextElement(); // //log.info("Unzipping " + ze.getName()); String filePath = serverConfig.getDeploydirectory() + "/" + fileName + "/" + ze.getName(); if (!ze.isDirectory()) { fileDirectory = filePath.substring(0, filePath.lastIndexOf('/')); } else { fileDirectory = filePath; } // //log.info(fileDirectory); createDirectory(fileDirectory); if (!ze.isDirectory()) { FileOutputStream fout = new FileOutputStream(filePath); byte[] inputbyt = new byte[8192]; InputStream istream = zip.getInputStream(ze); while ((numBytes = istream.read(inputbyt, 0, inputbyt.length)) >= 0) { fout.write(inputbyt, 0, numBytes); } fout.close(); istream.close(); if (ze.getName().endsWith(".jar")) { classPath.add(filePath); } } } zip.close(); } else if (args[0] instanceof FileObject) { FileObject fileObj = (FileObject) args[0]; fileName = fileObj.getName().getBaseName(); try { fileWithPath = fileObj.getURL().toURI().toString(); } catch (URISyntaxException e) { // TODO Auto-generated catch block e.printStackTrace(); } fileName = fileName.substring(0, fileName.indexOf('.')); fileName += "sar"; classPath = unpack(fileObj, new File(serverConfig.getDeploydirectory() + "/" + fileName + "/"), 
(StandardFileSystemManager) args[1]); } URLClassLoader loader = (URLClassLoader) ClassLoader.getSystemClassLoader(); URL[] urls = loader.getURLs(); WebClassLoader sarClassLoader; if (cL != null) { sarClassLoader = new WebClassLoader(urls, cL); } else { sarClassLoader = new WebClassLoader(urls); } for (int index = 0; index < classPath.size(); index++) { // log.info("file:"+classPath.get(index)); sarClassLoader.addURL(new URL("file:" + classPath.get(index))); } sarClassLoader.addURL(new URL("file:" + serverConfig.getDeploydirectory() + "/" + fileName + "/")); //log.info(sarClassLoader.geturlS()); sarsMap.put(fileWithPath, sarClassLoader); try { Sar sar = (Sar) sardigester.parse(new InputSource(new FileInputStream( serverConfig.getDeploydirectory() + "/" + fileName + "/META-INF/" + "mbean-service.xml"))); CopyOnWriteArrayList mbeans = sar.getMbean(); //log.info(mbeanServer); ObjectName objName, classLoaderObjectName = new ObjectName("com.app.server:classLoader=" + fileName); if (!mbeanServer.isRegistered(classLoaderObjectName)) { mbeanServer.registerMBean(sarClassLoader, classLoaderObjectName); } else { mbeanServer.unregisterMBean(classLoaderObjectName); mbeanServer.registerMBean(sarClassLoader, classLoaderObjectName); ; } for (int index = 0; index < mbeans.size(); index++) { Mbean mbean = (Mbean) mbeans.get(index); //log.info(mbean.getObjectname()); //log.info(mbean.getCls()); objName = new ObjectName(mbean.getObjectname()); Class service = sarClassLoader.loadClass(mbean.getCls()); if (mbeanServer.isRegistered(objName)) { //mbs.invoke(objName, "stopService", null, null); //mbs.invoke(objName, "destroy", null, null); mbeanServer.unregisterMBean(objName); } mbeanServer.createMBean(service.getName(), objName, classLoaderObjectName); //mbs.registerMBean(obj, objName); CopyOnWriteArrayList attrlist = mbean.getMbeanAttribute(); if (attrlist != null) { for (int count = 0; count < attrlist.size(); count++) { MBeanAttribute attr = (MBeanAttribute) attrlist.get(count); 
Attribute mbeanattribute = new Attribute(attr.getName(), attr.getValue()); mbeanServer.setAttribute(objName, mbeanattribute); } } Attribute mbeanattribute = new Attribute("ObjectName", objName); mbeanServer.setAttribute(objName, mbeanattribute); if (((String) mbeanServer.getAttribute(objName, "Deployer")).equals("true")) { mbeanServer.invoke(objName, "init", new Object[] { deployerList }, new String[] { Vector.class.getName() }); } mbeanServer.invoke(objName, "init", new Object[] { serviceList, serverConfig, mbeanServer }, new String[] { Vector.class.getName(), ServerConfig.class.getName(), MBeanServer.class.getName() }); mbeanServer.invoke(objName, "start", null, null); serviceListObjName.put(fileWithPath, objName); } } catch (Exception e) { log.error("Could not able to deploy sar archive " + fileWithPath, e); // TODO Auto-generated catch block //e.printStackTrace(); } }
From source file:net.sf.zekr.common.config.ApplicationConfig.java
private RevelationData loadRevelationData(File revelZipFile) throws IOException, ConfigurationException { ZipFile zipFile = new ZipFile(revelZipFile); InputStream is = zipFile.getInputStream(new ZipEntry(ApplicationPath.REVELATION_DESC)); if (is == null) { logger.warn("Will ignore invalid revelation data archive \"" + zipFile.getName() + "\"."); return null; }//from ww w . j a va 2s .co m PropertiesConfiguration pc = ConfigUtils.loadConfig(is, "UTF-8"); zipFile.close(); RevelationData rd = new RevelationData(); int len; if ("aya".equals(pc.getString("mode", "sura"))) { len = QuranPropertiesUtils.QURAN_AYA_COUNT; rd.mode = RevelationData.AYA_MODE; } else { len = 114; rd.mode = RevelationData.SURA_MODE; } rd.suraOrders = new int[len]; rd.orders = new int[len]; // rd.years = new int[len]; // not used for now rd.version = pc.getString("version"); String zipFileName = revelZipFile.getName(); rd.id = zipFileName.substring(0, zipFileName.length() - ApplicationPath.REVEL_PACK_SUFFIX.length()); rd.archiveFile = revelZipFile; rd.delimiter = pc.getString("delimiter", "\n"); String sig = pc.getString("signature"); byte[] sigBytes = sig.getBytes("US-ASCII"); rd.signature = sig == null ? null : Base64.decodeBase64(sigBytes); rd.loadLocalizedNames(pc, "name"); if (StringUtils.isBlank(rd.id) || rd.localizedNameMap.size() == 0 || StringUtils.isBlank(rd.version)) { logger.warn("Invalid revelation data package: \"" + rd + "\"."); return null; } return rd; }
From source file:org.bdval.BDVModel.java
/** * Load the BDVModel training platform from the specified zip file. * * @param zipFile The file to read the platform from * @return A populated platform object//from www . j a v a 2s .co m * @throws IOException if there is a problem reading from the file */ private GEOPlatformIndexed loadPlatform(final ZipFile zipFile) throws IOException { final String platformEntryName = FilenameUtils.getName(platformFilename); final Map<String, java.util.Properties> propertyMap = new HashMap<String, java.util.Properties>(); final Enumeration<? extends ZipEntry> entries = zipFile.entries(); while (entries.hasMoreElements()) { final ZipEntry entry = entries.nextElement(); final String entryName = entry.getName(); if (entryName.startsWith(platformEntryName)) { // we have a platform entry final String propertyName = StringUtils.substringBetween(entryName, platformEntryName + ".", "." + ModelFileExtension.properties.toString()); final java.util.Properties properties = new java.util.Properties(); properties.load(zipFile.getInputStream(entry)); propertyMap.put(propertyName, properties); } } return new GEOPlatformIndexed(propertyMap); }
From source file:fr.gouv.finances.dgfip.xemelios.importers.archives.ArchiveImporter.java
/**
 * Imports an archive: reads its manifest, matches it against the configured rule
 * sections, imports/deletes the documents the matching section selects, applies the
 * manifest's actions, and finally records the import (result + history event) back
 * into the manifest.
 *
 * @return an {@link Errors} accumulator; warnings/errors encountered during the
 *         import are collected there rather than thrown
 */
public Errors doImport() {
    Errors errors = new Errors();
    try {
        // Read the manifest entry out of the archive, then close the zip immediately;
        // document payloads are read later from the volume files, not from this zip.
        // NOTE(review): zipArchive is not closed if getEntry/getInputStream throws.
        ZipFile zipArchive = new ZipFile(fileToImport);
        ZipEntry manifesteEntry = zipArchive.getEntry(MANIFESTE_FILE_NAME);
        archiveManifeste = getManisfesteFromArchive(zipArchive.getInputStream(manifesteEntry));
        archiveManifeste.getRootElement()
                .addAttribute(new Attribute("archive-name", getArchiveName(fileToImport)));
        zipArchive.close();
        HashMap<String, Object> importProperties = extractPropertiesFromArchiveManifeste(archiveManifeste);
        // Warn for every document type (other than attachments "PJ") the data layer
        // cannot import yet.
        for (String docType : (String[]) importProperties.get("archiveDocumentTypes")) {
            if (!docType.equals("PJ")
                    && !DataLayerManager.getImplementation().canImportDocument(docType, getUser())) {
                errors.addError(Errors.SEVERITY_WARNING, "Impossible d'importer ce type de document ("
                        + docType + "), la base de donne doit d'abord tre mise jour.");
            }
        }
        importedArchiveManifeste = DataLayerManager.getImplementation()
                .getManifesteFromArchive(importProperties.get("archiveName").toString(), getUser());
        definePropertiesFromImportedManifeste(importedArchiveManifeste, importProperties);
        Element historique = null;
        if (importedArchiveManifeste != null) {
            historique = (Element) importedArchiveManifeste
                    .query("/m:manifeste/m:evenements", getNamespaceCtx()).get(0);
            // copy so we get a parent-less element that can be re-attached anywhere
            historique = new Element(historique);
        } else {
            historique = new Element("evenements");
        }
        archiveManifeste.getRootElement().appendChild(historique);
        boolean sectionApplied = false;
        // First section whose predicate matches the import properties wins.
        for (SectionModel section : rules.getSections()) {
            SectionModel theSection = section;
            if (theSection.getPredicat().matches(importProperties)) {
                logger.info("Application de la rgle " + theSection.getName());
                int globalOverwriteRule = OVERWRITE_RULE_UNSET;
                if (theSection.getActions().isUsePreviousSection()) {
                    // then a previously imported manifest必 must exist: reuse its section
                    Element sectionElement = (Element) importedArchiveManifeste
                            .query("/m:manifeste/rul:section", getNamespaceCtx()).get(0);
                    if (sectionElement == null)
                        throw new ImportException(new Errors.Error(Errors.SEVERITY_ERROR, "la section "
                                + theSection.getName()
                                + " impose l'application de la section du prcdente import, mais celui-ci n'a pas t trouv."),
                                null);
                    theSection = new SectionModel(sectionElement);
                    // and drop all data previously imported from this archive
                    HashMap<String, DocumentModel> docsToDrop = new HashMap<String, DocumentModel>();
                    for (String docId : (String[]) importProperties.get("archiveImportedDocumentTypes")) {
                        docsToDrop.put(docId, documentsModel.getDocumentById(docId));
                    }
                    DataLayerManager.getImplementation().removeArchive(docsToDrop,
                            importProperties.get("archiveName").toString(), getUser());
                    Nodes deleteActions = importedArchiveManifeste.query("/m:manifeste/m:on-delete/m:action",
                            getNamespaceCtx());
                    for (int i = 0; i < deleteActions.size(); i++) {
                        Element action = (Element) deleteActions.get(i);
                        doApplyAction(action);
                    }
                    // at this point the manifest needs no update: all documents are removed
                } else {
                    if (importedArchiveManifeste != null) {
                        // carry over each document's import status/history from the previous import
                        Nodes importedDocuments = importedArchiveManifeste.query("//m:document",
                                getNamespaceCtx());
                        for (int i = 0; i < importedDocuments.size(); i++) {
                            Element importedDoc = (Element) importedDocuments.get(i);
                            Element thisDoc = getElement(
                                    archiveManifeste.query("/manifeste/documents/document[@path='"
                                            + importedDoc.getAttributeValue("path") + "']"));
                            if (thisDoc != null) {
                                String __imported = importedDoc.getAttributeValue("imported");
                                thisDoc.addAttribute(new Attribute("imported", __imported));
                                if ("Oui".equals(__imported)) {
                                    Element result = getElement(
                                            importedDoc.query("m:resultatimport", getNamespaceCtx()));
                                    if (result != null)
                                        thisDoc.appendChild(new Element(result));
                                }
                            }
                        }
                    }
                }
                if (theSection.getOverwriteRule() != null) {
                    if (EtatImporteur.OVERWRITE_RULE_ALWAYS.equals(theSection.getOverwriteRule()))
                        globalOverwriteRule = OVERWRITE_RULE_OVERWRITE;
                    else if (EtatImporteur.OVERWRITE_RULE_NEVER.equals(theSection.getOverwriteRule()))
                        globalOverwriteRule = OVERWRITE_RULE_SKIP;
                }
                // fetch the documents to import so we can check their volumes are available
                List<Element> documentsToImport = theSection.getDocumentsToImport(archiveManifeste,
                        applicationProperties);
                if (documentsToImport.size() > 0) {
                    TreeSet<String> volumesRequired = new TreeSet<String>();
                    for (Element filePlannedToImport : documentsToImport) {
                        volumesRequired.add(filePlannedToImport.getAttributeValue("volume"));
                    }
                    int maxVolumes = Integer.parseInt(volumesRequired.last());
                    File[] volumes = new File[maxVolumes + 1];
                    for (String volume : volumesRequired) {
                        String fichierVolume = archiveManifeste
                                .query("/manifeste/volumes/volume[@num=" + volume + "]/@fichier").get(0)
                                .getValue();
                        File f = new File(fileToImport.getParentFile(), fichierVolume);
                        if (!f.exists()) {
                            errors.addError(Errors.SEVERITY_ERROR, f.getAbsolutePath() + " non trouv");
                        }
                        volumes[Integer.parseInt(volume)] = f;
                    }
                    if (!errors.containsError()) {
                        logger.info("displayProgress(" + (documentsToImport.size() + 1) + ")");
                        // extension hook: lets ArchiveImporteur subclasses tweak things pre-import
                        preImport(theSection, archiveManifeste);
                        importServiceProvider.displayProgress(documentsToImport.size() + 1);
                        boolean doImport = false;
                        boolean doDelete = false;
                        // process the section's action models (imports and deletes)
                        for (XomDefinitionable dd : theSection.getActions().getActions()) {
                            if (dd instanceof ImportModel) {
                                // the document list is already computed above; import each one
                                for (Element documentToImport : documentsToImport) {
                                    int vol = Integer.parseInt(documentToImport.getAttributeValue("volume"));
                                    try {
                                        FileInfo fileInfo = doImportDocument(documentToImport, volumes[vol],
                                                importProperties, globalOverwriteRule);
                                        if (fileInfo.getInProcessException() != null)
                                            errors.addError(Errors.SEVERITY_ERROR,
                                                    fileInfo.getInProcessException().getMessage());
                                        if (__fileInfo == null)
                                            __fileInfo = fileInfo;
                                        else
                                            __fileInfo.merge(fileInfo);
                                        if (fileInfo.getGlobalCount() == 0) {
                                            // nothing was imported, probably because the overwrite rule
                                            // said not to import — so leave the manifest untouched
                                        } else {
                                            Element result = new Element("resultatimport");
                                            result.addAttribute(new Attribute("Duree", DateUtils
                                                    .durationToString(fileInfo.getDurationImport())));
                                            result.addAttribute(new Attribute("Debut", DateUtils
                                                    .formatXsDateTime(new Date(fileInfo.getDebutImport()))));
                                            result.addAttribute(new Attribute("Fin", DateUtils
                                                    .formatXsDateTime(new Date(fileInfo.getFinImport()))));
                                            // drop the previous import result, if any
                                            Nodes previousResults = documentToImport.query(
                                                    "resultatimport | m:resultatimport", getNamespaceCtx());
                                            for (int i = previousResults.size() - 1; i >= 0; i--) {
                                                Element __res = (Element) previousResults.get(i);
                                                documentToImport.removeChild(__res);
                                            }
                                            documentToImport.insertChild(result, 0);
                                            documentToImport.addAttribute(new Attribute("imported", "Oui"));
                                        }
                                        // apply any actions that depend on this document
                                        Nodes actions = archiveManifeste
                                                .query("/manifeste/action[@depends-on='"
                                                        + documentToImport.getAttributeValue("path") + "']");
                                        for (int i = 0; i < actions.size(); i++) {
                                            Element action = (Element) actions.get(i);
                                            try {
                                                FileInfo actFileInfo = doApplyAction(action);
                                                if (__fileInfo == null)
                                                    __fileInfo = actFileInfo;
                                                else
                                                    __fileInfo.merge(actFileInfo);
                                            } catch (Exception ex) {
                                                logger.error("while applying " + action.toXML(), ex);
                                            }
                                        }
                                    } catch (Exception ex) {
                                        logger.error("while importing "
                                                + documentToImport.getAttributeValue("path"), ex);
                                        documentToImport.addAttribute(
                                                new Attribute("imported", "Erreur: " + ex.getMessage()));
                                    }
                                }
                                doImport = true;
                            } else if (dd instanceof DeleteModel) {
                                importServiceProvider.startLongWait();
                                DeleteModel dm = (DeleteModel) dd;
                                if (dm.getArchive() != null) {
                                    String archiveName = null;
                                    if ("archiveName".equals(dm.getArchive())) {
                                        // substitute the actual archive name; for now deleting a
                                        // different archive is not allowed
                                        archiveName = importProperties.get("archiveName").toString();
                                        HashMap<String, DocumentModel> map = new HashMap<String, DocumentModel>();
                                        for (String s : (String[]) importProperties
                                                .get("archiveDocumentTypes")) {
                                            map.put(s, documentsModel.getDocumentById(s));
                                        }
                                        DataLayerManager.getImplementation().removeArchive(map, archiveName,
                                                getUser());
                                        Nodes documents = archiveManifeste
                                                .query("/manifeste/documents/document");
                                        for (int i = 0; i < documents.size(); i++) {
                                            Element doc = (Element) documents.get(i);
                                            Element resultImport = getElement(doc.query(
                                                    "m:resultatimport | resultatimport", getNamespaceCtx()));
                                            if (resultImport != null)
                                                doc.removeChild(resultImport);
                                            doc.addAttribute(new Attribute("imported", "Non"));
                                        }
                                        // all documents were removed, so apply every on-delete action
                                        Nodes actions = archiveManifeste
                                                .query("/manifeste/on-delete/action[@depends-on]");
                                        for (int i = 0; i < actions.size(); i++) {
                                            Element action = (Element) actions.get(i);
                                            try {
                                                FileInfo fileInfo = doApplyAction(action);
                                                if (__fileInfo == null)
                                                    __fileInfo = fileInfo;
                                                else
                                                    __fileInfo.merge(fileInfo);
                                            } catch (Exception ex) {
                                                logger.error("while applying " + action.toXML(), ex);
                                            }
                                        }
                                    }
                                } else if (dm.getTypeDoc() != null) {
                                    // scope of the delete narrows with each present field:
                                    // type + collectivite + budget + file, down to the whole doc type
                                    if (dm.getCollectivite() != null) {
                                        if (dm.getBudget() != null) {
                                            if (dm.getFileName() != null) {
                                                DataLayerManager.getImplementation().removeDocument(
                                                        documentsModel.getDocumentById(dm.getTypeDoc()),
                                                        new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                        new Pair(dm.getBudget(), dm.getBudget()),
                                                        dm.getFileName(), user);
                                                Nodes documents = archiveManifeste
                                                        .query("/manifeste/documents/document[@type='"
                                                                + dm.getTypeDoc() + "' and @buIdCol='"
                                                                + dm.getCollectivite() + "' and @buCode='"
                                                                + dm.getBudget() + "' and ends-with(@path,'"
                                                                + dm.getFileName() + "')]");
                                                for (int i = 0; i < documents.size(); i++) {
                                                    Element doc = (Element) documents.get(i);
                                                    Element resultImport = getElement(
                                                            doc.query("m:resultatimport | resultatimport",
                                                                    getNamespaceCtx()));
                                                    if (resultImport != null)
                                                        doc.removeChild(resultImport);
                                                    doc.addAttribute(new Attribute("imported", "Non"));
                                                }
                                                // apply the document's own actions
                                                Nodes actions = archiveManifeste.query(
                                                        "/manifeste/on-delete/action[ends-with(@depends-on,'"
                                                                + dm.getFileName() + "')]");
                                                for (int i = 0; i < actions.size(); i++) {
                                                    Element action = (Element) actions.get(i);
                                                    try {
                                                        FileInfo fileInfo = doApplyAction(action);
                                                        if (__fileInfo == null)
                                                            __fileInfo = fileInfo;
                                                        else
                                                            __fileInfo.merge(fileInfo);
                                                    } catch (Exception ex) {
                                                        logger.error("while applying " + action.toXML(), ex);
                                                    }
                                                }
                                            } else {
                                                DataLayerManager.getImplementation().removeBudget(
                                                        documentsModel.getDocumentById(dm.getTypeDoc()),
                                                        new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                        new Pair(dm.getBudget(), dm.getBudget()), user);
                                                Nodes documents = archiveManifeste
                                                        .query("/manifeste/documents/document[@type='"
                                                                + dm.getTypeDoc() + "' and @buIdCol='"
                                                                + dm.getCollectivite() + "' and @buCode='"
                                                                + dm.getBudget() + "']");
                                                for (int i = 0; i < documents.size(); i++) {
                                                    Element doc = (Element) documents.get(i);
                                                    Element resultImport = getElement(
                                                            doc.query("m:resultatimport | resultatimport",
                                                                    getNamespaceCtx()));
                                                    if (resultImport != null)
                                                        doc.removeChild(resultImport);
                                                    doc.addAttribute(new Attribute("imported", "Non"));
                                                    // apply the document's own actions
                                                    Nodes actions = archiveManifeste.query(
                                                            "/manifeste/on-delete/action[@depends-on='"
                                                                    + doc.getAttributeValue("path") + "']");
                                                    for (int a = 0; a < actions.size(); a++) {
                                                        Element action = (Element) actions.get(a);
                                                        try {
                                                            FileInfo fileInfo = doApplyAction(action);
                                                            if (__fileInfo == null)
                                                                __fileInfo = fileInfo;
                                                            else
                                                                __fileInfo.merge(fileInfo);
                                                        } catch (Exception ex) {
                                                            logger.error("while applying " + action.toXML(),
                                                                    ex);
                                                        }
                                                    }
                                                }
                                            }
                                        } else {
                                            DataLayerManager.getImplementation().removeCollectivite(
                                                    documentsModel.getDocumentById(dm.getTypeDoc()),
                                                    new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                    user);
                                            Nodes documents = archiveManifeste
                                                    .query("/manifeste/documents/document[@type='"
                                                            + dm.getTypeDoc() + "' and @buIdCol='"
                                                            + dm.getCollectivite() + "']");
                                            for (int i = 0; i < documents.size(); i++) {
                                                Element doc = (Element) documents.get(i);
                                                Element resultImport = getElement(
                                                        doc.query("m:resultatimport | resultatimport",
                                                                getNamespaceCtx()));
                                                if (resultImport != null)
                                                    doc.removeChild(resultImport);
                                                doc.addAttribute(new Attribute("imported", "Non"));
                                                // apply the document's own actions
                                                Nodes actions = archiveManifeste.query(
                                                        "/manifeste/on-delete/action[@depends-on='"
                                                                + doc.getAttributeValue("path") + "']");
                                                for (int a = 0; a < actions.size(); a++) {
                                                    Element action = (Element) actions.get(a);
                                                    try {
                                                        FileInfo fileInfo = doApplyAction(action);
                                                        if (__fileInfo == null)
                                                            __fileInfo = fileInfo;
                                                        else
                                                            __fileInfo.merge(fileInfo);
                                                    } catch (Exception ex) {
                                                        logger.error("while applying " + action.toXML(), ex);
                                                    }
                                                }
                                            }
                                        }
                                    } else {
                                        DataLayerManager.getImplementation().removeDocumentModel(
                                                documentsModel.getDocumentById(dm.getTypeDoc()), user);
                                        Nodes documents = archiveManifeste
                                                .query("/manifeste/documents/document[@type='"
                                                        + dm.getTypeDoc() + "']");
                                        for (int i = 0; i < documents.size(); i++) {
                                            Element doc = (Element) documents.get(i);
                                            Element resultImport = getElement(doc.query(
                                                    "m:resultatimport | resultatimport", getNamespaceCtx()));
                                            if (resultImport != null)
                                                doc.removeChild(resultImport);
                                            doc.addAttribute(new Attribute("imported", "Non"));
                                            // apply the document's own actions
                                            Nodes actions = archiveManifeste.query(
                                                    "/manifeste/on-delete/action[@depends-on='"
                                                            + doc.getAttributeValue("path") + "']");
                                            for (int a = 0; a < actions.size(); a++) {
                                                Element action = (Element) actions.get(a);
                                                try {
                                                    FileInfo fileInfo = doApplyAction(action);
                                                    if (__fileInfo == null)
                                                        __fileInfo = fileInfo;
                                                    else
                                                        __fileInfo.merge(fileInfo);
                                                } catch (Exception ex) {
                                                    logger.error("while applying " + action.toXML(), ex);
                                                }
                                            }
                                        }
                                    }
                                }
                                doDelete = true;
                            }
                            importServiceProvider.endLongWait();
                        }
                        if (doImport) {
                            // compatibility with pre-2011 archives: apply every action that is
                            // not on-delete and has no depends-on
                            Nodes actions = archiveManifeste.query("/manifeste/action[not(@depends-on)]");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                        if (doImport) {
                            // for "edmn" patches, apply the declared actions
                            Nodes actions = archiveManifeste.query("/manifeste/actions/action");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                        if (doDelete) {
                            // compatibility with pre-2011 archives: on-delete actions with no depends-on
                            Nodes actions = archiveManifeste
                                    .query("/manifeste/on-delete/action[not(@depends-on)]");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                    }
                    // record here whether the archive actually contributed data
                    if (archiveManifeste.query("/manifeste/documents/document[@imported='Oui']").size() > 0)
                        archiveManifeste.getRootElement()
                                .addAttribute(new Attribute("added:archive", Constants.ADDED_NS_URI, "Oui"));
                    else
                        archiveManifeste.getRootElement()
                                .addAttribute(new Attribute("added:archive", Constants.ADDED_NS_URI, "Non"));
                    // record the applied section in the manifest so it can be replayed later
                    archiveManifeste.getRootElement().appendChild(theSection.getXomDefinition());
                    sectionApplied = true;
                    break;
                } else {
                    // nothing to import, but the section still counts as applied
                    sectionApplied = true;
                    break;
                }
            }
        }
        if (sectionApplied) {
            // all sections processed; maybe nothing actually happened
            boolean somethingHasBeenImported = false;
            Nodes nodes = archiveManifeste.query("//document[@imported]");
            somethingHasBeenImported = nodes.size() > 0;
            // mark every untouched document as not imported
            nodes = archiveManifeste.query("//document[not(@imported)]");
            for (int i = 0; i < nodes.size(); i++) {
                Element el = (Element) nodes.get(i);
                el.addAttribute(new Attribute("imported", "Non"));
            }
            archiveManifeste.getRootElement()
                    .addAttribute(new Attribute("imported", Boolean.toString(somethingHasBeenImported)));
            Element result = new Element("resultatimport");
            // NOTE(review): "result != null" is always true right after construction;
            // the guard that matters is the __fileInfo null check inside.
            if (result != null) {
                if (__fileInfo != null) {
                    result.addAttribute(
                            new Attribute("Duree", DateUtils.durationToString(__fileInfo.getDurationImport())));
                    result.addAttribute(new Attribute("Debut",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getDebutImport()))));
                    result.addAttribute(new Attribute("Fin",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getFinImport()))));
                    result.addAttribute(new Attribute("User", getUser().getId()));
                    result.addAttribute(new Attribute("LastModify",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getLastModify()))));
                    result.appendChild(__fileInfo.toXomXml(documentsModel));
                }
            }
            archiveManifeste.getRootElement().appendChild(result);
            // import history: prepend an event describing this run
            Element event = new Element("evenement");
            event.addAttribute(new Attribute("date", DateUtils.formatXsDateTime(new Date())));
            event.addAttribute(new Attribute("user", getUser().getId()));
            event.addAttribute(new Attribute("section", archiveManifeste
                    .query("/manifeste/rul:section/@name", getNamespaceCtx()).get(0).getValue()));
            String version = archiveManifeste.getRootElement().getAttributeValue("version");
            if (version == null || version.length() == 0)
                version = "1";
            event.addAttribute(new Attribute("version-archive", version));
            historique.insertChild(event, 0);
            doImportManifeste(archiveManifeste, importProperties.get("archiveName").toString());
            DataLayerManager.getImplementation()
                    .declareArchiveImported(importProperties.get("archiveName").toString(), user);
        } else {
            errors.addError(Errors.SEVERITY_WARNING,
                    "Cette archive ne peut tre importe par aucune des rgles de cette configuration.");
        }
    } catch (XPathExpressionException ex) {
        logger.error(ex.getMessage(), ex);
        errors.addError(Errors.SEVERITY_ERROR, ex.getMessage());
    } catch (IOException ioEx) {
        logger.error(fileToImport.getAbsolutePath() + ": " + ioEx.getMessage(), ioEx);
        errors.addError(Errors.SEVERITY_ERROR, ioEx.getMessage());
    } catch (ImportException iEx) {
        Errors.Error error = iEx.error;
        errors.addError(error);
    } catch (DataConfigurationException dcEx) {
        logger.error(dcEx.getMessage(), dcEx);
        errors.addError(Errors.SEVERITY_ERROR, dcEx.getMessage());
    } catch (DataAccessException daEx) {
        logger.error(daEx.getMessage(), daEx);
        errors.addError(Errors.SEVERITY_ERROR, daEx.getMessage());
    } catch (UnauthorizedException ex) {
        logger.error(ex.getMessage(), ex);
        errors.addError(Errors.SEVERITY_ERROR, ex.getMessage());
    } catch (Throwable t) {
        t.printStackTrace();
        errors.addError(Errors.SEVERITY_ERROR, t.getMessage());
    } finally {
        // NOTE(review): the commented-out close below references zipArchive, which is
        // declared inside the try block and is out of scope here — it could not compile
        // as written. The zip is closed early in the happy path (see top of try).
        // try { zipArchive.close(); } catch(Exception ex) {}
    }
    return errors;
}
From source file:org.fireflow.client.impl.WorkflowStatementLocalImpl.java
private Map<String, InputStream> parseModelDefsFromZipFile(File processZipFile) throws InvalidModelException { Map<String, InputStream> modelDefsMap = new HashMap<String, InputStream>(); ZipFile zf = null; try {/*from ww w . j ava2s. com*/ zf = new ZipFile(processZipFile); } catch (ZipException e) { throw new InvalidModelException(e); } catch (IOException e) { throw new InvalidModelException(e); } Enumeration enu = zf.entries(); while (enu.hasMoreElements()) { ZipEntry entry = (ZipEntry) enu.nextElement(); String fileName = entry.getName(); try { if (!(entry.isDirectory())) { InputStream inputStream = zf.getInputStream(entry); modelDefsMap.put(fileName, inputStream); // ByteArrayOutputStream out = new ByteArrayOutputStream(); // // byte[] buf = new byte[1024]; // int read = 0; // do { // read = inputStream.read(buf, 0, buf.length); // if (read > 0) // out.write(buf, 0, read); // } while (read >= 0); // processDefinitionsContent.put(fileName, // out.toString("UTF-8")); } } catch (IOException e) { throw new InvalidModelException(e); } } return modelDefsMap; }
From source file:com.pari.nm.utils.backup.BackupRestore.java
public boolean unzip(File zipFile) { ZipFile zf = null; FileOutputStream fout = null; // File zipFile = new File(localDir, zipFileName); try {//from w ww .ja v a 2 s . c om zf = new ZipFile(zipFile); Enumeration en = zf.entries(); while (en.hasMoreElements()) { ZipEntry ze = (ZipEntry) en.nextElement(); String currentEntry = ze.getName(); File destFile = new File(zipFile.getParent(), currentEntry); File destinationParent = destFile.getParentFile(); if (!destinationParent.exists()) { destinationParent.mkdirs(); destinationParent.setWritable(true, false); } System.err.println("ZIP ENTRY NAME:" + currentEntry); if (!ze.isDirectory()) { InputStream in = zf.getInputStream(ze); byte[] data = new byte[4096]; int read = 0; File extractFile = new File(zipFile.getParent(), ze.getName()); fout = new FileOutputStream(extractFile); while ((read = in.read(data)) != -1) { fout.write(data, 0, read); } fout.close(); } } return true; } catch (Exception ex) { ex.printStackTrace(); } finally { try { if (zf != null) { zf.close(); } } catch (Exception ex) { } try { if (fout != null) { fout.close(); } } catch (Exception ex) { } } return false; }
From source file:net.sf.zekr.common.config.ApplicationConfig.java
public AudioData addNewRecitationPack(File zipFileToImport, String destDir, IntallationProgressListener progressListener) throws ZekrMessageException { try {/* w ww. j a va2 s. co m*/ ZipFile zipFile = new ZipFile(zipFileToImport); InputStream is = zipFile.getInputStream(new ZipEntry(ApplicationPath.RECITATION_DESC)); if (is == null) { logger.debug( String.format("Could not find recitation descriptor %s in the root of the zip archive %s.", zipFileToImport, ApplicationPath.RECITATION_DESC)); throw new ZekrMessageException("INVALID_RECITATION_FORMAT", new String[] { zipFileToImport.getName() }); } String tempFileName = System.currentTimeMillis() + "-" + ApplicationPath.RECITATION_DESC; tempFileName = System.getProperty("java.io.tmpdir") + "/" + tempFileName; File recitPropsFile = new File(tempFileName); OutputStreamWriter output = null; InputStreamReader input = null; try { output = new OutputStreamWriter(new FileOutputStream(recitPropsFile), "UTF-8"); input = new InputStreamReader(is, "UTF-8"); IOUtils.copy(input, output); } finally { IOUtils.closeQuietly(output); IOUtils.closeQuietly(input); } logger.debug("Add new recitation: " + recitPropsFile); AudioData newAudioData = loadAudioData(recitPropsFile, false); if (newAudioData == null || newAudioData.getId() == null) { logger.debug("Invalid recitation descriptor: " + recitPropsFile); throw new ZekrMessageException("INVALID_RECITATION_FORMAT", new String[] { zipFileToImport.getName() }); } File newRecitPropsFile = new File(destDir, newAudioData.id + ".properties"); if (newRecitPropsFile.exists()) { newRecitPropsFile.delete(); } FileUtils.moveFile(recitPropsFile, newRecitPropsFile); /* ZipEntry recFolderEntry = zipFile.getEntry(newAudioData.id); if (recFolderEntry == null || !recFolderEntry.isDirectory()) { logger.warn(String.format("Recitation audio folder (%s) doesn't exist in the root of archive %s.", newAudioData.id, zipFileToImport)); throw new ZekrMessageException("INVALID_RECITATION_FORMAT", new String[] { 
zipFileToImport.getName() }); } */ AudioData installedAudioData = audio.get(newAudioData.id); if (installedAudioData != null) { if (newAudioData.compareTo(installedAudioData) < 0) { throw new ZekrMessageException("NEWER_VERSION_INSTALLED", new String[] { recitPropsFile.toString(), newAudioData.lastUpdate, installedAudioData.lastUpdate }); } } newAudioData.file = newRecitPropsFile; logger.info(String.format("Start uncompressing recitation: %s with size: %s to %s.", zipFileToImport.getName(), FileUtils.byteCountToDisplaySize(zipFileToImport.length()), destDir)); boolean result; try { result = ZipUtils.extract(zipFileToImport, destDir, progressListener); } finally { File file = new File(newRecitPropsFile.getParent(), ApplicationPath.RECITATION_DESC); if (file.exists()) { FileUtils.deleteQuietly(file); } } if (result) { logger.info("Uncompressing process done: " + zipFileToImport.getName()); audio.add(newAudioData); } else { logger.info("Uncompressing process intrrrupted: " + zipFileToImport.getName()); } // FileUtils.deleteQuietly(new File(newRecitPropsFile.getParent(), ApplicationPath.RECITATION_DESC)); progressListener.finish(newAudioData); return result ? newAudioData : null; } catch (ZekrMessageException zme) { throw zme; } catch (Exception e) { logger.error("Error occurred while adding new recitation archive.", e); throw new ZekrMessageException("RECITATION_LOAD_FAILED", new String[] { zipFileToImport.getName(), e.toString() }); } }
From source file:net.sf.zekr.common.config.ApplicationConfig.java
public TranslationData loadTranslationData(File transZipFile) throws IOException, ConfigurationException { TranslationData td = null;//from w w w . j ava2s . c o m ZipFile zipFile = null; try { zipFile = new ZipFile(transZipFile); InputStream is = zipFile.getInputStream(new ZipEntry(ApplicationPath.TRANSLATION_DESC)); if (is == null) { logger.warn("Will ignore invalid translation archive \"" + zipFile.getName() + "\"."); return null; } Reader reader = new InputStreamReader(is, "UTF-8"); PropertiesConfiguration pc = new PropertiesConfiguration(); pc.load(reader); reader.close(); is.close(); td = new TranslationData(); td.version = pc.getString(VERSION_ATTR); td.id = pc.getString(ID_ATTR); td.locale = new Locale(pc.getString(LANG_ATTR, "en"), pc.getString(COUNTRY_ATTR, "US")); td.encoding = pc.getString(ENCODING_ATTR, "ISO-8859-1"); td.direction = pc.getString(DIRECTION_ATTR, "ltr"); td.file = pc.getString(FILE_ATTR); td.name = pc.getString(NAME_ATTR); td.localizedName = pc.getString(LOCALIZED_NAME_ATTR, td.name); td.archiveFile = transZipFile; td.delimiter = pc.getString(LINE_DELIMITER_ATTR, "\n"); String sig = pc.getString(SIGNATURE_ATTR); td.signature = sig == null ? null : Base64.decodeBase64(sig.getBytes("US-ASCII")); //create a LocalizedInstance for this translation. // <patch> LocalizedResource localizedResource = new LocalizedResource(); localizedResource.loadLocalizedNames(pc, NAME_ATTR); localizedResource.setLanguage(td.locale.getLanguage()); td.setLocalizedResource(localizedResource); td.setFile(transZipFile); // </patch> if (StringUtils.isBlank(td.id) || StringUtils.isBlank(td.name) || StringUtils.isBlank(td.file) || StringUtils.isBlank(td.version)) { logger.warn("Invalid translation: \"" + td + "\"."); return null; } if (zipFile.getEntry(td.file) == null) { logger.warn("Invalid translation format. File not exists in the archive: " + td.file); return null; } } finally { if (zipFile != null) { ZipUtils.closeQuietly(zipFile); } } return td; }
From source file:nl.ordina.bag.etl.loader.ExtractLoader.java
protected void processBAGExtractFile(BAGExtractLevering levering, ZipFile zipFile) throws ProcessorException { //final Map<String,String> files = getFiles(levering); final Map<String, String> files = getFiles(zipFile); BAGObjectType[] objectTypes = new BAGObjectType[] { BAGObjectType.WOONPLAATS, BAGObjectType.OPENBARE_RUIMTE, BAGObjectType.NUMMERAANDUIDING, BAGObjectType.PAND, BAGObjectType.VERBLIJFSOBJECT, BAGObjectType.LIGPLAATS, BAGObjectType.STANDPLAATS }; for (final BAGObjectType objectType : objectTypes) { try {/*from w w w .j ava 2 s .c o m*/ String filename = files.get(objectType.getCode()); ZipEntry entry = zipFile.getEntry(filename); skipObjects = bagDAO.getCount(objectType); if (skipObjects > 0) logger.info("Skipping " + skipObjects + " object" + (skipObjects > 1 ? "en" : "") + " (" + objectType.toString() + ")"); ZipStreamReader zipStreamReader = new ZipStreamReader() { @Override public void handle(String filename, InputStream stream) throws IOException { if (filename.matches("\\d{4}(WPL|OPR|NUM|PND|VBO|LIG|STA)\\d{8}-\\d{6}\\.xml")) { logger.info("Processing file " + filename + " started"); processXML(stream); logger.info("Processing file " + filename + " finished"); } else logger.info("Skipping file " + filename); } }; logger.info("Processing file " + filename + " started"); zipStreamReader.read(zipFile.getInputStream(entry)); logger.info("Processing file " + filename + " finished"); } catch (DAOException e) { throw new ProcessingException(e); } catch (IOException e) { throw new ProcessingException(e); } } }
From source file:com.seajas.search.contender.service.modifier.ArchiveModifierService.java
/** * Store the not-already-cached files from the given URL and return their locations. * //from w w w .j a v a 2s. c om * @param archive * @return Map<File, String> */ private Map<File, String> storeAndDecompressFiles(final Archive archive) { Map<File, String> result = new HashMap<File, String>(); // Create the FTP client FTPClient ftpClient = retrieveFtpClient(archive.getUri()); try { // Retrieve the directory listing List<ArchiveFile> files = retrieveFiles(archive.getUri().getPath(), ftpClient, archive.getExclusionExpression()); Integer archiveNumber = -1, archiveTotal = files.size(); logger.info("Archive with name '" + archive.getName() + "' produced " + archiveTotal + " files"); // An empty archive typically indicates failure if (archiveTotal == 0) logger.warn("The given archive produced no entries - something probably went wrong"); // Handle all archive files for (ArchiveFile archiveFile : files) { archiveNumber++; // Check whether the file already exists within the cache String baseUrl = (StringUtils.hasText(archive.getUri().getScheme()) ? archive.getUri().getScheme() + "://" : "") + archive.getUri().getHost() + (archive.getUri().getPort() != -1 ? 
":" + archive.getUri().getPort() : ""); if (!cacheService.isArchived(baseUrl + archiveFile.getFullPath())) { logger.info("Started decompressing archive " + archiveNumber + "/" + archiveTotal + " with name " + archiveFile.getFullPath()); // Write out the archive to disk so we can determine the MIME type File archiveFileFolder = new File(archiveFile .getTranslatedPath(packagesLocation.replace("%1", String.valueOf(archive.getId())))); if (!archiveFileFolder.exists()) archiveFileFolder.mkdirs(); File archiveFileLocation = new File(archiveFileFolder, archiveFile.getFile().getName()); InputStream archiveInputStream = ftpClient.retrieveFileStream(archiveFile.getFullPath()); OutputStream archiveOutputStream = new FileOutputStream(archiveFileLocation); IOUtils.copy(archiveInputStream, archiveOutputStream); archiveInputStream.close(); ftpClient.completePendingCommand(); archiveOutputStream.flush(); archiveOutputStream.close(); // Now unpack the archive and transform each file InputStream compressedArchiveInputStream = new FileInputStream(archiveFileLocation); // Now determine the content type and create a reader in case of structured content MediaType archiveMediaType = autoDetectParser.getDetector() .detect(new BufferedInputStream(compressedArchiveInputStream), new Metadata()); if (!(archiveMediaType.getType().equals("application") && archiveMediaType.getSubtype().equals("zip"))) { logger.warn("Archive file " + archiveFile.getFullPath() + " contains " + archiveMediaType + " data, which is not yet supported"); compressedArchiveInputStream.close(); continue; } else compressedArchiveInputStream.close(); // Create a new ZIP file from the given archive and decompress it ZipFile zipFile = new ZipFile(archiveFileLocation); File resultsLocationFolder = new File(archiveFile .getTranslatedPath(resultsLocation.replace("%1", String.valueOf(archive.getId())))); if (!resultsLocationFolder.exists()) resultsLocationFolder.mkdirs(); File resultsLocation = new File(resultsLocationFolder, 
stripExtension(archiveFile.getFile().getName())); if (!resultsLocation.exists()) resultsLocation.mkdirs(); logger.info("Started processing archive with name " + archiveFile.getFullPath()); Enumeration<? extends ZipEntry> zipEnumerator = zipFile.entries(); while (zipEnumerator.hasMoreElements()) { ZipEntry entry = zipEnumerator.nextElement(); // Store it locally first File entryLocation = new File(resultsLocation, entry.getName()); try { InputStream entryInputStream = zipFile.getInputStream(entry); OutputStream entryOutputStream = new FileOutputStream(entryLocation); IOUtils.copy(entryInputStream, entryOutputStream); entryInputStream.close(); entryOutputStream.close(); } catch (IOException e) { logger.error("Could not store the compressed archive entry on disk", e); continue; } } zipFile.close(); // Add it to the results result.put(resultsLocation, baseUrl + archiveFile.getFullPath()); logger.info("Finished processing archive with name " + archiveFile.getFullPath()); } else if (logger.isDebugEnabled()) logger.debug("Skipping previously processed archive with name " + archiveFile.getFullPath()); } } catch (IOException e) { logger.error("Could not close input stream during archive processing", e); } finally { try { if (ftpClient.isConnected()) ftpClient.disconnect(); } catch (IOException e) { logger.error("Could not disconnect the FTP client", e); } } return result; }