List of usage examples for java.util.zip.ZipFile.getEntry
public ZipEntry getEntry(String name)
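getEntry returns the zip file entry for the specified name, or null when the archive contains no such entry, so the result must be null-checked before use. Before the project examples below, here is a minimal self-contained sketch of that contract; the archive path and entry names are hypothetical placeholders, not taken from any example on this page.

import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class GetEntryDemo {
    public static void main(String[] args) throws IOException {
        // "archive.zip" and both entry names are hypothetical placeholders
        try (ZipFile zip = new ZipFile("archive.zip")) {
            // getEntry returns null when no entry matches the given name
            ZipEntry entry = zip.getEntry("META-INF/MANIFEST.MF");
            if (entry == null) {
                System.out.println("entry not found");
            } else {
                System.out.println(entry.getName() + " (" + entry.getSize() + " bytes)");
            }
            // directory entries conventionally carry a trailing slash
            ZipEntry dir = zip.getEntry("META-INF/");
            System.out.println(dir != null && dir.isDirectory());
        }
    }
}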
From source file:com.flexive.core.storage.GenericDivisionImporter.java
/**
 * Get a file from the zip archive.
 *
 * @param zip  zip archive containing the file
 * @param file name of the file
 * @return ZipEntry
 * @throws FxNotFoundException if the archive does not contain the file
 */
protected ZipEntry getZipEntry(ZipFile zip, String file) throws FxNotFoundException {
    ZipEntry ze = zip.getEntry(file);
    if (ze == null)
        throw new FxNotFoundException("ex.import.missingFile", file, zip.getName());
    return ze;
}
From source file:fr.gouv.finances.dgfip.xemelios.importers.archives.ArchiveImporter.java
public Errors doImport() {
    Errors errors = new Errors();
    try {
        ZipFile zipArchive = new ZipFile(fileToImport);
        ZipEntry manifesteEntry = zipArchive.getEntry(MANIFESTE_FILE_NAME);
        archiveManifeste = getManisfesteFromArchive(zipArchive.getInputStream(manifesteEntry));
        archiveManifeste.getRootElement()
                .addAttribute(new Attribute("archive-name", getArchiveName(fileToImport)));
        zipArchive.close();
        HashMap<String, Object> importProperties = extractPropertiesFromArchiveManifeste(archiveManifeste);
        for (String docType : (String[]) importProperties.get("archiveDocumentTypes")) {
            if (!docType.equals("PJ")
                    && !DataLayerManager.getImplementation().canImportDocument(docType, getUser())) {
                errors.addError(Errors.SEVERITY_WARNING, "Impossible d'importer ce type de document ("
                        + docType + "), la base de données doit d'abord être mise à jour.");
            }
        }
        importedArchiveManifeste = DataLayerManager.getImplementation()
                .getManifesteFromArchive(importProperties.get("archiveName").toString(), getUser());
        definePropertiesFromImportedManifeste(importedArchiveManifeste, importProperties);
        Element historique = null;
        if (importedArchiveManifeste != null) {
            historique = (Element) importedArchiveManifeste
                    .query("/m:manifeste/m:evenements", getNamespaceCtx()).get(0);
            // copy the element so it has no parent and can be attached wherever we want
            historique = new Element(historique);
        } else {
            historique = new Element("evenements");
        }
        archiveManifeste.getRootElement().appendChild(historique);
        boolean sectionApplied = false;
        for (SectionModel section : rules.getSections()) {
            SectionModel theSection = section;
            if (theSection.getPredicat().matches(importProperties)) {
                logger.info("Application de la règle " + theSection.getName());
                int globalOverwriteRule = OVERWRITE_RULE_UNSET;
                if (theSection.getActions().isUsePreviousSection()) {
                    // then an imported manifest necessarily exists; go fetch its section
                    Element sectionElement = (Element) importedArchiveManifeste
                            .query("/m:manifeste/rul:section", getNamespaceCtx()).get(0);
                    if (sectionElement == null)
                        throw new ImportException(new Errors.Error(Errors.SEVERITY_ERROR,
                                "la section " + theSection.getName()
                                        + " impose l'application de la section du précédent import, mais celui-ci n'a pas été trouvé."),
                                null);
                    theSection = new SectionModel(sectionElement);
                    // and remove all the archive's data
                    HashMap<String, DocumentModel> docsToDrop = new HashMap<String, DocumentModel>();
                    for (String docId : (String[]) importProperties.get("archiveImportedDocumentTypes")) {
                        docsToDrop.put(docId, documentsModel.getDocumentById(docId));
                    }
                    DataLayerManager.getImplementation().removeArchive(docsToDrop,
                            importProperties.get("archiveName").toString(), getUser());
                    Nodes deleteActions = importedArchiveManifeste
                            .query("/m:manifeste/m:on-delete/m:action", getNamespaceCtx());
                    for (int i = 0; i < deleteActions.size(); i++) {
                        Element action = (Element) deleteActions.get(i);
                        doApplyAction(action);
                    }
                    // at this point there is nothing to update in the manifest: all documents have been removed
                } else {
                    if (importedArchiveManifeste != null) {
                        // the history of each document must be carried over
                        Nodes importedDocuments = importedArchiveManifeste.query("//m:document",
                                getNamespaceCtx());
                        for (int i = 0; i < importedDocuments.size(); i++) {
                            Element importedDoc = (Element) importedDocuments.get(i);
                            Element thisDoc = getElement(
                                    archiveManifeste.query("/manifeste/documents/document[@path='"
                                            + importedDoc.getAttributeValue("path") + "']"));
                            if (thisDoc != null) {
                                String __imported = importedDoc.getAttributeValue("imported");
                                thisDoc.addAttribute(new Attribute("imported", __imported));
                                if ("Oui".equals(__imported)) {
                                    Element result = getElement(
                                            importedDoc.query("m:resultatimport", getNamespaceCtx()));
                                    if (result != null)
                                        thisDoc.appendChild(new Element(result));
                                }
                            }
                        }
                    }
                }
                if (theSection.getOverwriteRule() != null) {
                    if (EtatImporteur.OVERWRITE_RULE_ALWAYS.equals(theSection.getOverwriteRule()))
                        globalOverwriteRule = OVERWRITE_RULE_OVERWRITE;
                    else if (EtatImporteur.OVERWRITE_RULE_NEVER.equals(theSection.getOverwriteRule()))
                        globalOverwriteRule = OVERWRITE_RULE_SKIP;
                }
                // fetch the list of documents to import, so we can check that they are all available
                List<Element> documentsToImport = theSection.getDocumentsToImport(archiveManifeste,
                        applicationProperties);
                if (documentsToImport.size() > 0) {
                    TreeSet<String> volumesRequired = new TreeSet<String>();
                    for (Element filePlannedToImport : documentsToImport) {
                        volumesRequired.add(filePlannedToImport.getAttributeValue("volume"));
                    }
                    int maxVolumes = Integer.parseInt(volumesRequired.last());
                    File[] volumes = new File[maxVolumes + 1];
                    for (String volume : volumesRequired) {
                        String fichierVolume = archiveManifeste
                                .query("/manifeste/volumes/volume[@num=" + volume + "]/@fichier").get(0)
                                .getValue();
                        File f = new File(fileToImport.getParentFile(), fichierVolume);
                        if (!f.exists()) {
                            errors.addError(Errors.SEVERITY_ERROR, f.getAbsolutePath() + " non trouvé");
                        }
                        volumes[Integer.parseInt(volume)] = f;
                    }
                    if (!errors.containsError()) {
                        logger.info("displayProgress(" + (documentsToImport.size() + 1) + ")");
                        // open a door here so the ArchiveImporteur can make modifications
                        preImport(theSection, archiveManifeste);
                        importServiceProvider.displayProgress(documentsToImport.size() + 1);
                        boolean doImport = false;
                        boolean doDelete = false;
                        // process the actions
                        for (XomDefinitionable dd : theSection.getActions().getActions()) {
                            if (dd instanceof ImportModel) {
                                //ImportModel im = (ImportModel)dd;
                                // the list of files to import has already been determined, so import them
                                for (Element documentToImport : documentsToImport) {
                                    int vol = Integer.parseInt(documentToImport.getAttributeValue("volume"));
                                    try {
                                        FileInfo fileInfo = doImportDocument(documentToImport, volumes[vol],
                                                importProperties, globalOverwriteRule);
                                        if (fileInfo.getInProcessException() != null)
                                            errors.addError(Errors.SEVERITY_ERROR,
                                                    fileInfo.getInProcessException().getMessage());
                                        if (__fileInfo == null)
                                            __fileInfo = fileInfo;
                                        else
                                            __fileInfo.merge(fileInfo);
                                        if (fileInfo.getGlobalCount() == 0) {
                                            // nothing was imported, probably because the overwrite rule
                                            // said not to import; so nothing is modified
                                        } else {
                                            Element result = new Element("resultatimport");
                                            result.addAttribute(new Attribute("Duree",
                                                    DateUtils.durationToString(fileInfo.getDurationImport())));
                                            result.addAttribute(new Attribute("Debut", DateUtils
                                                    .formatXsDateTime(new Date(fileInfo.getDebutImport()))));
                                            result.addAttribute(new Attribute("Fin", DateUtils
                                                    .formatXsDateTime(new Date(fileInfo.getFinImport()))));
                                            // remove the previous import result, if there was one...
                                            Nodes previousResults = documentToImport.query(
                                                    "resultatimport | m:resultatimport", getNamespaceCtx());
                                            for (int i = previousResults.size() - 1; i >= 0; i--) {
                                                Element __res = (Element) previousResults.get(i);
                                                documentToImport.removeChild(__res);
                                            }
                                            documentToImport.insertChild(result, 0);
                                            documentToImport.addAttribute(new Attribute("imported", "Oui"));
                                        }
                                        // apply any actions that depend on this document
                                        Nodes actions = archiveManifeste.query("/manifeste/action[@depends-on='"
                                                + documentToImport.getAttributeValue("path") + "']");
                                        for (int i = 0; i < actions.size(); i++) {
                                            Element action = (Element) actions.get(i);
                                            try {
                                                FileInfo actFileInfo = doApplyAction(action);
                                                if (__fileInfo == null)
                                                    __fileInfo = actFileInfo;
                                                else
                                                    __fileInfo.merge(actFileInfo);
                                            } catch (Exception ex) {
                                                logger.error("while applying " + action.toXML(), ex);
                                            }
                                        }
                                    } catch (Exception ex) {
                                        logger.error("while importing "
                                                + documentToImport.getAttributeValue("path"), ex);
                                        documentToImport.addAttribute(
                                                new Attribute("imported", "Erreur: " + ex.getMessage()));
                                    }
                                }
                                doImport = true;
                            } else if (dd instanceof DeleteModel) {
                                importServiceProvider.startLongWait();
                                DeleteModel dm = (DeleteModel) dd;
                                if (dm.getArchive() != null) {
                                    String archiveName = null;
                                    if ("archiveName".equals(dm.getArchive())) {
                                        // replace with the archive's name
                                        archiveName = importProperties.get("archiveName").toString();
                                        // for now, deleting a different archive is not allowed
                                        HashMap<String, DocumentModel> map = new HashMap<String, DocumentModel>();
                                        for (String s : (String[]) importProperties.get("archiveDocumentTypes")) {
                                            map.put(s, documentsModel.getDocumentById(s));
                                        }
                                        DataLayerManager.getImplementation().removeArchive(map, archiveName,
                                                getUser());
                                        Nodes documents = archiveManifeste.query("/manifeste/documents/document");
                                        for (int i = 0; i < documents.size(); i++) {
                                            Element doc = (Element) documents.get(i);
                                            Element resultImport = getElement(doc.query(
                                                    "m:resultatimport | resultatimport", getNamespaceCtx()));
                                            if (resultImport != null)
                                                doc.removeChild(resultImport);
                                            doc.addAttribute(new Attribute("imported", "Non"));
                                        }
                                        // apply all the actions, since all documents have been removed
                                        Nodes actions = archiveManifeste
                                                .query("/manifeste/on-delete/action[@depends-on]");
                                        for (int i = 0; i < actions.size(); i++) {
                                            Element action = (Element) actions.get(i);
                                            try {
                                                FileInfo fileInfo = doApplyAction(action);
                                                if (__fileInfo == null)
                                                    __fileInfo = fileInfo;
                                                else
                                                    __fileInfo.merge(fileInfo);
                                            } catch (Exception ex) {
                                                logger.error("while applying " + action.toXML(), ex);
                                            }
                                        }
                                    } else if (dm.getTypeDoc() != null) {
                                        if (dm.getCollectivite() != null) {
                                            if (dm.getBudget() != null) {
                                                if (dm.getFileName() != null) {
                                                    DataLayerManager.getImplementation().removeDocument(
                                                            documentsModel.getDocumentById(dm.getTypeDoc()),
                                                            new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                            new Pair(dm.getBudget(), dm.getBudget()),
                                                            dm.getFileName(), user);
                                                    Nodes documents = archiveManifeste
                                                            .query("/manifeste/documents/document[@type='"
                                                                    + dm.getTypeDoc() + "' and @buIdCol='"
                                                                    + dm.getCollectivite() + "' and @buCode='"
                                                                    + dm.getBudget() + "' and ends-with(@path,'"
                                                                    + dm.getFileName() + "')]");
                                                    for (int i = 0; i < documents.size(); i++) {
                                                        Element doc = (Element) documents.get(i);
                                                        Element resultImport = getElement(
                                                                doc.query("m:resultatimport | resultatimport",
                                                                        getNamespaceCtx()));
                                                        if (resultImport != null)
                                                            doc.removeChild(resultImport);
                                                        doc.addAttribute(new Attribute("imported", "Non"));
                                                    }
                                                    // apply the document's actions
                                                    Nodes actions = archiveManifeste.query(
                                                            "/manifeste/on-delete/action[ends-with(@depends-on,'"
                                                                    + dm.getFileName() + "')]");
                                                    for (int i = 0; i < actions.size(); i++) {
                                                        Element action = (Element) actions.get(i);
                                                        try {
                                                            FileInfo fileInfo = doApplyAction(action);
                                                            if (__fileInfo == null)
                                                                __fileInfo = fileInfo;
                                                            else
                                                                __fileInfo.merge(fileInfo);
                                                        } catch (Exception ex) {
                                                            logger.error("while applying " + action.toXML(), ex);
                                                        }
                                                    }
                                                } else {
                                                    DataLayerManager.getImplementation().removeBudget(
                                                            documentsModel.getDocumentById(dm.getTypeDoc()),
                                                            new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                            new Pair(dm.getBudget(), dm.getBudget()), user);
                                                    Nodes documents = archiveManifeste
                                                            .query("/manifeste/documents/document[@type='"
                                                                    + dm.getTypeDoc() + "' and @buIdCol='"
                                                                    + dm.getCollectivite() + "' and @buCode='"
                                                                    + dm.getBudget() + "']");
                                                    for (int i = 0; i < documents.size(); i++) {
                                                        Element doc = (Element) documents.get(i);
                                                        Element resultImport = getElement(
                                                                doc.query("m:resultatimport | resultatimport",
                                                                        getNamespaceCtx()));
                                                        if (resultImport != null)
                                                            doc.removeChild(resultImport);
                                                        doc.addAttribute(new Attribute("imported", "Non"));
                                                        // apply the document's actions
                                                        Nodes actions = archiveManifeste.query(
                                                                "/manifeste/on-delete/action[@depends-on='"
                                                                        + doc.getAttributeValue("path") + "']");
                                                        for (int a = 0; a < actions.size(); a++) {
                                                            Element action = (Element) actions.get(a);
                                                            try {
                                                                FileInfo fileInfo = doApplyAction(action);
                                                                if (__fileInfo == null)
                                                                    __fileInfo = fileInfo;
                                                                else
                                                                    __fileInfo.merge(fileInfo);
                                                            } catch (Exception ex) {
                                                                logger.error("while applying " + action.toXML(), ex);
                                                            }
                                                        }
                                                    }
                                                }
                                            } else {
                                                DataLayerManager.getImplementation().removeCollectivite(
                                                        documentsModel.getDocumentById(dm.getTypeDoc()),
                                                        new Pair(dm.getCollectivite(), dm.getCollectivite()), user);
                                                Nodes documents = archiveManifeste
                                                        .query("/manifeste/documents/document[@type='"
                                                                + dm.getTypeDoc() + "' and @buIdCol='"
                                                                + dm.getCollectivite() + "']");
                                                for (int i = 0; i < documents.size(); i++) {
                                                    Element doc = (Element) documents.get(i);
                                                    Element resultImport = getElement(
                                                            doc.query("m:resultatimport | resultatimport",
                                                                    getNamespaceCtx()));
                                                    if (resultImport != null)
                                                        doc.removeChild(resultImport);
                                                    doc.addAttribute(new Attribute("imported", "Non"));
                                                    // apply the document's actions
                                                    Nodes actions = archiveManifeste.query(
                                                            "/manifeste/on-delete/action[@depends-on='"
                                                                    + doc.getAttributeValue("path") + "']");
                                                    for (int a = 0; a < actions.size(); a++) {
                                                        Element action = (Element) actions.get(a);
                                                        try {
                                                            FileInfo fileInfo = doApplyAction(action);
                                                            if (__fileInfo == null)
                                                                __fileInfo = fileInfo;
                                                            else
                                                                __fileInfo.merge(fileInfo);
                                                        } catch (Exception ex) {
                                                            logger.error("while applying " + action.toXML(), ex);
                                                        }
                                                    }
                                                }
                                            }
                                        } else {
                                            DataLayerManager.getImplementation().removeDocumentModel(
                                                    documentsModel.getDocumentById(dm.getTypeDoc()), user);
                                            Nodes documents = archiveManifeste.query(
                                                    "/manifeste/documents/document[@type='" + dm.getTypeDoc() + "']");
                                            for (int i = 0; i < documents.size(); i++) {
                                                Element doc = (Element) documents.get(i);
                                                Element resultImport = getElement(
                                                        doc.query("m:resultatimport | resultatimport",
                                                                getNamespaceCtx()));
                                                if (resultImport != null)
                                                    doc.removeChild(resultImport);
                                                doc.addAttribute(new Attribute("imported", "Non"));
                                                // apply the document's actions
                                                Nodes actions = archiveManifeste.query(
                                                        "/manifeste/on-delete/action[@depends-on='"
                                                                + doc.getAttributeValue("path") + "']");
                                                for (int a = 0; a < actions.size(); a++) {
                                                    Element action = (Element) actions.get(a);
                                                    try {
                                                        FileInfo fileInfo = doApplyAction(action);
                                                        if (__fileInfo == null)
                                                            __fileInfo = fileInfo;
                                                        else
                                                            __fileInfo.merge(fileInfo);
                                                    } catch (Exception ex) {
                                                        logger.error("while applying " + action.toXML(), ex);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                    doDelete = true;
                                }
                                importServiceProvider.endLongWait();
                            }
                        }
                        if (doImport) {
                            // For compatibility with pre-2011 archives, process all actions that are
                            // not on-delete and have no depends-on
                            Nodes actions = archiveManifeste.query("/manifeste/action[not(@depends-on)]");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                        if (doImport) {
                            // For edmn patches, apply the actions
                            Nodes actions = archiveManifeste.query("/manifeste/actions/action");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                        if (doDelete) {
                            // For compatibility with pre-2011 archives, process all on-delete actions
                            // that have no depends-on
                            Nodes actions = archiveManifeste
                                    .query("/manifeste/on-delete/action[not(@depends-on)]");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                    }
                    // determine here whether there is any data
                    if (archiveManifeste.query("/manifeste/documents/document[@imported='Oui']").size() > 0)
                        archiveManifeste.getRootElement()
                                .addAttribute(new Attribute("added:archive", Constants.ADDED_NS_URI, "Oui"));
                    else
                        archiveManifeste.getRootElement()
                                .addAttribute(new Attribute("added:archive", Constants.ADDED_NS_URI, "Non"));
                    // record the actions we performed in the manifest,
                    // so we know what to redo later...
                    archiveManifeste.getRootElement().appendChild(theSection.getXomDefinition());
                    sectionApplied = true;
                    break;
                } else {
                    // there was nothing to import, but the section was still applied
                    sectionApplied = true;
                    break;
                }
            }
        }
        if (sectionApplied) {
            // all sections have been processed; maybe nothing happened at all...
            boolean somethingHasBeenImported = false;
            Nodes nodes = archiveManifeste.query("//document[@imported]");
            somethingHasBeenImported = nodes.size() > 0;
            // find all documents that were not processed, and set @imported='false' on them
            nodes = archiveManifeste.query("//document[not(@imported)]");
            for (int i = 0; i < nodes.size(); i++) {
                Element el = (Element) nodes.get(i);
                el.addAttribute(new Attribute("imported", "Non"));
            }
            archiveManifeste.getRootElement()
                    .addAttribute(new Attribute("imported", Boolean.toString(somethingHasBeenImported)));
            Element result = new Element("resultatimport");
            if (result != null) { // you never know... in case of a deletion, for example...
                if (__fileInfo != null) {
                    result.addAttribute(new Attribute("Duree",
                            DateUtils.durationToString(__fileInfo.getDurationImport())));
                    result.addAttribute(new Attribute("Debut",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getDebutImport()))));
                    result.addAttribute(new Attribute("Fin",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getFinImport()))));
                    result.addAttribute(new Attribute("User", getUser().getId()));
                    result.addAttribute(new Attribute("LastModify",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getLastModify()))));
                    result.appendChild(__fileInfo.toXomXml(documentsModel));
                }
            }
            archiveManifeste.getRootElement().appendChild(result);
            // the import history
            Element event = new Element("evenement");
            event.addAttribute(new Attribute("date", DateUtils.formatXsDateTime(new Date())));
            event.addAttribute(new Attribute("user", getUser().getId()));
            event.addAttribute(new Attribute("section", archiveManifeste
                    .query("/manifeste/rul:section/@name", getNamespaceCtx()).get(0).getValue()));
            String version = archiveManifeste.getRootElement().getAttributeValue("version");
            if (version == null || version.length() == 0)
                version = "1";
            event.addAttribute(new Attribute("version-archive", version));
            historique.insertChild(event, 0);
            doImportManifeste(archiveManifeste, importProperties.get("archiveName").toString());
            DataLayerManager.getImplementation()
                    .declareArchiveImported(importProperties.get("archiveName").toString(), user);
            // System.out.println(archiveManifeste.toXML());
        } else {
            errors.addError(Errors.SEVERITY_WARNING,
                    "Cette archive ne peut être importée par aucune des règles de cette configuration.");
        }
    } catch (XPathExpressionException ex) {
        logger.error(ex.getMessage(), ex);
        errors.addError(Errors.SEVERITY_ERROR, ex.getMessage());
    } catch (IOException ioEx) {
        logger.error(fileToImport.getAbsolutePath() + ": " + ioEx.getMessage(), ioEx);
        errors.addError(Errors.SEVERITY_ERROR, ioEx.getMessage());
    } catch (ImportException iEx) {
        Errors.Error error = iEx.error;
        errors.addError(error);
    } catch (DataConfigurationException dcEx) {
        logger.error(dcEx.getMessage(), dcEx);
        errors.addError(Errors.SEVERITY_ERROR, dcEx.getMessage());
    } catch (DataAccessException daEx) {
        logger.error(daEx.getMessage(), daEx);
        errors.addError(Errors.SEVERITY_ERROR, daEx.getMessage());
    } catch (UnauthorizedException ex) {
        logger.error(ex.getMessage(), ex);
        errors.addError(Errors.SEVERITY_ERROR, ex.getMessage());
    } catch (Throwable t) {
        t.printStackTrace();
        errors.addError(Errors.SEVERITY_ERROR, t.getMessage());
    } finally {
        // try { zipArchive.close(); } catch (Exception ex) {}
    }
    return errors;
}
From source file:org.zeroturnaround.zip.ZipUtil.java
/**
 * @param zip   zip file to traverse
 * @param names names of entries to filter dirs from
 * @return Set<String> names of entries that are dirs.
 */
static Set filterDirEntries(File zip, Collection names) {
    Set dirs = new HashSet();
    if (zip == null) {
        return dirs;
    }
    ZipFile zf = null;
    try {
        zf = new ZipFile(zip);
        Iterator iterator = names.iterator();
        while (iterator.hasNext()) {
            String entryName = (String) iterator.next();
            ZipEntry entry = zf.getEntry(entryName);
            if (entry == null) {
                // getEntry() returns null for names missing from the archive; skip them
                continue;
            }
            if (entry.isDirectory()) {
                dirs.add(entry.getName());
            } else if (zf.getInputStream(entry) == null) {
                // no input stream means that this is a dir.
                dirs.add(entry.getName() + PATH_SEPARATOR);
            }
        }
    } catch (IOException e) {
        ZipExceptionUtil.rethrow(e);
    } finally {
        closeQuietly(zf);
    }
    return dirs;
}
From source file:org.nuxeo.build.ant.ZipDiffTask.java
@Override
public void execute() throws BuildException {
    ZipFile zipfile1 = null;
    ZipFile zipfile2 = null;
    try {
        zipfile1 = new ZipFile(file1);
        zipfile2 = new ZipFile(file2);
        Set<String> set1 = new LinkedHashSet<>();
        for (Enumeration<? extends ZipEntry> zipEntries = zipfile1.entries(); zipEntries.hasMoreElements();) {
            set1.add((zipEntries.nextElement()).getName());
        }
        Set<String> set2 = new LinkedHashSet<>();
        for (Enumeration<? extends ZipEntry> zipEntries = zipfile2.entries(); zipEntries.hasMoreElements();) {
            set2.add((zipEntries.nextElement()).getName());
        }
        try {
            if (includesfile != null) {
                includesfile.createNewFile();
                fileWriter = new FileWriter(includesfile);
            }
            // includes (files from file1 not present, or differing, in file2)
            for (Iterator<String> i = set1.iterator(); i.hasNext();) {
                String filename = i.next();
                if (!set2.contains(filename)) {
                    log("Only in " + file1.getName() + ": " + filename, Project.MSG_INFO);
                    include(filename, fileWriter);
                    continue;
                }
                set2.remove(filename);
                if (!ignoreContent && !filename.matches(ignoreContentPattern)) {
                    try {
                        if (!IOUtils.contentEquals(zipfile1.getInputStream(zipfile1.getEntry(filename)),
                                zipfile2.getInputStream(zipfile2.getEntry(filename)))) {
                            log("Content differs: " + filename, Project.MSG_INFO);
                            include(filename, fileWriter);
                        }
                    } catch (IOException e) {
                        log(e, Project.MSG_WARN);
                    }
                }
            }
        } catch (IOException e) {
            throw new BuildException(e);
        } finally {
            IOUtils.closeQuietly(fileWriter);
        }
        // excludes (files from file2 not present in file1)
        try {
            if (excludesfile != null) {
                excludesfile.createNewFile();
                fileWriter = new FileWriter(excludesfile);
            }
            for (Iterator<String> i = set2.iterator(); i.hasNext();) {
                String filename = i.next();
                log("Only in " + file2.getName() + ": " + filename, Project.MSG_INFO);
                exclude(filename, fileWriter);
            }
        } catch (IOException e) {
            throw new BuildException(e);
        } finally {
            IOUtils.closeQuietly(fileWriter);
        }
    } catch (IOException e) {
        throw new BuildException("Error opening " + file1 + " or " + file2, e);
    } finally {
        if (zipfile1 != null) {
            try {
                zipfile1.close();
            } catch (IOException e) {
                throw new BuildException(e);
            }
        }
        if (zipfile2 != null) {
            try {
                zipfile2.close();
            } catch (IOException e) {
                throw new BuildException(e);
            }
        }
    }
}
From source file:com.redhat.ceylon.compiler.java.test.cmr.CMRTests.java
@Test
public void testMdlSuppressObsoleteClasses() throws IOException {
    File sourceFile = new File(getPackagePath(), "modules/single/SuppressClass.ceylon");
    copy(new File(getPackagePath(), "modules/single/SuppressClass_1.ceylon"), sourceFile);
    CeyloncTaskImpl compilerTask = getCompilerTask("modules/single/module.ceylon",
            "modules/single/SuppressClass.ceylon");
    Boolean success = compilerTask.call();
    assertTrue(success);
    File carFile = getModuleArchive("com.redhat.ceylon.compiler.java.test.cmr.modules.single", "6.6.6");
    assertTrue(carFile.exists());
    ZipFile car = new ZipFile(carFile);
    ZipEntry oneClass = car.getEntry("com/redhat/ceylon/compiler/java/test/cmr/modules/single/One.class");
    assertNotNull(oneClass);
    ZipEntry twoClass = car.getEntry("com/redhat/ceylon/compiler/java/test/cmr/modules/single/Two.class");
    assertNotNull(twoClass);
    car.close();
    copy(new File(getPackagePath(), "modules/single/SuppressClass_2.ceylon"), sourceFile);
    compilerTask = getCompilerTask("modules/single/module.ceylon", "modules/single/SuppressClass.ceylon");
    success = compilerTask.call();
    assertTrue(success);
    carFile = getModuleArchive("com.redhat.ceylon.compiler.java.test.cmr.modules.single", "6.6.6");
    assertTrue(carFile.exists());
    car = new ZipFile(carFile);
    oneClass = car.getEntry("com/redhat/ceylon/compiler/java/test/cmr/modules/single/One.class");
    assertNotNull(oneClass);
    twoClass = car.getEntry("com/redhat/ceylon/compiler/java/test/cmr/modules/single/Two.class");
    assertNull(twoClass);
    car.close();
    sourceFile.delete();
}
From source file:org.nuxeo.ecm.platform.filemanager.service.extension.CSVZipImporter.java
protected Serializable getFieldValue(Field field, String stringValue, ZipFile zip) {
    Serializable fieldValue = null;
    Type type = field.getType();
    if (type.isSimpleType()) {
        if (type instanceof SimpleTypeImpl) {
            // consider super type instead
            type = type.getSuperType();
        }
        if (type instanceof StringType) {
            fieldValue = stringValue;
        } else if (type instanceof IntegerType) {
            fieldValue = Integer.parseInt(stringValue);
        } else if (type instanceof LongType) {
            fieldValue = Long.parseLong(stringValue);
        } else if (type instanceof DateType) {
            try {
                Date date;
                if (stringValue.length() == 10) {
                    date = new SimpleDateFormat("dd/MM/yyyy").parse(stringValue);
                } else if (stringValue.length() == 8) {
                    date = new SimpleDateFormat("dd/MM/yy").parse(stringValue);
                } else {
                    log.warn("Unknown date format :" + stringValue);
                    return null;
                }
                fieldValue = date;
            } catch (ParseException e) {
                log.error("Error during date parsing", e);
            }
        } else {
            log.warn(String.format("Unsupported field type '%s'", type));
            return null;
        }
    } else if (type.isComplexType()) {
        if (TypeConstants.CONTENT.equals(field.getName().getLocalName())) {
            ZipEntry blobIndex = zip.getEntry(stringValue);
            if (blobIndex != null) {
                Blob blob;
                try {
                    blob = Blobs.createBlob(zip.getInputStream(blobIndex));
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
                blob.setFilename(stringValue);
                fieldValue = (Serializable) blob;
            }
        }
    }
    return fieldValue;
}
From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java
/**
 * This method restores a ParagraphVectors model previously saved with writeParagraphVectors().
 *
 * @return the restored ParagraphVectors model
 */
public static ParagraphVectors readParagraphVectors(File file) throws IOException {
    File tmpFileL = File.createTempFile("paravec", "l");
    tmpFileL.deleteOnExit();
    Word2Vec w2v = readWord2Vec(file);
    // and "convert" it to a ParaVec model, optionally trying to restore labels information
    ParagraphVectors vectors = new ParagraphVectors.Builder(w2v.getConfiguration()).vocabCache(w2v.getVocab())
            .lookupTable(w2v.getLookupTable()).resetModel(false).build();
    ZipFile zipFile = new ZipFile(file);
    // now we try to restore labels information
    ZipEntry labels = zipFile.getEntry("labels.txt");
    if (labels != null) {
        InputStream stream = zipFile.getInputStream(labels);
        Files.copy(stream, Paths.get(tmpFileL.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        try (BufferedReader reader = new BufferedReader(new FileReader(tmpFileL))) {
            String line;
            while ((line = reader.readLine()) != null) {
                VocabWord word = vectors.getVocab().tokenFor(decodeB64(line.trim()));
                if (word != null) {
                    word.markAsLabel(true);
                }
            }
        }
    }
    vectors.extractLabels();
    return vectors;
}
From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java
/**
 * This method restores a Word2Vec model previously saved with writeWord2VecModel.
 *
 * PLEASE NOTE: This method loads the FULL model, so don't use it if you're only going to use the weights.
 *
 * @param file
 * @return
 * @throws IOException
 */
@Deprecated
public static Word2Vec readWord2Vec(File file) throws IOException {
    File tmpFileSyn0 = File.createTempFile("word2vec", "0");
    File tmpFileSyn1 = File.createTempFile("word2vec", "1");
    File tmpFileC = File.createTempFile("word2vec", "c");
    File tmpFileH = File.createTempFile("word2vec", "h");
    File tmpFileF = File.createTempFile("word2vec", "f");
    tmpFileSyn0.deleteOnExit();
    tmpFileSyn1.deleteOnExit();
    tmpFileH.deleteOnExit();
    tmpFileC.deleteOnExit();
    tmpFileF.deleteOnExit();
    int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
    boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();
    if (originalPeriodic)
        Nd4j.getMemoryManager().togglePeriodicGc(false);
    Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);
    try {
        ZipFile zipFile = new ZipFile(file);
        ZipEntry syn0 = zipFile.getEntry("syn0.txt");
        InputStream stream = zipFile.getInputStream(syn0);
        Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        ZipEntry syn1 = zipFile.getEntry("syn1.txt");
        stream = zipFile.getInputStream(syn1);
        Files.copy(stream, Paths.get(tmpFileSyn1.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        ZipEntry codes = zipFile.getEntry("codes.txt");
        stream = zipFile.getInputStream(codes);
        Files.copy(stream, Paths.get(tmpFileC.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        ZipEntry huffman = zipFile.getEntry("huffman.txt");
        stream = zipFile.getInputStream(huffman);
        Files.copy(stream, Paths.get(tmpFileH.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        ZipEntry config = zipFile.getEntry("config.json");
        stream = zipFile.getInputStream(config);
        StringBuilder builder = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
            String line;
            while ((line = reader.readLine()) != null) {
                builder.append(line);
            }
        }
        VectorsConfiguration configuration = VectorsConfiguration.fromJson(builder.toString().trim());
        // we read the first 4 files as the w2v model
        Word2Vec w2v = readWord2VecFromText(tmpFileSyn0, tmpFileSyn1, tmpFileC, tmpFileH, configuration);
        // we read frequencies from frequencies.txt; however, it's possible that we don't have this file
        ZipEntry frequencies = zipFile.getEntry("frequencies.txt");
        if (frequencies != null) {
            stream = zipFile.getInputStream(frequencies);
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    String[] split = line.split(" ");
                    VocabWord word = w2v.getVocab().tokenFor(decodeB64(split[0]));
                    word.setElementFrequency((long) Double.parseDouble(split[1]));
                    word.setSequencesCount((long) Double.parseDouble(split[2]));
                }
            }
        }
        ZipEntry zsyn1Neg = zipFile.getEntry("syn1Neg.txt");
        if (zsyn1Neg != null) {
            stream = zipFile.getInputStream(zsyn1Neg);
            try (InputStreamReader isr = new InputStreamReader(stream);
                    BufferedReader reader = new BufferedReader(isr)) {
                String line = null;
                List<INDArray> rows = new ArrayList<>();
                while ((line = reader.readLine()) != null) {
                    String[] split = line.split(" ");
                    double array[] = new double[split.length];
                    for (int i = 0; i < split.length; i++) {
                        array[i] = Double.parseDouble(split[i]);
                    }
                    rows.add(Nd4j.create(array));
                }
                // it's possible to have a full model without syn1Neg
                if (rows.size() > 0) {
                    INDArray syn1Neg = Nd4j.vstack(rows);
                    ((InMemoryLookupTable) w2v.getLookupTable()).setSyn1Neg(syn1Neg);
                }
            }
        }
        return w2v;
    } finally {
        if (originalPeriodic)
            Nd4j.getMemoryManager().togglePeriodicGc(true);
        Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
    }
}
From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java
/**
 * This method restores a previously saved w2v model. The file can be in one of the following formats:
 * 1) Binary model, either compressed or not, like the well-known Google model
 * 2) Popular CSV word2vec text format
 * 3) DL4j compressed format
 *
 * In return you get a StaticWord2Vec model, which might be used as a lookup table only, in a multi-gpu environment.
 *
 * @param file File should point to a previously saved w2v model
 * @return
 */
// TODO: this method needs better name :)
public static WordVectors loadStaticModel(File file) {
    if (!file.exists() || file.isDirectory())
        throw new RuntimeException(
                new FileNotFoundException("File [" + file.getAbsolutePath() + "] was not found"));
    int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
    boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();
    if (originalPeriodic)
        Nd4j.getMemoryManager().togglePeriodicGc(false);
    Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);
    CompressedRamStorage<Integer> storage = new CompressedRamStorage.Builder<Integer>()
            .useInplaceCompression(false).setCompressor(new NoOp()).emulateIsAbsent(false).build();
    VocabCache<VocabWord> vocabCache = new AbstractCache.Builder<VocabWord>().build();
    // now we need to define which file format we have here:
    // if zip - that's the dl4j format
    try {
        log.debug("Trying DL4j format...");
        File tmpFileSyn0 = File.createTempFile("word2vec", "syn");
        ZipFile zipFile = new ZipFile(file);
        ZipEntry syn0 = zipFile.getEntry("syn0.txt");
        InputStream stream = zipFile.getInputStream(syn0);
        Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        storage.clear();
        try (Reader reader = new CSVReader(tmpFileSyn0)) {
            while (reader.hasNext()) {
                Pair<VocabWord, float[]> pair = reader.next();
                VocabWord word = pair.getFirst();
                storage.store(word.getIndex(), pair.getSecond());
                vocabCache.addToken(word);
                vocabCache.addWordToIndex(word.getIndex(), word.getLabel());
                Nd4j.getMemoryManager().invokeGcOccasionally();
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            if (originalPeriodic)
                Nd4j.getMemoryManager().togglePeriodicGc(true);
            Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
        }
    } catch (Exception e) {
        //
        try {
            // try to load the file as text csv
            vocabCache = new AbstractCache.Builder<VocabWord>().build();
            storage.clear();
            log.debug("Trying CSVReader...");
            try (Reader reader = new CSVReader(file)) {
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();
                    storage.store(word.getIndex(), pair.getSecond());
                    vocabCache.addToken(word);
                    vocabCache.addWordToIndex(word.getIndex(), word.getLabel());
                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception ef) {
                // we throw away this exception, and try to load the data as a binary model
                throw new RuntimeException(ef);
            } finally {
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);
                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }
        } catch (Exception ex) {
            // otherwise it's probably a google model, which might be compressed or not
            log.debug("Trying BinaryReader...");
            vocabCache = new AbstractCache.Builder<VocabWord>().build();
            storage.clear();
            try (Reader reader = new BinaryReader(file)) {
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();
                    storage.store(word.getIndex(), pair.getSecond());
                    vocabCache.addToken(word);
                    vocabCache.addWordToIndex(word.getIndex(), word.getLabel());
                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception ez) {
                throw new RuntimeException("Unable to guess input file format");
            } finally {
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);
                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }
        } finally {
            if (originalPeriodic)
                Nd4j.getMemoryManager().togglePeriodicGc(true);
            Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
        }
    }
    StaticWord2Vec word2Vec = new StaticWord2Vec.Builder(storage, vocabCache).build();
    return word2Vec;
}
From source file:org.broad.igv.feature.genome.GenomeManager.java
/**
 * Gets a list of all the locally cached genome archive files that IGV knows about.
 *
 * @return LinkedHashSet<GenomeListItem>
 * @throws IOException
 * @see GenomeListItem
 */
private List<GenomeListItem> getCachedGenomeArchiveList() throws IOException {
    if (cachedGenomeArchiveList == null) {
        cachedGenomeArchiveList = new LinkedList<GenomeListItem>();
        if (!DirectoryManager.getGenomeCacheDirectory().exists()) {
            return cachedGenomeArchiveList;
        }
        File[] files = DirectoryManager.getGenomeCacheDirectory().listFiles();
        for (File file : files) {
            if (file.isDirectory()) {
                continue;
            }
            if (!file.getName().toLowerCase().endsWith(Globals.GENOME_FILE_EXTENSION)) {
                continue;
            }
            ZipFile zipFile = null;
            FileInputStream fis = null;
            ZipInputStream zipInputStream = null;
            try {
                zipFile = new ZipFile(file);
                fis = new FileInputStream(file);
                zipInputStream = new ZipInputStream(new BufferedInputStream(fis));
                ZipEntry zipEntry = zipFile.getEntry(Globals.GENOME_ARCHIVE_PROPERTY_FILE_NAME);
                if (zipEntry == null) {
                    continue; // Should never happen
                }
                InputStream inputStream = zipFile.getInputStream(zipEntry);
                Properties properties = new Properties();
                properties.load(inputStream);
                int version = 0;
                if (properties.containsKey(Globals.GENOME_ARCHIVE_VERSION_KEY)) {
                    try {
                        version = Integer.parseInt(properties.getProperty(Globals.GENOME_ARCHIVE_VERSION_KEY));
                    } catch (Exception e) {
                        log.error("Error parsing genome version: " + version, e);
                    }
                }
                GenomeListItem item = new GenomeListItem(
                        properties.getProperty(Globals.GENOME_ARCHIVE_NAME_KEY), file.getAbsolutePath(),
                        properties.getProperty(Globals.GENOME_ARCHIVE_ID_KEY));
                cachedGenomeArchiveList.add(item);
            } catch (ZipException ex) {
                log.error("\nZip error unzipping cached genome.", ex);
                try {
                    file.delete();
                    zipInputStream.close();
                } catch (Exception e) {
                    // ignore exception when trying to delete file
                }
            } catch (IOException ex) {
                log.warn("\nIO error unzipping cached genome.", ex);
                try {
                    file.delete();
                } catch (Exception e) {
                    // ignore exception when trying to delete file
                }
            } finally {
                try {
                    if (zipInputStream != null) {
                        zipInputStream.close();
                    }
                    if (zipFile != null) {
                        zipFile.close();
                    }
                    if (fis != null) {
                        fis.close();
                    }
                } catch (IOException ex) {
                    log.warn("Error closing genome zip stream!", ex);
                }
            }
        }
    }
    return cachedGenomeArchiveList;
}