List of usage examples for java.util.zip ZipFile getEntry
public ZipEntry getEntry(String name)
From source file:com.edgenius.wiki.service.impl.BackupServiceImpl.java
/**
 * Reads the textual comment stored inside a backup/restore zip archive.
 * <p>
 * The comment is kept as a dedicated zip entry named {@code COMMENT_FILE_NAME}.
 * This is a best-effort read: any failure (missing file, corrupt zip, missing
 * entry) is logged at INFO level and an empty string is returned.
 *
 * @param zipFile the backup archive to inspect
 * @return the comment text, or {@code ""} if none is available
 */
public String getFileComment(File zipFile) {
    String comment = "";
    // try-with-resources replaces the manual close()-in-empty-catch of the
    // previous version; ZipFile implements Closeable.
    try (ZipFile zip = new ZipFile(zipFile)) {
        ZipEntry entry = zip.getEntry(COMMENT_FILE_NAME);
        if (entry != null) {
            comment = IOUtils.toString(zip.getInputStream(entry));
        }
    } catch (Exception e) {
        // Deliberately swallowed: an unreadable comment is not an error condition.
        log.info("backup/restore file comment not available:" + zipFile.getAbsolutePath());
    }
    return comment;
}
From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java
/** * This method// w ww . j a v a2s .co m * 1) Binary model, either compressed or not. Like well-known Google Model * 2) Popular CSV word2vec text format * 3) DL4j compressed format * * Please note: if extended data isn't available, only weights will be loaded instead. * * @param file * @param extendedModel if TRUE, we'll try to load HS states & Huffman tree info, if FALSE, only weights will be loaded * @return */ public static Word2Vec readWord2VecModel(@NonNull File file, boolean extendedModel) { InMemoryLookupTable<VocabWord> lookupTable = new InMemoryLookupTable<>(); AbstractCache<VocabWord> vocabCache = new AbstractCache<>(); Word2Vec vec; INDArray syn0 = null; VectorsConfiguration configuration = new VectorsConfiguration(); if (!file.exists() || !file.isFile()) throw new ND4JIllegalStateException("File [" + file.getAbsolutePath() + "] doesn't exist"); int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency(); boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive(); if (originalPeriodic) Nd4j.getMemoryManager().togglePeriodicGc(false); Nd4j.getMemoryManager().setOccasionalGcFrequency(50000); // try to load zip format try { if (extendedModel) { log.debug("Trying full model restoration..."); // this method just loads full compressed model if (originalPeriodic) Nd4j.getMemoryManager().togglePeriodicGc(true); Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq); return readWord2Vec(file); } else { log.debug("Trying simplified model restoration..."); File tmpFileSyn0 = File.createTempFile("word2vec", "syn"); File tmpFileConfig = File.createTempFile("word2vec", "config"); // we don't need full model, so we go directly to syn0 file ZipFile zipFile = new ZipFile(file); ZipEntry syn = zipFile.getEntry("syn0.txt"); InputStream stream = zipFile.getInputStream(syn); Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING); // now we're restoring configuration saved earlier ZipEntry config = 
zipFile.getEntry("config.json"); if (config != null) { stream = zipFile.getInputStream(config); StringBuilder builder = new StringBuilder(); try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) { String line; while ((line = reader.readLine()) != null) { builder.append(line); } } configuration = VectorsConfiguration.fromJson(builder.toString().trim()); } ZipEntry ve = zipFile.getEntry("frequencies.txt"); if (ve != null) { stream = zipFile.getInputStream(ve); AtomicInteger cnt = new AtomicInteger(0); try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) { String line; while ((line = reader.readLine()) != null) { String[] split = line.split(" "); VocabWord word = new VocabWord(Double.valueOf(split[1]), decodeB64(split[0])); word.setIndex(cnt.getAndIncrement()); word.incrementSequencesCount(Long.valueOf(split[2])); vocabCache.addToken(word); vocabCache.addWordToIndex(word.getIndex(), word.getLabel()); Nd4j.getMemoryManager().invokeGcOccasionally(); } } } List<INDArray> rows = new ArrayList<>(); // basically read up everything, call vstacl and then return model try (Reader reader = new CSVReader(tmpFileSyn0)) { AtomicInteger cnt = new AtomicInteger(0); while (reader.hasNext()) { Pair<VocabWord, float[]> pair = reader.next(); VocabWord word = pair.getFirst(); INDArray vector = Nd4j.create(pair.getSecond()); if (ve != null) { if (syn0 == null) syn0 = Nd4j.create(vocabCache.numWords(), vector.length()); syn0.getRow(cnt.getAndIncrement()).assign(vector); } else { rows.add(vector); vocabCache.addToken(word); vocabCache.addWordToIndex(word.getIndex(), word.getLabel()); } Nd4j.getMemoryManager().invokeGcOccasionally(); } } catch (Exception e) { throw new RuntimeException(e); } finally { if (originalPeriodic) Nd4j.getMemoryManager().togglePeriodicGc(true); Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq); } if (syn0 == null && vocabCache.numWords() > 0) syn0 = Nd4j.vstack(rows); if (syn0 == null) { 
log.error("Can't build syn0 table"); throw new DL4JInvalidInputException("Can't build syn0 table"); } lookupTable = new InMemoryLookupTable.Builder<VocabWord>().cache(vocabCache) .vectorLength(syn0.columns()).useHierarchicSoftmax(false).useAdaGrad(false).build(); lookupTable.setSyn0(syn0); try { tmpFileSyn0.delete(); tmpFileConfig.delete(); } catch (Exception e) { // } } } catch (Exception e) { // let's try to load this file as csv file try { log.debug("Trying CSV model restoration..."); Pair<InMemoryLookupTable, VocabCache> pair = loadTxt(file); lookupTable = pair.getFirst(); vocabCache = (AbstractCache<VocabWord>) pair.getSecond(); } catch (Exception ex) { // we fallback to trying binary model instead try { log.debug("Trying binary model restoration..."); if (originalPeriodic) Nd4j.getMemoryManager().togglePeriodicGc(true); Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq); vec = loadGoogleModel(file, true, true); return vec; } catch (Exception ey) { // try to load without linebreaks try { if (originalPeriodic) Nd4j.getMemoryManager().togglePeriodicGc(true); Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq); vec = loadGoogleModel(file, true, false); return vec; } catch (Exception ez) { throw new RuntimeException( "Unable to guess input file format. Please use corresponding loader directly"); } } } } Word2Vec.Builder builder = new Word2Vec.Builder(configuration).lookupTable(lookupTable).useAdaGrad(false) .vocabCache(vocabCache).layerSize(lookupTable.layerSize()) // we don't use hs here, because model is incomplete .useHierarchicSoftmax(false).resetModel(false); /* Trying to restore TokenizerFactory & TokenPreProcessor */ TokenizerFactory factory = getTokenizerFactory(configuration); if (factory != null) builder.tokenizerFactory(factory); vec = builder.build(); return vec; }
From source file:org.paxle.core.doc.impl.BasicDocumentFactoryTest.java
public void testLoadUnmarshalledCommand() throws IOException, ParseException { final ZipFile zf = new ZipFile(new File("src/test/resources/command.zip")); // process attachments final Map<String, DataHandler> attachments = new HashMap<String, DataHandler>(); for (Enumeration<? extends ZipEntry> entries = zf.entries(); entries.hasMoreElements();) { final ZipEntry entry = entries.nextElement(); final String name = entry.getName(); if (name.equals("command.xml")) continue; // create a data-source to load the attachment final DataSource source = new DataSource() { private ZipFile zip = zf; private ZipEntry zipEntry = entry; public String getContentType() { return "application/x-java-serialized-object"; }/*from w w w . j a v a 2 s.co m*/ public InputStream getInputStream() throws IOException { return this.zip.getInputStream(this.zipEntry); } public String getName() { return this.zipEntry.getName(); } public OutputStream getOutputStream() throws IOException { throw new UnsupportedOperationException(); } }; final DataHandler handler = new DataHandler(source); attachments.put(name, handler); } // process command final ZipEntry commandEntry = zf.getEntry("command.xml"); final InputStream commandInput = zf.getInputStream(commandEntry); // marshal command TeeInputStream input = new TeeInputStream(commandInput, System.out); final ICommand cmd1 = this.docFactory.unmarshal(input, attachments); assertNotNull(cmd1); zf.close(); final ICommand cmd2 = this.createTestCommand(); assertEquals(cmd2, cmd1); }
From source file:org.nuxeo.launcher.connect.ConnectBroker.java
protected Map<String, PackageDefinition> getDistributionDefinitions(List<String> md5Filenames) { Map<String, PackageDefinition> allDefinitions = new HashMap<>(); if (md5Filenames == null) { return allDefinitions; }//from w ww . j a v a 2 s. c om for (String md5Filename : md5Filenames) { File md5File = new File(distributionMPDir, md5Filename); if (!md5File.exists()) { // distribution file has been deleted continue; } ZipFile zipFile; try { zipFile = new ZipFile(md5File); } catch (ZipException e) { log.warn("Unzip error reading file " + md5File, e); continue; } catch (IOException e) { log.warn("Could not read file " + md5File, e); continue; } try { ZipEntry zipEntry = zipFile.getEntry("package.xml"); InputStream in = zipFile.getInputStream(zipEntry); PackageDefinition pd = NuxeoConnectClient.getPackageUpdateService().loadPackage(in); allDefinitions.put(md5Filename, pd); } catch (Exception e) { log.error("Could not read package description", e); continue; } finally { try { zipFile.close(); } catch (IOException e) { log.warn("Unexpected error closing file " + md5File, e); } } } return allDefinitions; }
From source file:net.morematerials.manager.UpdateManager.java
private void updateSmp(File file) throws Exception { ZipFile smpFile = new ZipFile(file); Enumeration<? extends ZipEntry> entries = smpFile.entries(); String smpName = file.getName().substring(0, file.getName().lastIndexOf(".")); // First we need to know what files are in this .smp file, because the old format uses magic filename matching. ArrayList<String> containedFiles = new ArrayList<String>(); HashMap<String, YamlConfiguration> materials = new HashMap<String, YamlConfiguration>(); // Now we walk through the file once and store every file. ZipEntry entry;/* w w w . j a va 2s .c o m*/ YamlConfiguration yaml; while (entries.hasMoreElements()) { entry = entries.nextElement(); // Only if its a .yml file if (entry.getName().endsWith(".yml")) { // Load the .yml file yaml = new YamlConfiguration(); yaml.load(smpFile.getInputStream(entry)); // Texture is required for new package format. if (!yaml.contains("Texture")) { materials.put(entry.getName().substring(0, entry.getName().lastIndexOf(".")), yaml); } else { containedFiles.add(entry.getName()); } } else { containedFiles.add(entry.getName()); } } // If this map contains any entry, we need to convert something. if (materials.size() > 0) { this.plugin.getUtilsManager().log("Deprecated .smp found: " + file.getName() + ". Updating..."); // We need a temporary directory to update the .smp in. this.tempDir.mkdir(); // First extract all untouched assets: for (String filename : containedFiles) { InputStream in = smpFile.getInputStream(smpFile.getEntry(filename)); OutputStream out = new FileOutputStream(new File(this.tempDir, filename)); int read; byte[] bytes = new byte[1024]; while ((read = in.read(bytes)) != -1) { out.write(bytes, 0, read); } out.flush(); out.close(); in.close(); } // Now convert each .yml file in this archive. 
YamlConfiguration oldYaml; YamlConfiguration newYaml; for (String materialName : materials.keySet()) { oldYaml = materials.get(materialName); newYaml = new YamlConfiguration(); // Required "Type" which is Block or Item. Old format didnt support Tools anyway. newYaml.set("Type", oldYaml.getString("Type")); // Title is now required and falls back to filename. newYaml.set("Title", oldYaml.getString("Title", materialName)); // Now call the converter methods. if (newYaml.getString("Type").equals("Block")) { this.convertBlock(oldYaml, newYaml, materialName, containedFiles); this.convertBlockHandlers(oldYaml, newYaml); } else if (newYaml.getString("Type").equals("Item")) { this.convertItem(oldYaml, newYaml, materialName, containedFiles); this.convertItemHandlers(oldYaml, newYaml); } // Copy over recipes - nothing changed here! if (oldYaml.contains("Recipes")) { newYaml.set("Recipes", oldYaml.getList("Recipes")); } // Finally store the new .yml file. String yamlString = newYaml.saveToString(); BufferedWriter out = new BufferedWriter( new FileWriter(new File(this.tempDir, materialName + ".yml"))); out.write(this.fixYamlProblems(yamlString)); out.close(); // Also update itemmap entry! for (Integer i = 0; i < this.itemMap.size(); i++) { String oldMaterial = this.itemMap.get(i).replaceAll("^[0-9]+:MoreMaterials.", ""); if (oldMaterial.equals(newYaml.getString("Title"))) { this.itemMap.set(i, this.itemMap.get(i).replaceAll("^([0-9]+:MoreMaterials.).+$", "$1" + smpName + "." + materialName)); break; } } // And we need to tell SpoutPlugin that this material must be renamed! SpoutManager.getMaterialManager().renameMaterialKey(this.plugin, newYaml.getString("Title"), smpName + "." 
+ materialName); } // First remove old .smp file smpFile.close(); file.delete(); // Then repack the new .smp file ZipOutputStream out = new ZipOutputStream(new FileOutputStream(file)); for (File entryFile : this.tempDir.listFiles()) { FileInputStream in = new FileInputStream(entryFile); out.putNextEntry(new ZipEntry(entryFile.getName())); Integer len; byte[] buf = new byte[1024]; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } out.closeEntry(); in.close(); } out.close(); // At last remove the temp directory. FileUtils.deleteDirectory(this.tempDir); } else { // At last, close the file handle. smpFile.close(); } }
From source file:mondrian.test.loader.MondrianFoodMartLoader.java
/** * Open the file of INSERT statements to load the data. Default * file name is ./demo/FoodMartCreateData.zip * * @return FileInputStream/* w w w.j a va 2 s .c o m*/ */ private InputStream openInputStream() throws Exception { String defaultZipFileName = getClass().getResource("FoodMartCreateData.zip").getPath(); final String defaultDataFileName = "FoodMartCreateData.sql"; final File file = (inputFile != null) ? new File(inputFile) : new File(defaultZipFileName); if (!file.exists()) { LOGGER.error("No input file: " + file); return null; } if (file.getName().toLowerCase().endsWith(".zip")) { ZipFile zippedData = new ZipFile(file); ZipEntry entry = zippedData.getEntry(defaultDataFileName); return zippedData.getInputStream(entry); } else { return new FileInputStream(file); } }
From source file:it.readbeyond.minstrel.unzipper.Unzipper.java
private String unzip(String inputZip, String destinationDirectory, String mode, JSONObject parameters) throws IOException, JSONException { // store the zip entries to decompress List<String> list = new ArrayList<String>(); // store the zip entries actually decompressed List<String> decompressed = new ArrayList<String>(); // open input zip file File sourceZipFile = new File(inputZip); ZipFile zipFile = new ZipFile(sourceZipFile, ZipFile.OPEN_READ); // open destination directory, creating it if needed File unzipDestinationDirectory = new File(destinationDirectory); unzipDestinationDirectory.mkdirs();// w w w .j a v a 2 s .co m // extract all files if (mode.equals(ARGUMENT_MODE_ALL)) { Enumeration<? extends ZipEntry> zipFileEntries = zipFile.entries(); while (zipFileEntries.hasMoreElements()) { list.add(zipFileEntries.nextElement().getName()); } } // extract all files except audio and video // (determined by file extension) if (mode.equals(ARGUMENT_MODE_ALL_NON_MEDIA)) { String[] excludeExtensions = JSONArrayToStringArray( parameters.optJSONArray(ARGUMENT_ARGS_EXCLUDE_EXTENSIONS)); Enumeration<? extends ZipEntry> zipFileEntries = zipFile.entries(); while (zipFileEntries.hasMoreElements()) { String name = zipFileEntries.nextElement().getName(); String lower = name.toLowerCase(); if (!isFile(lower, excludeExtensions)) { list.add(name); } } } // extract all small files // maximum size is passed in args parameter // or, if not passed, defaults to const DEFAULT_MAXIMUM_SIZE_FILE if (mode.equals(ARGUMENT_MODE_ALL_SMALL)) { long maximum_size = parameters.optLong(ARGUMENT_ARGS_MAXIMUM_FILE_SIZE, DEFAULT_MAXIMUM_SIZE_FILE); Enumeration<? 
extends ZipEntry> zipFileEntries = zipFile.entries(); while (zipFileEntries.hasMoreElements()) { ZipEntry ze = zipFileEntries.nextElement(); if (ze.getSize() <= maximum_size) { list.add(ze.getName()); } } } // extract only the requested files if (mode.equals(ARGUMENT_MODE_SELECTED)) { String[] entries = JSONArrayToStringArray(parameters.optJSONArray(ARGUMENT_ARGS_ENTRIES)); for (String entry : entries) { ZipEntry ze = zipFile.getEntry(entry); if (ze != null) { list.add(entry); } } } // extract all "structural" files if (mode.equals(ARGUMENT_MODE_ALL_STRUCTURE)) { String[] extensions = JSONArrayToStringArray(parameters.optJSONArray(ARGUMENT_ARGS_EXTENSIONS)); Enumeration<? extends ZipEntry> zipFileEntries = zipFile.entries(); while (zipFileEntries.hasMoreElements()) { String name = zipFileEntries.nextElement().getName(); String lower = name.toLowerCase(); boolean extract = isFile(lower, extensions); if (extract) { list.add(name); } } } // NOTE list contains only valid zip entries // perform unzip for (String currentEntry : list) { ZipEntry entry = zipFile.getEntry(currentEntry); File destFile = new File(unzipDestinationDirectory, currentEntry); File destinationParent = destFile.getParentFile(); destinationParent.mkdirs(); if (!entry.isDirectory()) { BufferedInputStream is = new BufferedInputStream(zipFile.getInputStream(entry)); int numberOfBytesRead; byte data[] = new byte[BUFFER_SIZE]; FileOutputStream fos = new FileOutputStream(destFile); BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER_SIZE); while ((numberOfBytesRead = is.read(data, 0, BUFFER_SIZE)) > -1) { dest.write(data, 0, numberOfBytesRead); } dest.flush(); dest.close(); is.close(); fos.close(); decompressed.add(currentEntry); } } zipFile.close(); return stringify(decompressed); }
From source file:org.zeroturnaround.zip.ZipUtil.java
private static boolean archiveEqualsInternal(File f1, File f2) throws IOException { ZipFile zf1 = null;//from www . ja va 2s. c o m ZipFile zf2 = null; try { zf1 = new ZipFile(f1); zf2 = new ZipFile(f2); // Check the number of entries if (zf1.size() != zf2.size()) { log.debug("Number of entries changed (" + zf1.size() + " vs " + zf2.size() + ")."); return false; } /* * As there are same number of entries in both archives we can traverse * all entries of one of the archives and get the corresponding entries * from the other archive. * * If a corresponding entry is missing from the second archive the * archives are different and we finish the comparison. * * We guarantee that no entry of the second archive is skipped as there * are same number of unique entries in both archives. */ Enumeration en = zf1.entries(); while (en.hasMoreElements()) { ZipEntry e1 = (ZipEntry) en.nextElement(); String path = e1.getName(); ZipEntry e2 = zf2.getEntry(path); // Check meta data if (!metaDataEquals(path, e1, e2)) { return false; } // Check the content InputStream is1 = null; InputStream is2 = null; try { is1 = zf1.getInputStream(e1); is2 = zf2.getInputStream(e2); if (!IOUtils.contentEquals(is1, is2)) { log.debug("Entry '{}' content changed.", path); return false; } } finally { IOUtils.closeQuietly(is1); IOUtils.closeQuietly(is2); } } } finally { closeQuietly(zf1); closeQuietly(zf2); } log.debug("Archives are the same."); return true; }
From source file:com.izforge.izpack.compiler.CompilerConfig.java
private IXMLElement readRefPackData(String refFileName, boolean isselfcontained) throws CompilerException { File refXMLFile = new File(refFileName); if (!refXMLFile.isAbsolute()) { refXMLFile = new File(compilerData.getBasedir(), refFileName); }/*from www.j ava 2 s . c o m*/ if (!refXMLFile.canRead()) { throw new CompilerException("Invalid file: " + refXMLFile); } InputStream specin; if (isselfcontained) { if (!refXMLFile.getAbsolutePath().endsWith(".zip")) { throw new CompilerException( "Invalid file: " + refXMLFile + ". Selfcontained files can only be of type zip."); } ZipFile zip; try { zip = new ZipFile(refXMLFile, ZipFile.OPEN_READ); ZipEntry specentry = zip.getEntry("META-INF/izpack.xml"); specin = zip.getInputStream(specentry); } catch (IOException e) { throw new CompilerException("Error reading META-INF/izpack.xml in " + refXMLFile); } } else { try { specin = new FileInputStream(refXMLFile.getAbsolutePath()); } catch (FileNotFoundException e) { throw new CompilerException("FileNotFoundException exception while reading refXMLFile"); } } IXMLParser refXMLParser = new XMLParser(); // We get it IXMLElement refXMLData = refXMLParser.parse(specin, refXMLFile.getAbsolutePath()); // Now checked the loaded XML file for basic syntax // We check it if (!"installation".equalsIgnoreCase(refXMLData.getName())) { assertionHelper.parseError(refXMLData, "this is not an IzPack XML installation file"); } if (!CompilerData.VERSION.equalsIgnoreCase(xmlCompilerHelper.requireAttribute(refXMLData, "version"))) { assertionHelper.parseError(refXMLData, "the file version is different from the compiler version"); } // Read the properties and perform replacement on the rest of the tree substituteProperties(refXMLData); // call addResources to add the referenced XML resources to this installation addResources(refXMLData); try { specin.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } return refXMLData; }
From source file:org.opencms.setup.CmsSetupBean.java
/** * Reads all properties from the components.properties file at the given location, a folder or a zip file.<p> * /*w w w . jav a 2 s .c om*/ * @param location the location to read the properties from * * @return the read properties * * @throws FileNotFoundException if the properties file could not be found * @throws CmsConfigurationException if the something else goes wrong */ protected CmsParameterConfiguration getComponentsProperties(String location) throws FileNotFoundException, CmsConfigurationException { InputStream stream = null; ZipFile zipFile = null; try { // try to interpret the fileName as a folder File folder = new File(location); // if it is a file it must be a zip-file if (folder.isFile()) { zipFile = new ZipFile(location); ZipEntry entry = zipFile.getEntry(COMPONENTS_PROPERTIES); // path to file might be relative, too if ((entry == null) && location.startsWith("/")) { entry = zipFile.getEntry(location.substring(1)); } if (entry == null) { throw new FileNotFoundException(org.opencms.importexport.Messages.get().getBundle().key( org.opencms.importexport.Messages.LOG_IMPORTEXPORT_FILE_NOT_FOUND_IN_ZIP_1, location + "/" + COMPONENTS_PROPERTIES)); } stream = zipFile.getInputStream(entry); } else { // it is a folder File file = new File(folder, COMPONENTS_PROPERTIES); stream = new FileInputStream(file); } return new CmsParameterConfiguration(stream); } catch (Throwable ioe) { if (stream != null) { try { stream.close(); } catch (IOException e) { if (LOG.isDebugEnabled()) { LOG.debug(e.getLocalizedMessage(), e); } } } if (zipFile != null) { try { zipFile.close(); } catch (IOException e) { if (LOG.isDebugEnabled()) { LOG.debug(e.getLocalizedMessage(), e); } } } if (ioe instanceof FileNotFoundException) { throw (FileNotFoundException) ioe; } CmsMessageContainer msg = org.opencms.importexport.Messages.get().container( org.opencms.importexport.Messages.ERR_IMPORTEXPORT_ERROR_READING_FILE_1, location + "/" + COMPONENTS_PROPERTIES); if (LOG.isErrorEnabled()) { 
LOG.error(msg.key(), ioe); } throw new CmsConfigurationException(msg, ioe); } }