Example usage for java.util.zip ZipInputStream getNextEntry

Introduction

On this page you can find example usages of java.util.zip ZipInputStream.getNextEntry.

Prototype

public ZipEntry getNextEntry() throws IOException 

Document

Reads the next ZIP file entry and positions the stream at the beginning of the entry data.
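
A minimal, self-contained sketch of the typical read loop (the archive path "example.zip" below is a hypothetical placeholder): getNextEntry() returns null once the last entry has been read, so it is normally driven from a while loop, and each call leaves the stream positioned at the start of that entry's data.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

public class ListZipEntries {
    public static void main(String[] args) throws IOException {
        // "example.zip" is a placeholder path used for illustration only.
        try (InputStream in = Files.newInputStream(Paths.get("example.zip"));
                ZipInputStream zip = new ZipInputStream(in)) {
            ZipEntry entry;
            // getNextEntry() returns null when no entries remain.
            while ((entry = zip.getNextEntry()) != null) {
                System.out.println(entry.getName() + (entry.isDirectory() ? " (dir)" : ""));
                zip.closeEntry(); // optional: getNextEntry() also closes the current entry
            }
        }
    }
}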

Usage

From source file:com.yahoo.parsec.gradle.utils.FileUtils.java

/**
 * List directory file paths.
 *
 * @param directory directory
 * @return List of file paths
 * @throws IOException IOException
 */
public List<Path> listDirFilePaths(String directory) throws IOException {
    CodeSource src = getClass().getProtectionDomain().getCodeSource();
    List<Path> paths = new ArrayList<>();

    if (src != null) {
        URL jar = src.getLocation();
        try (ZipInputStream zip = new ZipInputStream(jar.openStream())) {
            ZipEntry zipEntry;

            while ((zipEntry = zip.getNextEntry()) != null) {
                String entryName = zipEntry.getName();
                if (entryName.startsWith(directory + "/")) {
                    paths.add(Paths.get("/" + entryName));
                }
            }
        }
    }

    return paths;
}

From source file:it.jnrpe.plugins.factory.CPluginFactory.java

/**
 * @deprecated
 */
private void configurePlugins(File fDir) {
    m_Logger.trace("READING PLUGIN CONFIGURATION FROM DIRECTORY " + fDir.getName());
    CStreamManager streamMgr = new CStreamManager();
    File[] vfJars = fDir.listFiles(new FileFilter() {

        public boolean accept(File f) {
            return f.getName().endsWith(".jar");
        }

    });

    // Initializing classloader
    URL[] urls = new URL[vfJars.length];
    URLClassLoader ul = null;

    for (int j = 0; j < vfJars.length; j++) {
        try {
            urls[j] = vfJars[j].toURI().toURL();
        } catch (MalformedURLException e) {
            // should never happen
        }
    }

    ul = URLClassLoader.newInstance(urls);

    for (int i = 0; i < vfJars.length; i++) {
        File file = vfJars[i];

        try {
            m_Logger.info("READING PLUGINS DATA IN FILE '" + file.getName() + "'");

            ZipInputStream jin = (ZipInputStream) streamMgr
                    .handle(new ZipInputStream(new FileInputStream(file)));
            ZipEntry ze = null;

            while ((ze = jin.getNextEntry()) != null) {
                if (ze.getName().equals("plugin.xml")) {
                    parsePluginXmlFile(jin);
                    break;
                }
            }
        } catch (Exception e) {
            m_Logger.error("UNABLE TO READ DATA FROM FILE '" + file.getName() + "'. THE FILE WILL BE IGNORED.",
                    e);
        } finally {
            streamMgr.closeAll();
        }

    }
}

From source file:com.github.nethad.clustermeister.integration.JPPFTestNode.java

private void unzipNode(InputStream fileToUnzip, File targetDir) {
    //        Enumeration entries;
    ZipInputStream zipFile;
    try {
        zipFile = new ZipInputStream(fileToUnzip);
        ZipEntry entry;
        while ((entry = zipFile.getNextEntry()) != null) {
            //                ZipEntry entry = (ZipEntry) entries.nextElement();
            if (entry.isDirectory()) {
                // Assume directories are stored parents first then children.
                System.err.println("Extracting directory: " + entry.getName());
                // This is not robust, just for demonstration purposes.
                (new File(targetDir, entry.getName())).mkdir();
                continue;
            }
            System.err.println("Extracting file: " + entry.getName());
            File targetFile = new File(targetDir, entry.getName());
            copyInputStream_notClosing(zipFile, new BufferedOutputStream(new FileOutputStream(targetFile)));
            //                zipFile.closeEntry();
        }
        zipFile.close();
    } catch (IOException ioe) {
        System.err.println("Unhandled exception:");
        ioe.printStackTrace();
    }
}

From source file:com.metamx.druid.loading.S3ZippedSegmentGetter.java

@Override
public File getSegmentFiles(Map<String, Object> loadSpec) throws StorageAdapterLoadingException {
    String s3Bucket = MapUtils.getString(loadSpec, "bucket");
    String s3Path = MapUtils.getString(loadSpec, "key");

    if (s3Path.startsWith("/")) {
        s3Path = s3Path.substring(1);
    }

    log.info("Loading index at path[s3://%s/%s]", s3Bucket, s3Path);

    S3Object s3Obj = null;
    File tmpFile = null;
    try {
        if (!s3Client.isObjectInBucket(s3Bucket, s3Path)) {
            throw new StorageAdapterLoadingException("IndexFile[s3://%s/%s] does not exist.", s3Bucket, s3Path);
        }

        File cacheFile = new File(config.getCacheDirectory(), computeCacheFilePath(s3Bucket, s3Path));

        if (cacheFile.exists()) {
            S3Object objDetails = s3Client.getObjectDetails(new S3Bucket(s3Bucket), s3Path);
            DateTime cacheFileLastModified = new DateTime(cacheFile.lastModified());
            DateTime s3ObjLastModified = new DateTime(objDetails.getLastModifiedDate().getTime());
            if (cacheFileLastModified.isAfter(s3ObjLastModified)) {
                log.info("Found cacheFile[%s] with modified[%s], which is after s3Obj[%s].  Using.", cacheFile,
                        cacheFileLastModified, s3ObjLastModified);
                return cacheFile;
            }
            FileUtils.deleteDirectory(cacheFile);
        }

        long currTime = System.currentTimeMillis();

        tmpFile = File.createTempFile(s3Bucket, new DateTime().toString());
        log.info("Downloading file[s3://%s/%s] to local tmpFile[%s] for cacheFile[%s]", s3Bucket, s3Path,
                tmpFile, cacheFile);

        s3Obj = s3Client.getObject(new S3Bucket(s3Bucket), s3Path);
        StreamUtils.copyToFileAndClose(s3Obj.getDataInputStream(), tmpFile);
        final long downloadEndTime = System.currentTimeMillis();
        log.info("Download of file[%s] completed in %,d millis", cacheFile, downloadEndTime - currTime);

        if (cacheFile.exists()) {
            FileUtils.deleteDirectory(cacheFile);
        }
        cacheFile.mkdirs();

        ZipInputStream zipIn = null;
        OutputStream out = null;
        ZipEntry entry = null;
        try {
            zipIn = new ZipInputStream(new BufferedInputStream(new FileInputStream(tmpFile)));
            while ((entry = zipIn.getNextEntry()) != null) {
                out = new FileOutputStream(new File(cacheFile, entry.getName()));
                IOUtils.copy(zipIn, out);
                zipIn.closeEntry();
                Closeables.closeQuietly(out);
                out = null;
            }
        } finally {
            Closeables.closeQuietly(out);
            Closeables.closeQuietly(zipIn);
        }

        long endTime = System.currentTimeMillis();
        log.info("Local processing of file[%s] done in %,d millis", cacheFile, endTime - downloadEndTime);

        log.info("Deleting tmpFile[%s]", tmpFile);
        tmpFile.delete();

        return cacheFile;
    } catch (Exception e) {
        throw new StorageAdapterLoadingException(e, e.getMessage());
    } finally {
        S3Utils.closeStreamsQuietly(s3Obj);
        if (tmpFile != null && tmpFile.exists()) {
            log.warn("Deleting tmpFile[%s] in finally block.  Why?", tmpFile);
            tmpFile.delete();
        }
    }
}

From source file:io.sledge.core.impl.extractor.SledgeApplicationPackageExtractor.java

@Override
public Map<String, InputStream> getPackages(ApplicationPackage appPackage) {
    Map<String, InputStream> packages = new HashMap<>();
    ZipInputStream zipStream = getNewUtf8ZipInputStream(appPackage);

    try {
        byte[] buffer = new byte[2048];
        ZipEntry zipEntry = null;

        while ((zipEntry = zipStream.getNextEntry()) != null) {

            if (zipEntry.isDirectory()) {
                zipStream.closeEntry();
                continue;
            }

            if (zipEntry.getName().startsWith("packages/")) {
                ByteArrayOutputStream output = new ByteArrayOutputStream();

                int length;
                while ((length = zipStream.read(buffer, 0, buffer.length)) >= 0) {
                    output.write(buffer, 0, length);
                }

                String packageFileName = zipEntry.getName().replace("packages/", "");
                packages.put(packageFileName, new ByteArrayInputStream(output.toByteArray()));

                zipStream.closeEntry();
            }
        }
    } catch (IOException e) {
        log.error(e.getMessage(), e);
    } finally {
        try {
            zipStream.close();
            appPackage.getPackageFile().reset();
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }
    }

    return packages;
}

From source file:com.metamx.druid.loading.S3ZippedSegmentPuller.java

@Override
public File getSegmentFiles(DataSegment segment) throws StorageAdapterLoadingException {
    Map<String, Object> loadSpec = segment.getLoadSpec();
    String s3Bucket = MapUtils.getString(loadSpec, "bucket");
    String s3Path = MapUtils.getString(loadSpec, "key");

    if (s3Path.startsWith("/")) {
        s3Path = s3Path.substring(1);
    }

    log.info("Loading index at path[s3://%s/%s]", s3Bucket, s3Path);

    S3Object s3Obj = null;
    File tmpFile = null;
    try {
        if (!s3Client.isObjectInBucket(s3Bucket, s3Path)) {
            throw new StorageAdapterLoadingException("IndexFile[s3://%s/%s] does not exist.", s3Bucket, s3Path);
        }

        File cacheFile = new File(config.getCacheDirectory(), computeCacheFilePath(s3Bucket, s3Path));

        if (cacheFile.exists()) {
            S3Object objDetails = s3Client.getObjectDetails(new S3Bucket(s3Bucket), s3Path);
            DateTime cacheFileLastModified = new DateTime(cacheFile.lastModified());
            DateTime s3ObjLastModified = new DateTime(objDetails.getLastModifiedDate().getTime());
            if (cacheFileLastModified.isAfter(s3ObjLastModified)) {
                log.info("Found cacheFile[%s] with modified[%s], which is after s3Obj[%s].  Using.", cacheFile,
                        cacheFileLastModified, s3ObjLastModified);
                return cacheFile;
            }
            FileUtils.deleteDirectory(cacheFile);
        }

        long currTime = System.currentTimeMillis();

        tmpFile = File.createTempFile(s3Bucket, new DateTime().toString());
        log.info("Downloading file[s3://%s/%s] to local tmpFile[%s] for cacheFile[%s]", s3Bucket, s3Path,
                tmpFile, cacheFile);

        s3Obj = s3Client.getObject(new S3Bucket(s3Bucket), s3Path);
        StreamUtils.copyToFileAndClose(s3Obj.getDataInputStream(), tmpFile);
        final long downloadEndTime = System.currentTimeMillis();
        log.info("Download of file[%s] completed in %,d millis", cacheFile, downloadEndTime - currTime);

        if (cacheFile.exists()) {
            FileUtils.deleteDirectory(cacheFile);
        }
        cacheFile.mkdirs();

        ZipInputStream zipIn = null;
        OutputStream out = null;
        ZipEntry entry = null;
        try {
            zipIn = new ZipInputStream(new BufferedInputStream(new FileInputStream(tmpFile)));
            while ((entry = zipIn.getNextEntry()) != null) {
                out = new FileOutputStream(new File(cacheFile, entry.getName()));
                IOUtils.copy(zipIn, out);
                zipIn.closeEntry();
                Closeables.closeQuietly(out);
                out = null;
            }
        } finally {
            Closeables.closeQuietly(out);
            Closeables.closeQuietly(zipIn);
        }

        long endTime = System.currentTimeMillis();
        log.info("Local processing of file[%s] done in %,d millis", cacheFile, endTime - downloadEndTime);

        log.info("Deleting tmpFile[%s]", tmpFile);
        tmpFile.delete();

        return cacheFile;
    } catch (Exception e) {
        throw new StorageAdapterLoadingException(e, e.getMessage());
    } finally {
        S3Utils.closeStreamsQuietly(s3Obj);
        if (tmpFile != null && tmpFile.exists()) {
            log.warn("Deleting tmpFile[%s] in finally block.  Why?", tmpFile);
            tmpFile.delete();
        }
    }
}

From source file:de.xirp.plugin.PluginManager.java

/**
 * Extracts files from the plugin's jar.
 * 
 * @param info
 *            the information about the plugin
 * @param destination
 *            destination for extraction
 * @param comparer
 *            comparer which returns <code>0</code> if an
 *            element from the jar should be extracted
 * @param replace
 *            prefix of the element's path which should be stripped
 * @param deleteOnExit
 *            <code>true</code> if the extracted files should be
 *            deleted on exit of the application.
 * @return <code>false</code> if an error occurred during
 *         extraction
 */
private static boolean extractFromJar(PluginInfo info, String destination, Comparable<String> comparer,
        String replace, boolean deleteOnExit) {
    if (logClass.isTraceEnabled()) {
        logClass.trace(Constants.LINE_SEPARATOR + "Extracting for Plugin: " + info.getDefaultName() //$NON-NLS-1$
                + " to path " + destination + Constants.LINE_SEPARATOR); //$NON-NLS-1$
    }
    ZipInputStream zip = null;
    FileInputStream in = null;
    try {
        in = new FileInputStream(info.getAbsoluteJarPath());
        zip = new ZipInputStream(in);

        ZipEntry entry = null;
        while ((entry = zip.getNextEntry()) != null) {
            // relative name with slashes to separate dirnames.
            String elementName = entry.getName();
            // Check if it's an entry within Plugin Dir.
            // Only need to extract these

            if (comparer.compareTo(elementName) == 0) {
                // Remove the help dir name, because we don't want
                // to extract this parent dir
                elementName = elementName.replaceFirst(replace + JAR_SEPARATOR, "").trim(); //$NON-NLS-1$ 

                if (!elementName.equalsIgnoreCase("")) { //$NON-NLS-1$
                    // if the parent dir for the file does not exist, create it
                    File elementFile = new File(destination, elementName);
                    if (!elementFile.exists()) {
                        elementFile.getParentFile().mkdirs();
                        if (deleteOnExit) {
                            DeleteManager.deleteOnShutdown(elementFile);
                        }
                    }

                    // Only extract files; directories are created
                    // above with mkdirs
                    if (!entry.isDirectory()) {
                        FileOutputStream fos = new FileOutputStream(elementFile);
                        byte[] buf = new byte[1024];
                        int len;
                        while ((len = zip.read(buf)) > 0) {
                            fos.write(buf, 0, len);
                        }
                        fos.close();
                        elementFile.setLastModified(entry.getTime());
                    }
                    logClass.trace("Extracted: " + elementName + Constants.LINE_SEPARATOR); //$NON-NLS-1$
                }
            }
            zip.closeEntry();
        }

    } catch (IOException e) {
        logClass.error("Error: " + e.getMessage() + Constants.LINE_SEPARATOR, e); //$NON-NLS-1$
        return false;
    } finally {
        if (zip != null) {
            try {
                zip.close();
            } catch (IOException e) {
                logClass.error("Error: " + e.getMessage() + Constants.LINE_SEPARATOR, e); //$NON-NLS-1$
            }
        }
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                logClass.error("Error: " + e.getMessage() + Constants.LINE_SEPARATOR, e); //$NON-NLS-1$
            }
        }
    }

    return true;
}

From source file:com.dbi.jmmerge.MapController.java

private Map<String, File> extractFilesFromZipUpload(MultipartFile file) throws IOException {
    Map<String, File> ret = new HashMap<>();
    String baseDirName = null;
    //First catalog it, to see what we've got.
    File temp = File.createTempFile("map", null);
    temp.deleteOnExit();
    file.transferTo(temp);
    ZipEntry entry;
    try (ZipInputStream zipin = new ZipInputStream(new FileInputStream(temp))) {
        while ((entry = zipin.getNextEntry()) != null) {
            String filename = entry.getName();
            if (isJunkEntry(filename) || entry.isDirectory()) {
                continue;
            }
            FileOutputStream out = null;
            try {
                ret.put(filename, File.createTempFile(filename, null));
                LOG.debug("Incoming timestamp on zip - " + filename + " - " + entry.getTime());
                ret.get(filename).deleteOnExit();
                out = new FileOutputStream(ret.get(filename));
                IOUtils.copy(zipin, out);
                ret.get(filename).setLastModified(entry.getTime());
            } finally {
                // we must always close the output file
                if (out != null)
                    out.close();
            }
        }
    }
    baseDirName = tryToGuessBaseDir(ret.keySet());
    if (baseDirName != null) {
        for (String key : new HashSet<String>(ret.keySet())) {
            ret.put(key.replace(baseDirName + "/", ""), ret.remove(key));
        }
    }
    return ret;
}

From source file:com.adito.language.LanguagePackManager.java

void addFileJarCategory(URL url) throws IOException {

    InputStream in = null;
    try {
        File f = new File(Util.urlDecode(url.getPath()));
        if (!f.exists()) {
            return;
        }
        in = new FileInputStream(Util.urlDecode(url.getPath()));
        ZipInputStream zin = new ZipInputStream(in);
        while (true) {
            ZipEntry entry = zin.getNextEntry();
            if (entry == null) {
                break;
            }
            String path = entry.getName();
            String name = path;
            String resourceBundleId = path;
            int idx = name.lastIndexOf('/');
            if (idx != -1) {
                resourceBundleId = name.substring(0, idx).replace('/', '.');
                name = name.substring(idx + 1);
            }
            if (name.equals("ApplicationResources.properties")) {
                LanguageCategory category = new LanguageCategory(zin, url, path, resourceBundleId);
                if (!detectedHaCategories.containsKey(category.getId())) {
                    detectedCategories.add(category);
                    detectedHaCategories.put(category.getId(), category);
                }
            }
        }
    } finally {
        Util.closeStream(in);
    }

}

From source file:edu.mit.lib.bagit.Loader.java

private void inflate(InputStream in, String fmt) throws IOException {
    switch (fmt) {
    case "zip":
        ZipInputStream zin = new ZipInputStream(in);
        ZipEntry entry = null;
        while ((entry = zin.getNextEntry()) != null) {
            File outFile = new File(base.getParent(), entry.getName());
            outFile.getParentFile().mkdirs();
            Files.copy(zin, outFile.toPath());
        }
        zin.close();
        break;
    case "tgz":
        TarArchiveInputStream tin = new TarArchiveInputStream(new GzipCompressorInputStream(in));
        TarArchiveEntry tentry = null;
        while ((tentry = tin.getNextTarEntry()) != null) {
            File outFile = new File(base.getParent(), tentry.getName());
            outFile.getParentFile().mkdirs();
            Files.copy(tin, outFile.toPath());
        }
        tin.close();
        break;
    default:
        throw new IOException("Unsupported archive format: " + fmt);
    }
}