Example usage for java.util.zip ZipEntry ZipEntry

List of usage examples for java.util.zip ZipEntry ZipEntry

Introduction

On this page you can find example usage for java.util.zip ZipEntry ZipEntry.

Prototype

public ZipEntry(ZipEntry e) 

Document

Creates a new zip entry with fields taken from the specified zip entry.
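
None of the usage examples below actually calls this copy constructor; they all build entries with the ZipEntry(String name) form. As a minimal, hypothetical sketch of the constructor documented here, copying every entry of one archive into another while preserving its metadata might look like this (the file names are made up for illustration):

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

public class CopyZipEntriesExample {
    public static void main(String[] args) throws Exception {
        try (ZipInputStream zin = new ZipInputStream(new FileInputStream("in.zip"));
                ZipOutputStream zout = new ZipOutputStream(new FileOutputStream("out.zip"))) {
            byte[] buf = new byte[4096];
            ZipEntry entry;
            while ((entry = zin.getNextEntry()) != null) {
                // new ZipEntry(entry) copies the name, comment, extra data, sizes and timestamps
                ZipEntry copy = new ZipEntry(entry);
                // reset the compressed size so ZipOutputStream recomputes it when re-deflating
                copy.setCompressedSize(-1);
                zout.putNextEntry(copy);
                int len;
                while ((len = zin.read(buf)) != -1) {
                    zout.write(buf, 0, len);
                }
                zout.closeEntry();
            }
        }
    }
}

Resetting the compressed size avoids a size-mismatch ZipException when the copied data is compressed again with different deflater settings.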

Usage

From source file:es.gob.afirma.signers.ooxml.be.fedict.eid.applet.service.signer.ooxml.AbstractOOXMLSignatureService.java

/** Obtains the signed OOXML file.
 * @param signatureData
 * @return Signed OOXML file
 * @throws IOException
 * @throws ParserConfigurationException
 * @throws SAXException
 * @throws TransformerException
 */
public final byte[] outputSignedOfficeOpenXMLDocument(final byte[] signatureData)
        throws IOException, ParserConfigurationException, SAXException, TransformerException {

    final ByteArrayOutputStream signedOOXMLOutputStream = new ByteArrayOutputStream();

    final String signatureZipEntryName = "_xmlsignatures/sig-" + UUID.randomUUID().toString() + ".xml"; //$NON-NLS-1$ //$NON-NLS-2$

    /*
     * Copy the original OOXML content to the signed OOXML package. During
     * copying some files need to be changed.
     */
    final ZipOutputStream zipOutputStream = copyOOXMLContent(signatureZipEntryName, signedOOXMLOutputStream);

    // Add the OOXML XML signature file to the OOXML package.
    zipOutputStream.putNextEntry(new ZipEntry(signatureZipEntryName));
    IOUtils.write(signatureData, zipOutputStream);
    zipOutputStream.close();

    return signedOOXMLOutputStream.toByteArray();
}

From source file:io.druid.java.util.common.CompressionUtils.java

/**
 * Zips the contents of the input directory to the output stream. Subdirectories are skipped.
 *
 * @param directory The directory whose contents should be added to the zip in the output stream.
 * @param out       The output stream to write the zip data to. Caller is responsible for closing this stream.
 *
 * @return The number of bytes (uncompressed) read from the input directory.
 *
 * @throws IOException
 */
public static long zip(File directory, OutputStream out) throws IOException {
    if (!directory.isDirectory()) {
        throw new IOE("directory[%s] is not a directory", directory);
    }

    final ZipOutputStream zipOut = new ZipOutputStream(out);

    long totalSize = 0;
    for (File file : directory.listFiles()) {
        log.info("Adding file[%s] with size[%,d].  Total size so far[%,d]", file, file.length(), totalSize);
        if (file.length() >= Integer.MAX_VALUE) {
            zipOut.finish();
            throw new IOE("file[%s] too large [%,d]", file, file.length());
        }
        zipOut.putNextEntry(new ZipEntry(file.getName()));
        totalSize += Files.asByteSource(file).copyTo(zipOut);
    }
    zipOut.closeEntry();
    // Workaround for http://hg.openjdk.java.net/jdk8/jdk8/jdk/rev/759aa847dcaf
    zipOut.flush();
    zipOut.finish();

    return totalSize;
}
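
A hypothetical caller of the helper above might look like the following; the paths are made up for illustration, and CompressionUtils refers to the class shown here:

import io.druid.java.util.common.CompressionUtils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;

public class ZipDirectoryExample {
    public static void main(String[] args) throws Exception {
        File sourceDir = new File("/tmp/segment-dir");
        try (OutputStream out = new FileOutputStream("/tmp/segment.zip")) {
            long uncompressedBytes = CompressionUtils.zip(sourceDir, out);
            // zip() calls finish() but not close(), so the caller closes the stream
            System.out.println("Read " + uncompressedBytes + " uncompressed bytes");
        }
    }
}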

From source file:org.syncope.core.scheduling.ReportJob.java

@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {

    Report report = reportDAO.find(reportId);
    if (report == null) {
        throw new JobExecutionException("Report " + reportId + " not found");
    }

    // 1. create execution
    ReportExec execution = new ReportExec();
    execution.setStatus(ReportExecStatus.STARTED);
    execution.setStartDate(new Date());
    execution.setReport(report);
    execution = reportExecDAO.save(execution);

    // 2. define a SAX handler for generating result as XML
    TransformerHandler handler;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    zos.setLevel(Deflater.BEST_COMPRESSION);
    try {
        SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
        handler = transformerFactory.newTransformerHandler();
        Transformer serializer = handler.getTransformer();
        serializer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        serializer.setOutputProperty(OutputKeys.INDENT, "yes");

        // a single ZipEntry in the ZipOutputStream
        zos.putNextEntry(new ZipEntry(report.getName()));

        // streaming SAX handler in a compressed byte array stream
        handler.setResult(new StreamResult(zos));
    } catch (Exception e) {
        throw new JobExecutionException("While configuring for SAX generation", e, true);
    }

    execution.setStatus(ReportExecStatus.RUNNING);
    execution = reportExecDAO.save(execution);

    ConfigurableListableBeanFactory beanFactory = ApplicationContextManager.getApplicationContext()
            .getBeanFactory();

    // 3. actual report execution
    StringBuilder reportExecutionMessage = new StringBuilder();
    StringWriter exceptionWriter = new StringWriter();
    try {
        // report header
        handler.startDocument();
        AttributesImpl atts = new AttributesImpl();
        atts.addAttribute("", "", ATTR_NAME, XSD_STRING, report.getName());
        handler.startElement("", "", ELEMENT_REPORT, atts);

        // iterate over reportlet instances defined for this report
        for (ReportletConf reportletConf : report.getReportletConfs()) {
            Class reportletClass = null;
            try {
                reportletClass = Class.forName(reportletConf.getReportletClassName());
            } catch (ClassNotFoundException e) {
                LOG.error("Reportlet class not found: {}", reportletConf.getReportletClassName(), e);

            }

            if (reportletClass != null) {
                Reportlet autowired = (Reportlet) beanFactory.createBean(reportletClass,
                        AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false);
                autowired.setConf(reportletConf);

                // invoke reportlet
                try {
                    autowired.extract(handler);
                } catch (Exception e) {
                    execution.setStatus(ReportExecStatus.FAILURE);

                    Throwable t = e instanceof ReportException ? e.getCause() : e;
                    exceptionWriter.write(t.getMessage() + "\n\n");
                    t.printStackTrace(new PrintWriter(exceptionWriter));
                    reportExecutionMessage.append(exceptionWriter.toString()).append("\n==================\n");
                }
            }
        }

        // report footer
        handler.endElement("", "", ELEMENT_REPORT);
        handler.endDocument();

        if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {

            execution.setStatus(ReportExecStatus.SUCCESS);
        }
    } catch (Exception e) {
        execution.setStatus(ReportExecStatus.FAILURE);

        exceptionWriter.write(e.getMessage() + "\n\n");
        e.printStackTrace(new PrintWriter(exceptionWriter));
        reportExecutionMessage.append(exceptionWriter.toString());

        throw new JobExecutionException(e, true);
    } finally {
        try {
            zos.closeEntry();
            zos.close();
            baos.close();
        } catch (IOException e) {
            LOG.error("While closing StreamResult's backend", e);
        }

        execution.setExecResult(baos.toByteArray());
        execution.setMessage(reportExecutionMessage.toString());
        execution.setEndDate(new Date());
        reportExecDAO.save(execution);
    }
}

From source file:com.thruzero.common.core.fs.walker.visitor.ZipCompressingVisitor.java

/**
 * Compress the given file and add it to the archive.
 */
@Override
public void visitFile(final File file) throws IOException {
    logHelper.logZippingFile(file);

    // zip the file
    String relativePath = getRelativePath(file);
    ZipEntry zipEntry = new ZipEntry(relativePath);
    zipOut.putNextEntry(zipEntry);
    BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
    IOUtils.copy(bis, zipOut);
    zipOut.flush();
    zipOut.closeEntry();
    bis.close();

    getStatus().incNumProcessed();
}

From source file:io.apicurio.hub.api.codegen.OpenApi2Thorntail.java

/**
 * Generate the Thorntail project.
 * @throws IOException
 */
public ByteArrayOutputStream generate() throws IOException {
    ByteArrayOutputStream output = new ByteArrayOutputStream();

    CodegenInfo info = getInfoFromApiDoc();

    try (ZipOutputStream zos = new ZipOutputStream(output)) {
        if (!this.updateOnly) {
            zos.putNextEntry(new ZipEntry("pom.xml"));
            zos.write(generatePomXml(info).getBytes());
            zos.closeEntry();

            zos.putNextEntry(new ZipEntry("Dockerfile"));
            zos.write(generateDockerfile().getBytes());
            zos.closeEntry();

            zos.putNextEntry(new ZipEntry("openshift-template.yml"));
            zos.write(generateOpenshiftTemplate().getBytes());
            zos.closeEntry();

            zos.putNextEntry(new ZipEntry("src/main/resources/META-INF/microprofile-config.properties"));
            zos.write(generateMicroprofileConfigProperties().getBytes());
            zos.closeEntry();
        }

        zos.putNextEntry(new ZipEntry("src/main/resources/META-INF/openapi.json"));
        zos.write(this.openApiDoc.getBytes());
        zos.closeEntry();

        if (!this.updateOnly) {
            zos.putNextEntry(
                    new ZipEntry(javaPackageToZipPath(this.settings.javaPackage) + "JaxRsApplication.java"));
            zos.write(generateJaxRsApplication().getBytes());
            zos.closeEntry();
        }

        for (CodegenJavaInterface iface : info.getInterfaces()) {
            String javaInterface = generateJavaInterface(iface);
            zos.putNextEntry(
                    new ZipEntry(javaPackageToZipPath(iface.getPackage()) + iface.getName() + ".java"));
            zos.write(javaInterface.getBytes());
            zos.closeEntry();
        }

        IndexedCodeWriter codeWriter = new IndexedCodeWriter();
        for (CodegenJavaBean bean : info.getBeans()) {
            generateJavaBean(bean, info, codeWriter);
        }
        for (String key : codeWriter.getKeys()) {
            zos.putNextEntry(new ZipEntry(javaClassToZipPath(key)));
            zos.write(codeWriter.get(key).getBytes());
            zos.closeEntry();
        }
    }

    return output;
}

From source file:fr.cirad.mgdb.exporting.individualoriented.DARwinExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles,
        boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor,
        Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    GenotypingProject aProject = mongoTemplate.findOne(
            new Query(Criteria.where(GenotypingProject.FIELDNAME_PLOIDY_LEVEL).exists(true)),
            GenotypingProject.class);
    if (aProject == null)
        LOG.warn("Unable to find a project containing ploidy level information! Assuming ploidy level is 2.");

    int ploidy = aProject == null ? 2 : aProject.getPloidyLevel();

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size()
            + "individuals";

    StringBuffer donFileContents = new StringBuffer(
            "@DARwin 5.0 - DON -" + LINE_SEPARATOR + individualExportFiles.size() + "\t" + 1 + LINE_SEPARATOR
                    + "N" + "\t" + "individual" + LINE_SEPARATOR);

    int count = 0;
    String missingGenotype = "";
    for (int j = 0; j < ploidy; j++)
        missingGenotype += "\tN";

    zos.putNextEntry(new ZipEntry(exportName + ".var"));
    zos.write(("@DARwin 5.0 - ALLELIC - " + ploidy + LINE_SEPARATOR + individualExportFiles.size() + "\t"
            + markerCount * ploidy + LINE_SEPARATOR + "N").getBytes());

    DBCursor markerCursorCopy = markerCursor.copy(); // dunno how expensive this is, but seems safer than keeping all IDs in memory at any time

    short nProgress = 0, nPreviousProgress = 0;
    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    markerCursorCopy.batchSize(nChunkSize);

    int nMarkerIndex = 0;
    while (markerCursorCopy.hasNext()) {
        DBObject exportVariant = markerCursorCopy.next();
        Comparable markerId = (Comparable) exportVariant.get("_id");

        if (markerSynonyms != null) {
            Comparable syn = markerSynonyms.get(markerId);
            if (syn != null)
                markerId = syn;
        }
        for (int j = 0; j < ploidy; j++)
            zos.write(("\t" + markerId).getBytes());
    }

    TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>();
    ArrayList<String> distinctAlleles = new ArrayList<String>(); // the index of each allele will be used as its code
    int i = 0;
    for (File f : individualExportFiles) {
        BufferedReader in = new BufferedReader(new FileReader(f));
        try {
            String individualId, line = in.readLine(); // read sample id

            if (line != null)
                individualId = line;
            else
                throw new Exception("Unable to read first line of temp export file " + f.getName());

            donFileContents.append(++count + "\t" + individualId + LINE_SEPARATOR);

            zos.write((LINE_SEPARATOR + count).getBytes());
            nMarkerIndex = 0;

            while ((line = in.readLine()) != null) {
                List<String> genotypes = MgdbDao.split(line, "|");
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                for (String genotype : genotypes) {
                    if (genotype.length() == 0)
                        continue; /* skip missing genotypes */

                    int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                    if (gtCount > highestGenotypeCount) {
                        highestGenotypeCount = gtCount;
                        mostFrequentGenotype = genotype;
                    }
                    genotypeCounts.put(genotype, gtCount);
                }

                if (genotypeCounts.size() > 1) {
                    warningFileWriter.write("- Dissimilar genotypes found for variant __" + nMarkerIndex
                            + "__, individual " + individualId + ". Exporting most frequent: "
                            + mostFrequentGenotype + "\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String codedGenotype = "";
                if (mostFrequentGenotype != null)
                    for (String allele : mostFrequentGenotype.split(" ")) {
                        if (!distinctAlleles.contains(allele))
                            distinctAlleles.add(allele);
                        codedGenotype += "\t" + distinctAlleles.indexOf(allele);
                    }
                else
                    codedGenotype = missingGenotype.replaceAll("N", "-1"); // missing data is coded as -1
                zos.write(codedGenotype.getBytes());

                nMarkerIndex++;
            }
        } catch (Exception e) {
            LOG.error("Error exporting data", e);
            progress.setError("Error exporting data: " + e.getClass().getSimpleName()
                    + (e.getMessage() != null ? " - " + e.getMessage() : ""));
            return;
        } finally {
            in.close();
        }

        if (progress.hasAborted())
            return;

        nProgress = (short) (++i * 100 / individualExportFiles.size());
        if (nProgress > nPreviousProgress) {
            //            LOG.debug("============= doDARwinExport (" + i + "): " + nProgress + "% =============");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }

        if (!f.delete()) {
            f.deleteOnExit();
            LOG.info("Unable to delete tmp export file " + f.getAbsolutePath());
        }
    }

    zos.putNextEntry(new ZipEntry(exportName + ".don"));
    zos.write(donFileContents.toString().getBytes());

    // now read variant names for those that induced warnings
    nMarkerIndex = 0;
    markerCursor.batchSize(nChunkSize);
    while (markerCursor.hasNext()) {
        DBObject exportVariant = markerCursor.next();
        if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) {
            Comparable markerId = (Comparable) exportVariant.get("_id");

            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(markerId);
                if (syn != null)
                    markerId = syn;
            }
            for (int j = 0; j < ploidy; j++)
                zos.write(("\t" + markerId).getBytes());

            problematicMarkerIndexToNameMap.put(nMarkerIndex, markerId);
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet())
                sLine = sLine.replaceAll("__" + aMarkerIndex + "__",
                        problematicMarkerIndexToNameMap.get(aMarkerIndex).toString());
            zos.write((sLine + "\n").getBytes());
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file:fr.ippon.wip.config.ZipConfiguration.java

/**
 * Create a zip archive from a configuration.
 *
 * @param configuration
 *            the configuration to zip
 * @param out
 *            the stream to be used
 */
public void zip(WIPConfiguration configuration, ZipOutputStream out) {
    XMLConfigurationDAO xmlConfigurationDAO = new XMLConfigurationDAO(FileUtils.getTempDirectoryPath());

    /*
     * A configuration with the same name may already have been unzipped in
     * the temp directory, so we try to delete it to avoid name
     * modification (see ConfigurationDAO.correctConfigurationName).
     */
    xmlConfigurationDAO.delete(configuration);
    xmlConfigurationDAO.create(configuration);

    String configName = configuration.getName();

    try {
        int[] types = new int[] { XMLConfigurationDAO.FILE_NAME_CLIPPING,
                XMLConfigurationDAO.FILE_NAME_TRANSFORM, XMLConfigurationDAO.FILE_NAME_CONFIG };
        for (int type : types) {
            File file = xmlConfigurationDAO.getConfigurationFile(configName, type);
            ZipEntry entry = new ZipEntry(file.getName());
            out.putNextEntry(entry);
            copy(FileUtils.openInputStream(file), out);
            out.closeEntry();
        }

    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.stimulus.archiva.presentation.ExportBean.java

@Override
protected StreamInfo getStreamInfo(ActionMapping mapping, ActionForm form, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    SearchBean searchBean = (SearchBean) form;

    String outputDir = Config.getFileSystem().getViewPath() + File.separatorChar;
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
    String zipFileName = "export-" + sdf.format(new Date()) + ".zip";
    File zipFile = new File(outputDir + zipFileName);

    String agent = request.getHeader("USER-AGENT");
    if (null != agent && -1 != agent.indexOf("MSIE")) {
        String codedfilename = URLEncoder.encode(zipFileName, "UTF8");
        response.setContentType("application/x-download");
        response.setHeader("Content-Disposition", "attachment;filename=" + codedfilename);
    } else if (null != agent && -1 != agent.indexOf("Mozilla")) {
        String codedfilename = MimeUtility.encodeText(zipFileName, "UTF8", "B");
        response.setContentType("application/x-download");
        response.setHeader("Content-Disposition", "attachment;filename=" + codedfilename);
    } else {
        response.setHeader("Content-Disposition", "attachment;filename=" + zipFileName);
    }

    logger.debug("size of searchResult = " + searchBean.getSearchResults().size());
    //MessageBean.viewMessage
    List<File> files = new ArrayList<File>();
    for (SearchResultBean searchResult : searchBean.getSearchResults()) {
        if (searchResult.getSelected()) {
            Email email = MessageService.getMessageByID(searchResult.getVolumeID(), searchResult.getUniqueID(),
                    false);

            HttpServletRequest hsr = ActionContext.getActionContext().getRequest();
            String baseURL = hsr.getRequestURL().substring(0,
                    hsr.getRequestURL().lastIndexOf(hsr.getServletPath()));
            MessageExtraction messageExtraction = MessageService.extractMessage(email, baseURL, true); // can take a while to extract message

            //              MessageBean mbean = new MessageBean();
            //              mbean.setMessageID(searchResult.getUniqueID());
            //              mbean.setVolumeID(searchResult.getVolumeID());
            //              writer.println(searchResult.toString());
            //              writer.println(messageExtraction.getFileName());

            File fileToAdd = new File(outputDir, messageExtraction.getFileName());
            if (!files.contains(fileToAdd)) {
                files.add(fileToAdd);
            }
        }
    }

    ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipFile));
    try {
        byte[] buf = new byte[1024];
        for (File f : files) {
            ZipEntry ze = new ZipEntry(f.getName());
            logger.debug("Adding file " + f.getName());
            zos.putNextEntry(ze);
            InputStream is = new BufferedInputStream(new FileInputStream(f));
            for (;;) {
                int len = is.read(buf);
                if (len < 0)
                    break;
                zos.write(buf, 0, len);
            }
            is.close();
            Config.getFileSystem().getTempFiles().markForDeletion(f);
        }
    } finally {
        zos.close();
    }
    logger.debug("download zipped emails {fileName='" + zipFileName + "'}");

    String contentType = "application/zip";
    Config.getFileSystem().getTempFiles().markForDeletion(zipFile);
    return new FileStreamInfo(contentType, zipFile);
}

From source file:ZipTransformTest.java

public void testStreamTransformer() throws IOException {
    final String name = "foo";
    final byte[] contents = "bar".getBytes();
    final byte[] transformed = "cbs".getBytes();

    File file1 = File.createTempFile("temp", null);
    File file2 = File.createTempFile("temp", null);
    try {
        // Create the ZIP file
        ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(file1));
        try {
            zos.putNextEntry(new ZipEntry(name));
            zos.write(contents);
            zos.closeEntry();
        } finally {
            IOUtils.closeQuietly(zos);
        }

        // Transform the ZIP file
        ZipUtil.transformEntry(file1, name, new StreamZipEntryTransformer() {
            protected void transform(ZipEntry zipEntry, InputStream in, OutputStream out) throws IOException {
                int b;
                while ((b = in.read()) != -1)
                    out.write(b + 1);
            }
        }, file2);

        // Test the ZipUtil
        byte[] actual = ZipUtil.unpackEntry(file2, name);
        assertNotNull(actual);
        assertEquals(new String(transformed), new String(actual));
    } finally {
        FileUtils.deleteQuietly(file1);
        FileUtils.deleteQuietly(file2);
    }
}

From source file:com.sangupta.jerry.util.ZipUtils.java

/**
 * Compresses the provided file into ZIP format, appending '.zip' to the end
 * of the filename.
 * 
 * @param filePath
 *            the file path that needs to be compressed
 * 
 * @return the path of the generated ZIP file.
 */
public String createZipFile(String filePath) {
    LOGGER.debug("Starting compression of " + filePath);

    String zipFilename = filePath + ".zip";
    LOGGER.debug("Creating zip file at " + zipFilename);

    byte[] buf = new byte[1024];

    ZipOutputStream stream = null;
    FileInputStream input = null;
    try {
        // Create the ZIP file
        stream = new ZipOutputStream(new FileOutputStream(zipFilename));

        // Compress the file
        File file = new File(filePath);
        input = new FileInputStream(file);

        // Add ZIP entry to output stream.
        stream.putNextEntry(new ZipEntry(file.getName()));

        // Transfer bytes from the file to the ZIP file
        int len;
        while ((len = input.read(buf)) > 0) {
            stream.write(buf, 0, len);
        }

        // Complete the entry
        stream.closeEntry();
    } catch (IOException e) {
        LOGGER.error("Unable to compress file " + filePath, e);
    } finally {
        IOUtils.closeQuietly(input);

        // Complete the ZIP file
        IOUtils.closeQuietly(stream);
    }

    return zipFilename;
}
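
The same approach can be written as a self-contained sketch using only java.util.zip, with try-with-resources standing in for the IOUtils.closeQuietly calls (the input path is hypothetical):

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class SingleFileZipExample {
    public static void main(String[] args) throws Exception {
        File file = new File("/tmp/report.txt"); // hypothetical input file
        String zipFilename = file.getPath() + ".zip";
        try (FileInputStream input = new FileInputStream(file);
                ZipOutputStream stream = new ZipOutputStream(new FileOutputStream(zipFilename))) {
            // one entry named after the source file, as in the example above
            stream.putNextEntry(new ZipEntry(file.getName()));
            byte[] buf = new byte[1024];
            int len;
            while ((len = input.read(buf)) > 0) {
                stream.write(buf, 0, len);
            }
            stream.closeEntry();
        }
    }
}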