Example usage for java.util.zip ZipOutputStream setLevel

Introduction

On this page you can find example usages of java.util.zip.ZipOutputStream.setLevel.

Prototype

public void setLevel(int level) 

Document

Sets the compression level for subsequent entries which are DEFLATED.
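
As a quick orientation before the real-world examples below, here is a minimal, self-contained sketch (the file name and entry contents are made up for illustration). The level must be Deflater.DEFAULT_COMPRESSION (-1) or a value from Deflater.NO_COMPRESSION (0) to Deflater.BEST_COMPRESSION (9); any other value throws IllegalArgumentException, and a new level only affects entries started after the call.

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class SetLevelDemo {
    public static void main(String[] args) throws IOException {
        // try-with-resources finishes and closes the archive
        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("demo.zip"))) {
            // level 1: fastest, largest output
            zos.setLevel(Deflater.BEST_SPEED);
            zos.putNextEntry(new ZipEntry("fast.txt"));
            zos.write("compressed quickly".getBytes(StandardCharsets.UTF_8));
            zos.closeEntry();

            // level 9: slowest, smallest output; applies only to entries started after this call
            zos.setLevel(Deflater.BEST_COMPRESSION);
            zos.putNextEntry(new ZipEntry("small.txt"));
            zos.write("compressed as much as possible".getBytes(StandardCharsets.UTF_8));
            zos.closeEntry();
        }
    }
}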

Usage

From source file:com.joliciel.talismane.machineLearning.linearsvm.LinearSVMOneVsRestModel.java

@Override
public void writeModelToStream(OutputStream outputStream) {
    try {
        ZipOutputStream zos = new ZipOutputStream(outputStream);
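        // ZipOutputStream.STORED equals 0, so this sets compression level 0 (no compression); it does not switch the entry method to STORED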
        zos.setLevel(ZipOutputStream.STORED);
        int i = 0;
        for (Model model : models) {
            LOG.debug("Writing model " + i + " for outcome " + outcomes.get(i));
            ZipEntry zipEntry = new ZipEntry("model" + i);
            i++;
            zos.putNextEntry(zipEntry);
            Writer writer = new OutputStreamWriter(zos, "UTF-8");
            Writer unclosableWriter = new UnclosableWriter(writer);
            model.save(unclosableWriter);
            // flush the wrapping writer so buffered characters reach the entry before it is closed
            writer.flush();
            zos.closeEntry();
            zos.flush();
        }
    } catch (UnsupportedEncodingException e) {
        LogUtils.logError(LOG, e);
        throw new RuntimeException(e);
    } catch (IOException e) {
        LogUtils.logError(LOG, e);
        throw new RuntimeException(e);
    }
}
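
A note on the call above: since ZipOutputStream.STORED equals 0, setLevel(ZipOutputStream.STORED) merely selects level 0 (no compression) for DEFLATED entries. If the intent were entries written with the STORED (uncompressed) method, ZipOutputStream requires the size and CRC-32 to be set before putNextEntry. A hedged sketch, with class and method names chosen for illustration rather than taken from the source above:

import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class StoredEntryHelper {

    // Writes one genuinely uncompressed (STORED) entry; size and CRC-32 must be known up front.
    public static void putStoredEntry(ZipOutputStream zos, String name, byte[] data) throws IOException {
        ZipEntry entry = new ZipEntry(name);
        entry.setMethod(ZipEntry.STORED);
        entry.setSize(data.length);
        entry.setCompressedSize(data.length);
        CRC32 crc = new CRC32();
        crc.update(data);
        entry.setCrc(crc.getValue());
        zos.putNextEntry(entry);
        zos.write(data);
        zos.closeEntry();
    }
}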

From source file:org.nuxeo.ecm.server.info.service.impl.ServerInfoCollectorImpl.java

@Override
public File collectInfoAsZip() throws Exception {

    // TODO: let the caller delete the file when done with it
    File tmpFile = File.createTempFile("NXserverInfo-", ".zip");
    tmpFile.deleteOnExit();

    FileOutputStream fos = new FileOutputStream(tmpFile);
    ZipOutputStream out = new ZipOutputStream(fos);
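    // Deflater.DEFAULT_COMPRESSION (-1) is already the ZipOutputStream default; the explicit call just documents the choice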
    out.setLevel(Deflater.DEFAULT_COMPRESSION);
    try {
        // System
        // TODO: externalize logic from Seam component to a service
        SystemInfoManager sysInfoManager = new SystemInfoManager();
        String sysInfo = sysInfoManager.getHostInfo();
        addZipEntry(out, "system.info", sysInfo);

        // Nuxeo distrib
        SimplifiedServerInfo serverInfo = RuntimeInstrospection.getInfo();
        String distribInfo = getDistribInfo(serverInfo);
        addZipEntry(out, "distrib.info", distribInfo);

        // Installed marketplace packages
        ConfigurationGenerator cg = new ConfigurationGenerator();
        // String installedPackages = getInstalledPackages(cg.getEnv());
        // addZipEntry(out, "packages.info", installedPackages);

        // Nuxeo configuration (nuxeo.conf)
        File configFile = cg.getNuxeoConf();
        addZipEntry(out, "nuxeo.conf", configFile);

        // Server logs (server.log)
        File logFile = new File(cg.getLogDir(), "server.log");
        addZipEntry(out, "server.log", logFile);

        return tmpFile;
    } finally {
        try {
            out.close();
            fos.close();
        } catch (IOException e) {
            log.error("Error while closing output streams");
        }
    }
}

From source file:org.apache.syncope.core.logic.report.ReportJob.java

@SuppressWarnings("rawtypes")
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {
    Report report = reportDAO.find(reportKey);
    if (report == null) {
        throw new JobExecutionException("Report " + reportKey + " not found");
    }

    // 1. create execution
    ReportExec execution = entityFactory.newEntity(ReportExec.class);
    execution.setStatus(ReportExecStatus.STARTED);
    execution.setStartDate(new Date());
    execution.setReport(report);
    execution = reportExecDAO.save(execution);

    report.addExec(execution);
    report = reportDAO.save(report);

    // 2. define a SAX handler for generating result as XML
    TransformerHandler handler;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    zos.setLevel(Deflater.BEST_COMPRESSION);
    try {
        SAXTransformerFactory tFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
        handler = tFactory.newTransformerHandler();
        Transformer serializer = handler.getTransformer();
        serializer.setOutputProperty(OutputKeys.ENCODING, SyncopeConstants.DEFAULT_ENCODING);
        serializer.setOutputProperty(OutputKeys.INDENT, "yes");

        // a single ZipEntry in the ZipOutputStream
        zos.putNextEntry(new ZipEntry(report.getName()));

        // streaming SAX handler in a compressed byte array stream
        handler.setResult(new StreamResult(zos));
    } catch (Exception e) {
        throw new JobExecutionException("While configuring for SAX generation", e, true);
    }

    execution.setStatus(ReportExecStatus.RUNNING);
    execution = reportExecDAO.save(execution);

    // 3. actual report execution
    StringBuilder reportExecutionMessage = new StringBuilder();
    try {
        // report header
        handler.startDocument();
        AttributesImpl atts = new AttributesImpl();
        atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, report.getName());
        handler.startElement("", "", ReportXMLConst.ELEMENT_REPORT, atts);

        // iterate over reportlet instances defined for this report
        for (ReportletConf reportletConf : report.getReportletConfs()) {
            Class<Reportlet> reportletClass = dataBinder
                    .findReportletClassHavingConfClass(reportletConf.getClass());
            if (reportletClass != null) {
                Reportlet<ReportletConf> autowired = (Reportlet<ReportletConf>) ApplicationContextProvider
                        .getBeanFactory()
                        .createBean(reportletClass, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false);
                autowired.setConf(reportletConf);

                // invoke reportlet
                try {
                    autowired.extract(handler);
                } catch (Exception e) {
                    execution.setStatus(ReportExecStatus.FAILURE);

                    Throwable t = e instanceof ReportException ? e.getCause() : e;
                    reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(t))
                            .append("\n==================\n");
                }
            }
        }

        // report footer
        handler.endElement("", "", ReportXMLConst.ELEMENT_REPORT);
        handler.endDocument();

        if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {
            execution.setStatus(ReportExecStatus.SUCCESS);
        }
    } catch (Exception e) {
        execution.setStatus(ReportExecStatus.FAILURE);
        reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(e));

        throw new JobExecutionException(e, true);
    } finally {
        try {
            zos.closeEntry();
            IOUtils.closeQuietly(zos);
            IOUtils.closeQuietly(baos);
        } catch (IOException e) {
            LOG.error("While closing StreamResult's backend", e);
        }

        execution.setExecResult(baos.toByteArray());
        execution.setMessage(reportExecutionMessage.toString());
        execution.setEndDate(new Date());
        reportExecDAO.save(execution);
    }
}

From source file:org.apache.syncope.core.report.ReportJob.java

@SuppressWarnings("rawtypes")
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {
    Report report = reportDAO.find(reportId);
    if (report == null) {
        throw new JobExecutionException("Report " + reportId + " not found");
    }

    // 1. create execution
    ReportExec execution = new ReportExec();
    execution.setStatus(ReportExecStatus.STARTED);
    execution.setStartDate(new Date());
    execution.setReport(report);
    execution = reportExecDAO.save(execution);

    report.addExec(execution);
    report = reportDAO.save(report);

    // 2. define a SAX handler for generating result as XML
    TransformerHandler handler;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    zos.setLevel(Deflater.BEST_COMPRESSION);
    try {
        SAXTransformerFactory tFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
        handler = tFactory.newTransformerHandler();
        Transformer serializer = handler.getTransformer();
        serializer.setOutputProperty(OutputKeys.ENCODING, SyncopeConstants.DEFAULT_ENCODING);
        serializer.setOutputProperty(OutputKeys.INDENT, "yes");

        // a single ZipEntry in the ZipOutputStream
        zos.putNextEntry(new ZipEntry(report.getName()));

        // streaming SAX handler in a compressed byte array stream
        handler.setResult(new StreamResult(zos));
    } catch (Exception e) {
        throw new JobExecutionException("While configuring for SAX generation", e, true);
    }

    execution.setStatus(ReportExecStatus.RUNNING);
    execution = reportExecDAO.save(execution);

    // 3. actual report execution
    StringBuilder reportExecutionMessage = new StringBuilder();
    StringWriter exceptionWriter = new StringWriter();
    try {
        // report header
        handler.startDocument();
        AttributesImpl atts = new AttributesImpl();
        atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, report.getName());
        handler.startElement("", "", ReportXMLConst.ELEMENT_REPORT, atts);

        // iterate over reportlet instances defined for this report
        for (ReportletConf reportletConf : report.getReportletConfs()) {
            Class<Reportlet> reportletClass = dataBinder
                    .findReportletClassHavingConfClass(reportletConf.getClass());
            if (reportletClass != null) {
                Reportlet autowired = (Reportlet) ApplicationContextProvider.getBeanFactory()
                        .createBean(reportletClass, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false);
                autowired.setConf(reportletConf);

                // invoke reportlet
                try {
                    autowired.extract(handler);
                } catch (Exception e) {
                    execution.setStatus(ReportExecStatus.FAILURE);

                    Throwable t = e instanceof ReportException ? e.getCause() : e;
                    reportExecutionMessage.append(ExceptionUtil.getFullStackTrace(t))
                            .append("\n==================\n");
                }
            }
        }

        // report footer
        handler.endElement("", "", ReportXMLConst.ELEMENT_REPORT);
        handler.endDocument();

        if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {
            execution.setStatus(ReportExecStatus.SUCCESS);
        }
    } catch (Exception e) {
        execution.setStatus(ReportExecStatus.FAILURE);
        reportExecutionMessage.append(ExceptionUtil.getFullStackTrace(e));

        throw new JobExecutionException(e, true);
    } finally {
        try {
            zos.closeEntry();
            IOUtils.closeQuietly(zos);
            IOUtils.closeQuietly(baos);
        } catch (IOException e) {
            LOG.error("While closing StreamResult's backend", e);
        }

        execution.setExecResult(baos.toByteArray());
        execution.setMessage(reportExecutionMessage.toString());
        execution.setEndDate(new Date());
        reportExecDAO.save(execution);
    }
}

From source file:org.messic.server.api.APIPlayLists.java

@Transactional
public void getPlaylistZip(User user, Long playlistSid, OutputStream os)
        throws IOException, SidNotFoundMessicException {
    MDOPlaylist mdoplaylist = daoPlaylist.get(user.getLogin(), playlistSid);
    if (mdoplaylist == null) {
        throw new SidNotFoundMessicException();
    }
    List<MDOSong> desiredSongs = mdoplaylist.getSongs();

    ZipOutputStream zos = new ZipOutputStream(os);
    // level - the compression level (0-9)
    zos.setLevel(9);

    HashMap<String, String> songs = new HashMap<String, String>();

    M3U m3u = new M3U();
    m3u.setExtensionM3U(true);
    List<Resource> resources = m3u.getResources();

    for (MDOSong song : desiredSongs) {
        if (song != null) {

            // add file
            // extract the relative name for entry purpose
            String entryName = song.getLocation();
            if (songs.get(entryName) == null) {
                Resource r = new Resource();
                r.setLocation(song.getLocation());
                r.setName(song.getName());
                resources.add(r);

                songs.put(entryName, "ok");
                // song not repeated
                ZipEntry ze = new ZipEntry(entryName);
                zos.putNextEntry(ze);
                FileInputStream in = new FileInputStream(song.calculateAbsolutePath(daoSettings.getSettings()));
                int len;
                byte buffer[] = new byte[1024];
                while ((len = in.read(buffer)) > 0) {
                    zos.write(buffer, 0, len);
                }
                in.close();
                zos.closeEntry();
            }
        }
    }

    // the last is the playlist m3u
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        m3u.writeTo(baos, "UTF8");
        // song not repeated
        ZipEntry ze = new ZipEntry(mdoplaylist.getName() + ".m3u");
        zos.putNextEntry(ze);
        byte[] bytes = baos.toByteArray();
        zos.write(bytes, 0, bytes.length);
        zos.closeEntry();
    } catch (Exception e) {
        e.printStackTrace();
    }

    zos.close();
}

From source file:org.apache.syncope.core.logic.report.ReportJobDelegate.java

@Transactional
public void execute(final String reportKey) throws JobExecutionException {
    Report report = reportDAO.find(reportKey);
    if (report == null) {
        throw new JobExecutionException("Report " + reportKey + " not found");
    }

    if (!report.isActive()) {
        LOG.info("Report {} not active, aborting...", reportKey);
        return;
    }

    // 1. create execution
    ReportExec execution = entityFactory.newEntity(ReportExec.class);
    execution.setStatus(ReportExecStatus.STARTED);
    execution.setStart(new Date());
    execution.setReport(report);
    execution = reportExecDAO.save(execution);

    report.add(execution);
    report = reportDAO.save(report);

    // 2. define a SAX handler for generating result as XML
    TransformerHandler handler;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    zos.setLevel(Deflater.BEST_COMPRESSION);
    try {
        SAXTransformerFactory tFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
        tFactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);
        handler = tFactory.newTransformerHandler();
        Transformer serializer = handler.getTransformer();
        serializer.setOutputProperty(OutputKeys.ENCODING, SyncopeConstants.DEFAULT_ENCODING);
        serializer.setOutputProperty(OutputKeys.INDENT, "yes");

        // a single ZipEntry in the ZipOutputStream
        zos.putNextEntry(new ZipEntry(report.getName()));

        // streaming SAX handler in a compressed byte array stream
        handler.setResult(new StreamResult(zos));
    } catch (Exception e) {
        throw new JobExecutionException("While configuring for SAX generation", e, true);
    }

    execution.setStatus(ReportExecStatus.RUNNING);
    execution = reportExecDAO.save(execution);

    // 3. actual report execution
    StringBuilder reportExecutionMessage = new StringBuilder();
    try {
        // report header
        handler.startDocument();
        AttributesImpl atts = new AttributesImpl();
        atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, report.getName());
        handler.startElement("", "", ReportXMLConst.ELEMENT_REPORT, atts);

        // iterate over reportlet instances defined for this report
        for (ReportletConf reportletConf : report.getReportletConfs()) {
            Class<? extends Reportlet> reportletClass = implementationLookup
                    .getReportletClass(reportletConf.getClass());
            if (reportletClass == null) {
                LOG.warn("Could not find matching reportlet for {}", reportletConf.getClass());
            } else {
                // fetch (or create) reportlet
                Reportlet reportlet;
                if (ApplicationContextProvider.getBeanFactory().containsSingleton(reportletClass.getName())) {
                    reportlet = (Reportlet) ApplicationContextProvider.getBeanFactory()
                            .getSingleton(reportletClass.getName());
                } else {
                    reportlet = (Reportlet) ApplicationContextProvider.getBeanFactory()
                            .createBean(reportletClass, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false);
                    ApplicationContextProvider.getBeanFactory().registerSingleton(reportletClass.getName(),
                            reportlet);
                }

                // invoke reportlet
                try {
                    reportlet.extract(reportletConf, handler);
                } catch (Throwable t) {
                    LOG.error("While executing reportlet {} for report {}", reportlet, reportKey, t);

                    execution.setStatus(ReportExecStatus.FAILURE);

                    Throwable effective = t instanceof ReportException ? t.getCause() : t;
                    reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(effective))
                            .append("\n==================\n");
                }
            }
        }

        // report footer
        handler.endElement("", "", ReportXMLConst.ELEMENT_REPORT);
        handler.endDocument();

        if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {
            execution.setStatus(ReportExecStatus.SUCCESS);
        }
    } catch (Exception e) {
        execution.setStatus(ReportExecStatus.FAILURE);
        reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(e));

        throw new JobExecutionException(e, true);
    } finally {
        try {
            zos.closeEntry();
            IOUtils.closeQuietly(zos);
            IOUtils.closeQuietly(baos);
        } catch (IOException e) {
            LOG.error("While closing StreamResult's backend", e);
        }

        execution.setExecResult(baos.toByteArray());
        execution.setMessage(reportExecutionMessage.toString());
        execution.setEnd(new Date());
        reportExecDAO.save(execution);
    }
}

From source file:org.geoserver.wps.ppio.ZipArchivePPIO.java

/**
 * Encodes the output file.
 * 
 * @param output the output
 * @param os the os
 * @throws Exception the exception
 */
@SuppressWarnings("rawtypes")
@Override
public void encode(final Object output, OutputStream os) throws Exception {
    // avoid double zipping
    if (output instanceof File && isZpFile((File) output)) {
        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.log(Level.FINE, "File is already a zip, we have only to copy it");
        }
        FileUtils.copyFile((File) output, os);
        return;
    }

    ZipOutputStream zipout = new ZipOutputStream(os);
    zipout.setLevel(compressionLevel);

    // directory
    if (output instanceof File) {
        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.log(Level.FINE, "Zipping the file");
        }
        final File file = ((File) output);
        if (file.isDirectory()) {
            IOUtils.zipDirectory(file, zipout, FileFilterUtils.trueFileFilter());
        } else {
            // check if is a zip file already
            zipFile(file, zipout);
        }
    } else {
        // list of files
        if (output instanceof Collection) {
            if (LOGGER.isLoggable(Level.FINE)) {
                LOGGER.log(Level.FINE, "Zipping the collection");
            }
            // create temp dir
            final Collection collection = (Collection) output;
            for (Object obj : collection) {
                if (obj instanceof File) {
                    // convert to file and add to zip
                    final File file = ((File) obj);
                    if (file.isDirectory()) {
                        IOUtils.zipDirectory(file, zipout, FileFilterUtils.trueFileFilter());
                    } else {
                        // check if is a zip file already
                        zipFile(file, zipout);
                    }
                } else {
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("Skipping object -->" + obj.toString());
                    }
                }
            }
        } else {
            // error
            throw new IllegalArgumentException("Unable to zip provided output. Output-->"
                    + (output != null ? output.getClass().getCanonicalName() : "null"));
        }
    }
    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.log(Level.FINE, "Finished to zip");
    }
    zipout.finish();
}

From source file:org.syncope.core.scheduling.ReportJob.java

@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {

    Report report = reportDAO.find(reportId);
    if (report == null) {
        throw new JobExecutionException("Report " + reportId + " not found");
    }

    // 1. create execution
    ReportExec execution = new ReportExec();
    execution.setStatus(ReportExecStatus.STARTED);
    execution.setStartDate(new Date());
    execution.setReport(report);
    execution = reportExecDAO.save(execution);

    // 2. define a SAX handler for generating result as XML
    TransformerHandler handler;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    zos.setLevel(Deflater.BEST_COMPRESSION);
    try {
        SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
        handler = transformerFactory.newTransformerHandler();
        Transformer serializer = handler.getTransformer();
        serializer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        serializer.setOutputProperty(OutputKeys.INDENT, "yes");

        // a single ZipEntry in the ZipOutputStream
        zos.putNextEntry(new ZipEntry(report.getName()));

        // streaming SAX handler in a compressed byte array stream
        handler.setResult(new StreamResult(zos));
    } catch (Exception e) {
        throw new JobExecutionException("While configuring for SAX generation", e, true);
    }

    execution.setStatus(ReportExecStatus.RUNNING);
    execution = reportExecDAO.save(execution);

    ConfigurableListableBeanFactory beanFactory = ApplicationContextManager.getApplicationContext()
            .getBeanFactory();

    // 3. actual report execution
    StringBuilder reportExecutionMessage = new StringBuilder();
    StringWriter exceptionWriter = new StringWriter();
    try {
        // report header
        handler.startDocument();
        AttributesImpl atts = new AttributesImpl();
        atts.addAttribute("", "", ATTR_NAME, XSD_STRING, report.getName());
        handler.startElement("", "", ELEMENT_REPORT, atts);

        // iterate over reportlet instances defined for this report
        for (ReportletConf reportletConf : report.getReportletConfs()) {
            Class reportletClass = null;
            try {
                reportletClass = Class.forName(reportletConf.getReportletClassName());
            } catch (ClassNotFoundException e) {
                LOG.error("Reportlet class not found: {}", reportletConf.getReportletClassName(), e);

            }

            if (reportletClass != null) {
                Reportlet autowired = (Reportlet) beanFactory.createBean(reportletClass,
                        AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false);
                autowired.setConf(reportletConf);

                // invoke reportlet
                try {
                    autowired.extract(handler);
                } catch (Exception e) {
                    execution.setStatus(ReportExecStatus.FAILURE);

                    Throwable t = e instanceof ReportException ? e.getCause() : e;
                    exceptionWriter.write(t.getMessage() + "\n\n");
                    t.printStackTrace(new PrintWriter(exceptionWriter));
                    reportExecutionMessage.append(exceptionWriter.toString()).append("\n==================\n");
                }
            }
        }

        // report footer
        handler.endElement("", "", ELEMENT_REPORT);
        handler.endDocument();

        if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {

            execution.setStatus(ReportExecStatus.SUCCESS);
        }
    } catch (Exception e) {
        execution.setStatus(ReportExecStatus.FAILURE);

        exceptionWriter.write(e.getMessage() + "\n\n");
        e.printStackTrace(new PrintWriter(exceptionWriter));
        reportExecutionMessage.append(exceptionWriter.toString());

        throw new JobExecutionException(e, true);
    } finally {
        try {
            zos.closeEntry();
            zos.close();
            baos.close();
        } catch (IOException e) {
            LOG.error("While closing StreamResult's backend", e);
        }

        execution.setExecResult(baos.toByteArray());
        execution.setMessage(reportExecutionMessage.toString());
        execution.setEndDate(new Date());
        reportExecDAO.save(execution);
    }
}

From source file:it.geosolutions.mariss.wps.ppio.OutputResourcesPPIO.java

@Override
public void encode(Object value, OutputStream os) throws Exception {

    ZipOutputStream zos = null;
    try {
        OutputResource or = (OutputResource) value;

        zos = new ZipOutputStream(os);
        zos.setMethod(ZipOutputStream.DEFLATED);
        zos.setLevel(Deflater.DEFAULT_COMPRESSION);

        Iterator<File> iter = or.getDeletableResourcesIterator();
        while (iter.hasNext()) {

            File tmp = iter.next();
            if (!tmp.exists() || !tmp.canRead() || !tmp.canWrite()) {
                LOGGER.warning("Skip Deletable file '" + tmp.getName() + "' some problems occurred...");
                continue;
            }

            addToZip(tmp, zos);

            if (!tmp.delete()) {
                LOGGER.warning("File '" + tmp.getName() + "' cannot be deleted...");
            }
        }
        iter = null;

        Iterator<File> iter2 = or.getUndeletableResourcesIterator();
        while (iter2.hasNext()) {

            File tmp = iter2.next();
            if (!tmp.exists() || !tmp.canRead()) {
                LOGGER.warning("Skip Undeletable file '" + tmp.getName() + "' some problems occurred...");
                continue;
            }

            addToZip(tmp, zos);

        }
    } finally {
        try {
            zos.close();
        } catch (IOException e) {
            LOGGER.severe(e.getMessage());
        }
    }
}

From source file:hd3gtv.embddb.network.DataBlock.java

byte[] getBytes(Protocol protocol) throws IOException {
    checkIfNotEmpty();

    ByteArrayOutputStream byte_array_out_stream = new ByteArrayOutputStream(Protocol.BUFFER_SIZE);

    DataOutputStream dos = new DataOutputStream(byte_array_out_stream);
    dos.write(Protocol.APP_SOCKET_HEADER_TAG);
    dos.writeInt(Protocol.VERSION);

    /**
     * Start header name
     */
    dos.writeByte(0);
    byte[] request_name_data = request_name.getBytes(Protocol.UTF8);
    dos.writeInt(request_name_data.length);
    dos.write(request_name_data);

    /**
     * Start datas payload
     */
    dos.writeByte(1);

    /**
     * Get datas from zip
     */
    ZipOutputStream zos = new ZipOutputStream(dos);
    zos.setLevel(3);
    entries.forEach(entry -> {
        try {
            entry.toZip(zos);
        } catch (IOException e) {
            log.error("Can't add to zip", e);
        }
    });
    zos.flush();
    zos.finish();
    zos.close();

    dos.flush();
    dos.close();

    byte[] result = byte_array_out_stream.toByteArray();

    if (log.isTraceEnabled()) {
        log.trace("Make raw datas for " + request_name + Hexview.LINESEPARATOR + Hexview.tracelog(result));
    }

    return result;
}