Example usage for java.util.zip ZipOutputStream write

List of usage examples for java.util.zip ZipOutputStream write

Introduction

On this page you can find example usage for java.util.zip.ZipOutputStream.write.

Prototype

public synchronized void write(byte[] b, int off, int len) throws IOException 

Document

Writes an array of bytes to the current ZIP entry data.
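
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: open a ZipOutputStream, start an entry with putNextEntry, pass each buffer read from the source to write(byte[] b, int off, int len), and close the entry. The file names example.zip and data.txt are placeholders, not taken from the examples below.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class ZipWriteExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes both streams even if an exception is thrown
        try (ZipOutputStream zout = new ZipOutputStream(new FileOutputStream("example.zip"));
                FileInputStream in = new FileInputStream("data.txt")) {
            zout.putNextEntry(new ZipEntry("data.txt")); // begin a new ZIP entry
            byte[] buffer = new byte[4096];
            int len;
            while ((len = in.read(buffer)) != -1) {
                zout.write(buffer, 0, len); // write only the bytes actually read
            }
            zout.closeEntry(); // finish the current entry
        }
    }
}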

Usage

From source file:edu.harvard.iq.dvn.core.study.StudyFileServiceBean.java

@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void addIngestedFiles(Long studyId, String versionNote, List fileBeans, Long userId) {
    // if no files, then just return
    if (fileBeans.isEmpty()) {
        return;
    }

    // first some initialization
    StudyVersion studyVersion = null;
    Study study = null;
    MD5Checksum md5Checksum = new MD5Checksum();

    study = em.find(Study.class, studyId);
    studyVersion = study.getEditVersion();
    if (studyVersion.getId() == null) {
        em.persist(studyVersion);
        em.flush();
    }

    studyVersion.setVersionNote(versionNote);

    VDCUser user = userService.find(userId);

    File newDir = new File(FileUtil.getStudyFileDir(),
            study.getAuthority() + File.separator + study.getStudyId());
    if (!newDir.exists()) {
        newDir.mkdirs();
    }

    // now iterate through fileBeans
    Iterator iter = fileBeans.iterator();
    while (iter.hasNext()) {
        StudyFileEditBean fileBean = (StudyFileEditBean) iter.next();

        // For now, the logic is: if the DSB does not return a file, don't copy
        // over anything; this is to cover the situation with the Ingest servlet
        // that takes a control card file to add a dataTable to a preexisting
        // file; this will have to change if we do this two-file method at the
        // time of the original upload
        // (TODO: figure out what this comment means - ? - L.A.)
        // (is this some legacy thing? - it's talking about "ingest servlet"...)
        // (did we ever have a mechanism for adding a data table to an existing
        //  tab file?? - that's actually kinda cool)

        StudyFile f = fileBean.getStudyFile();

        // So, if there is a file: let's move it to its final destination
        // in the study directory. 
        //
        // First, if it's a subsettable or network, or any other
        // kind potentially, that gets transformed on ingest: 

        File newIngestedLocationFile = null;

        if (fileBean.getIngestedSystemFileLocation() != null) {

            String originalFileType = f.getFileType();

            // 1. move ingest-created file:

            File tempIngestedFile = new File(fileBean.getIngestedSystemFileLocation());
            newIngestedLocationFile = new File(newDir, f.getFileSystemName());
            try {
                FileUtil.copyFile(tempIngestedFile, newIngestedLocationFile);
                tempIngestedFile.delete();
                if (f instanceof TabularDataFile) {
                    f.setFileType("text/tab-separated-values");
                }
                f.setFileSystemLocation(newIngestedLocationFile.getAbsolutePath());

            } catch (IOException ex) {
                throw new EJBException(ex);
            }
            // 1b. If this is a NetworkDataFile, move the SQLite file from the temp ingested location to the system location
            if (f instanceof NetworkDataFile) {
                File tempSQLDataFile = new File(tempIngestedFile.getParent(), FileUtil
                        .replaceExtension(tempIngestedFile.getName(), NetworkDataServiceBean.SQLITE_EXTENSION));
                File newSQLDataFile = new File(newDir,
                        f.getFileSystemName() + "." + NetworkDataServiceBean.SQLITE_EXTENSION);

                File tempNeo4jDir = new File(tempIngestedFile.getParent(), FileUtil
                        .replaceExtension(tempIngestedFile.getName(), NetworkDataServiceBean.NEO4J_EXTENSION));
                File newNeo4jDir = new File(newDir,
                        f.getFileSystemName() + "." + NetworkDataServiceBean.NEO4J_EXTENSION);

                try {
                    FileUtil.copyFile(tempSQLDataFile, newSQLDataFile);
                    FileUtils.copyDirectory(tempNeo4jDir, newNeo4jDir);
                    tempSQLDataFile.delete();
                    FileUtils.deleteDirectory(tempNeo4jDir);
                    f.setOriginalFileType(originalFileType);

                } catch (IOException ex) {
                    throw new EJBException(ex);
                }
            }

            // 2. also move original file for archiving
            File tempOriginalFile = new File(fileBean.getTempSystemFileLocation());
            File newOriginalLocationFile = new File(newDir, "_" + f.getFileSystemName());
            try {
                if (fileBean.getControlCardSystemFileLocation() != null
                        && fileBean.getControlCardType() != null) {
                    // 2a. For the control card-based ingests (SPSS and DDI), we save
                    // a zipped bundle of both the card and the raw data file
                    // (TAB-delimited or CSV):

                    FileInputStream instream = null;
                    byte[] dataBuffer = new byte[8192];

                    ZipOutputStream zout = new ZipOutputStream(new FileOutputStream(newOriginalLocationFile));

                    // First, the control card:

                    File controlCardFile = new File(fileBean.getControlCardSystemFileLocation());

                    ZipEntry ze = new ZipEntry(controlCardFile.getName());
                    instream = new FileInputStream(controlCardFile);
                    zout.putNextEntry(ze);

                    int k = 0;
                    while ((k = instream.read(dataBuffer)) > 0) {
                        zout.write(dataBuffer, 0, k);
                        zout.flush();
                    }

                    instream.close();

                    // And then, the data file:

                    ze = new ZipEntry(tempOriginalFile.getName());
                    instream = new FileInputStream(tempOriginalFile);
                    zout.putNextEntry(ze);

                    while ((k = instream.read(dataBuffer)) > 0) {
                        zout.write(dataBuffer, 0, k);
                        zout.flush();
                    }

                    instream.close();

                    zout.close();

                    // and control card file can be deleted now:
                    controlCardFile.delete();

                    // Mime types: 
                    // These are custom, made-up types, used to identify the 
                    // type of the source data:

                    if (fileBean.getControlCardType().equals("spss")) {
                        f.setOriginalFileType("application/x-dvn-csvspss-zip");
                    } else if (fileBean.getControlCardType().equals("ddi")) {
                        f.setOriginalFileType("application/x-dvn-tabddi-zip");
                    } else {
                        logger.info("WARNING: unknown control card-based Ingest type? -- "
                                + fileBean.getControlCardType());
                        f.setOriginalFileType(originalFileType);
                    }
                    f.setMd5(md5Checksum.CalculateMD5(tempOriginalFile.getAbsolutePath()));

                } else {
                    // 2b. Otherwise, simply store the data that was used for
                    // ingest as the original:

                    FileUtil.copyFile(tempOriginalFile, newOriginalLocationFile);
                    f.setOriginalFileType(originalFileType);
                    f.setMd5(md5Checksum.CalculateMD5(newOriginalLocationFile.getAbsolutePath()));
                }
                tempOriginalFile.delete();
            } catch (IOException ex) {
                throw new EJBException(ex);
            }
        } else if (f instanceof SpecialOtherFile) {
            // "Special" OtherFiles are still OtherFiles; we just add the file
            // uploaded by the user to the study as is:

            File tempIngestedFile = new File(fileBean.getTempSystemFileLocation());
            newIngestedLocationFile = new File(newDir, f.getFileSystemName());
            try {
                FileUtil.copyFile(tempIngestedFile, newIngestedLocationFile);
                tempIngestedFile.delete();
                f.setFileSystemLocation(newIngestedLocationFile.getAbsolutePath());
                f.setMd5(md5Checksum.CalculateMD5(newIngestedLocationFile.getAbsolutePath()));
            } catch (IOException ex) {
                throw new EJBException(ex);
            }
        }

        // Finally, if the file was copied successfully,
        // attach the file to the study version and study

        if (newIngestedLocationFile != null && newIngestedLocationFile.exists()) {

            fileBean.getFileMetadata().setStudyVersion(studyVersion);
            studyVersion.getFileMetadatas().add(fileBean.getFileMetadata());
            fileBean.getStudyFile().setStudy(study);
            // don't need to set study side, since we're no longer using persistence cache
            //study.getStudyFiles().add(fileBean.getStudyFile());
            //fileBean.addFiletoStudy(study);

            em.persist(fileBean.getStudyFile());
            em.persist(fileBean.getFileMetadata());

        } else {
            //fileBean.getStudyFile().setSubsettable(true);
            em.merge(fileBean.getStudyFile());
        }
    }
    // calculate UNF for study version
    try {
        studyVersion.getMetadata().setUNF(new DSBWrapper().calculateUNF(studyVersion));
    } catch (IOException e) {
        throw new EJBException("Could not calculate new study UNF");
    }

    studyService.saveStudyVersion(studyVersion, user.getId());
}

From source file:com.hichinaschool.flashcards.libanki.Media.java

/**
 * Add files to a zip until over SYNC_ZIP_SIZE. Return zip data.
 *
 * @return a pair of two objects: the first is the zip file, the second a list of the
 *         filenames of the files inside the zip.
 */
public Pair<File, List<String>> zipAdded() {
    File f = new File(mCol.getPath().replaceFirst("collection\\.anki2$", "tmpSyncToServer.zip"));

    String sql = "select fname from log where type = " + Integer.toString(MEDIA_ADD);
    List<String> filenames = mMediaDb.queryColumn(String.class, sql, 0);
    List<String> fnames = new ArrayList<String>();

    try {
        ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(f)));
        zos.setLevel(8);

        JSONObject files = new JSONObject();
        int cnt = 0;
        long sz = 0;
        byte[] buffer = new byte[2048];
        boolean finished = true;
        for (String fname : filenames) {
            fnames.add(fname);
            File file = new File(getDir(), fname);
            BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file), 2048);
            ZipEntry entry = new ZipEntry(Integer.toString(cnt));
            zos.putNextEntry(entry);
            int count = 0;
            while ((count = bis.read(buffer, 0, 2048)) != -1) {
                zos.write(buffer, 0, count);
            }
            zos.closeEntry();
            bis.close();
            files.put(Integer.toString(cnt), fname);
            sz += file.length();
            if (sz > SYNC_ZIP_SIZE) {
                finished = false;
                break;
            }
            cnt += 1;
        }
        if (finished) {
            zos.putNextEntry(new ZipEntry("_finished"));
            zos.closeEntry();
        }
        zos.putNextEntry(new ZipEntry("_meta"));
        zos.write(Utils.jsonToString(files).getBytes());
        zos.close();
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }

    return new Pair<File, List<String>>(f, fnames);
}

From source file:cross.io.misc.WorkflowZipper.java

private void addZipEntry(final int bufsize, final ZipOutputStream zos, final byte[] input_buffer,
        final File file, final HashSet<String> zipEntries) throws IOException {
    log.debug("Adding zip entry for file {}", file);
    if (file.exists() && file.isFile()) {
        // Use the file name for the ZipEntry name.
        final ZipEntry zip_entry = new ZipEntry(file.getName());
        if (zipEntries.contains(file.getName())) {
            log.info("Skipping duplicate zip entry {}", file.getName());
            return;
        } else {
            zipEntries.add(file.getName());
        }
        zos.putNextEntry(zip_entry);

        // Create a buffered input stream from the file stream.
        final FileInputStream in = new FileInputStream(file);
        // Read from source into buffer and write, thereby compressing
        // on the fly
        try (BufferedInputStream source = new BufferedInputStream(in, bufsize)) {
            // Read from source into buffer and write, thereby compressing
            // on the fly
            int len = 0;
            while ((len = source.read(input_buffer, 0, bufsize)) != -1) {
                zos.write(input_buffer, 0, len);
            }
            zos.flush();
        }
        zos.closeEntry();
    } else {
        log.warn("Skipping nonexistent file or directory {}", file);
    }
}

From source file:com.pari.nm.utils.backup.BackupRestore.java

private void addFileToZip(String path, File backupFileName, ZipOutputStream zip) throws Exception {
    if (backupFileName.isDirectory()) {
        addFolderToZip(path, backupFileName, zip);
    } else {
        byte[] buf = new byte[1024];
        int len;
        FileInputStream in = null;
        try {
            in = new FileInputStream(backupFileName);
            zip.putNextEntry(new ZipEntry(path + File.separator + backupFileName.getName()));
            while ((len = in.read(buf)) > 0) {
                zip.write(buf, 0, len);
                if (jobCancelled) {
                    throw new Exception("job Cancelled");
                }
            }
        } finally {
            if (in != null) {
                in.close();
            }
        }
    }
}

From source file:com.flexive.core.storage.GenericDivisionExporter.java

/**
 * Dump a single file to a zip output stream
 *
 * @param zip  zip output stream
 * @param file the file to dump
 * @param path absolute base directory path (will be stripped in the archive from file)
 * @throws IOException on errors
 */
private void dumpFile(ZipOutputStream zip, File file, String path) throws IOException {
    if (file.isDirectory()) {
        for (File f : file.listFiles())
            dumpFile(zip, f, path);
        return;
    }
    ZipEntry ze = new ZipEntry(FOLDER_FS_BINARY + file.getAbsolutePath().substring(path.length()));
    zip.putNextEntry(ze);
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(file);
        byte[] buffer = new byte[4096];
        int read;
        while ((read = fis.read(buffer)) != -1)
            zip.write(buffer, 0, read);
    } finally {
        if (fis != null)
            fis.close();
    }
    zip.closeEntry();
    zip.flush();
}

From source file:cross.io.misc.WorkflowZipper.java

private void addRelativeZipEntry(final int bufsize, final ZipOutputStream zos, final byte[] input_buffer,
        final String relativePath, final File file, final HashSet<String> zipEntries) throws IOException {
    log.debug("Adding zip entry for file {}", file);
    if (file.exists() && file.isFile()) {
        // Use the file name for the ZipEntry name.
        final ZipEntry zip_entry = new ZipEntry(relativePath);
        if (zipEntries.contains(relativePath)) {
            log.info("Skipping duplicate zip entry {}", relativePath + "/" + file.getName());
            return;
        } else {
            zipEntries.add(relativePath);
        }
        zos.putNextEntry(zip_entry);

        // Create a buffered input stream from the file stream.
        final FileInputStream in = new FileInputStream(file);
        // Read from source into buffer and write, thereby compressing
        // on the fly
        try (BufferedInputStream source = new BufferedInputStream(in, bufsize)) {
            // Read from source into buffer and write, thereby compressing
            // on the fly
            int len = 0;
            while ((len = source.read(input_buffer, 0, bufsize)) != -1) {
                zos.write(input_buffer, 0, len);
            }
            zos.flush();
        }
        zos.closeEntry();
    } else {
        log.warn("Skipping nonexistent file or directory {}", file);
    }
}

From source file:net.sourceforge.jweb.maven.mojo.PropertiesOverideMojo.java

public void execute() throws MojoExecutionException, MojoFailureException {
    if (disabled) {
        this.getLog().info("plugin was disabled");
        return;
    }
    processConfiguration();
    if (replacements.isEmpty()) {
        this.getLog().info("Nothing to replace with");
        return;
    }

    String name = this.builddir.getAbsolutePath() + File.separator + this.finalName + "." + this.packing;//the final package
    this.getLog().debug("final artifact: " + name);// the final package

    try {
        File finalWarFile = new File(name);
        File tempFile = File.createTempFile(finalWarFile.getName(), null);
        tempFile.delete();//check deletion
        boolean renameOk = finalWarFile.renameTo(tempFile);
        if (!renameOk) {
            getLog().error("Can not rename file, please check.");
            return;
        }

        ZipOutputStream out = new ZipOutputStream(new FileOutputStream(finalWarFile));
        ZipFile zipFile = new ZipFile(tempFile);
        Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            if (acceptMime(entry)) {
                getLog().info("applying replacements for " + entry.getName());
                InputStream inputStream = zipFile.getInputStream(entry);
                String src = IOUtils.toString(inputStream, encoding);
                //do replacement
                for (Entry<String, String> e : replacements.entrySet()) {
                    src = src.replaceAll("#\\{" + e.getKey() + "}", e.getValue());
                }
                out.putNextEntry(new ZipEntry(entry.getName()));
                IOUtils.write(src, out, encoding);
                inputStream.close();
            } else {
                //no replacement needed; just copy the entry to the output zip
                out.putNextEntry(entry);
                InputStream inputStream = zipFile.getInputStream(entry);
                byte[] buf = new byte[512];
                int len = -1;
                while ((len = inputStream.read(buf)) > 0) {
                    out.write(buf, 0, len);
                }
                inputStream.close();
                continue;
            }
        }
        zipFile.close();
        out.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.flexive.core.storage.GenericDivisionExporter.java

/**
 * Dump a generic table to XML
 *
 * @param tableName     name of the table
 * @param stmt          an open statement
 * @param out           output stream
 * @param sb            an available and valid StringBuilder
 * @param xmlTag        name of the xml tag to write per row
 * @param idColumn      (optional) id column to sort results
 * @param onlyBinaries  process binary fields (else these will be ignored)
 * @throws SQLException on errors
 * @throws IOException  on errors
 */
private void dumpTable(String tableName, Statement stmt, OutputStream out, StringBuilder sb, String xmlTag,
        String idColumn, boolean onlyBinaries) throws SQLException, IOException {
    ResultSet rs = stmt.executeQuery("SELECT * FROM " + tableName
            + (StringUtils.isEmpty(idColumn) ? "" : " ORDER BY " + idColumn + " ASC"));
    final ResultSetMetaData md = rs.getMetaData();
    String value, att;
    boolean hasSubTags;
    while (rs.next()) {
        hasSubTags = false;
        if (!onlyBinaries) {
            sb.setLength(0);
            sb.append("  <").append(xmlTag);
        }
        for (int i = 1; i <= md.getColumnCount(); i++) {
            value = null;
            att = md.getColumnName(i).toLowerCase();
            switch (md.getColumnType(i)) {
            case java.sql.Types.DECIMAL:
            case java.sql.Types.NUMERIC:
            case java.sql.Types.BIGINT:
                if (!onlyBinaries) {
                    value = String.valueOf(rs.getBigDecimal(i));
                    if (rs.wasNull())
                        value = null;
                }
                break;
            case java.sql.Types.INTEGER:
            case java.sql.Types.SMALLINT:
            case java.sql.Types.TINYINT:
                if (!onlyBinaries) {
                    value = String.valueOf(rs.getLong(i));
                    if (rs.wasNull())
                        value = null;
                }
                break;
            case java.sql.Types.DOUBLE:
            case java.sql.Types.FLOAT:
            case java.sql.Types.REAL:
                if (!onlyBinaries) {
                    value = String.valueOf(rs.getDouble(i));
                    if (rs.wasNull())
                        value = null;
                }
                break;
            case java.sql.Types.TIMESTAMP:
            case java.sql.Types.DATE:
                if (!onlyBinaries) {
                    final Timestamp ts = rs.getTimestamp(i);
                    if (rs.wasNull())
                        value = null;
                    else
                        value = FxFormatUtils.getDateTimeFormat().format(ts);
                }
                break;
            case java.sql.Types.BIT:
            case java.sql.Types.CHAR:
            case java.sql.Types.BOOLEAN:
                if (!onlyBinaries) {
                    value = rs.getBoolean(i) ? "1" : "0";
                    if (rs.wasNull())
                        value = null;
                }
                break;
            case java.sql.Types.CLOB:
            case java.sql.Types.BLOB:
            case java.sql.Types.LONGVARBINARY:
            case java.sql.Types.LONGVARCHAR:
            case java.sql.Types.VARBINARY:
            case java.sql.Types.VARCHAR:
            case java.sql.Types.BINARY:
            case SQL_LONGNVARCHAR:
            case SQL_NCHAR:
            case SQL_NCLOB:
            case SQL_NVARCHAR:

                hasSubTags = true;
                break;
            default:
                LOG.warn("Unhandled type [" + md.getColumnType(i) + "] for [" + tableName + "." + att + "]");
            }
            if (value != null && !onlyBinaries)
                sb.append(' ').append(att).append("=\"").append(value).append("\"");
        }
        if (hasSubTags) {
            if (!onlyBinaries)
                sb.append(">\n");
            for (int i = 1; i <= md.getColumnCount(); i++) {
                switch (md.getColumnType(i)) {
                case java.sql.Types.VARBINARY:
                case java.sql.Types.LONGVARBINARY:
                case java.sql.Types.BLOB:
                case java.sql.Types.BINARY:
                    if (idColumn == null)
                        throw new IllegalArgumentException("Id column required to process binaries!");
                    String binFile = FOLDER_BINARY + "/BIN_" + String.valueOf(rs.getLong(idColumn)) + "_" + i
                            + ".blob";
                    att = md.getColumnName(i).toLowerCase();
                    if (onlyBinaries) {
                        if (!(out instanceof ZipOutputStream))
                            throw new IllegalArgumentException(
                                    "out has to be a ZipOutputStream to store binaries!");
                        ZipOutputStream zip = (ZipOutputStream) out;
                        InputStream in = rs.getBinaryStream(i);
                        if (rs.wasNull())
                            break;

                        ZipEntry ze = new ZipEntry(binFile);
                        zip.putNextEntry(ze);

                        byte[] buffer = new byte[4096];
                        int read;
                        while ((read = in.read(buffer)) != -1)
                            zip.write(buffer, 0, read);
                        in.close();
                        zip.closeEntry();
                        zip.flush();
                    } else {
                        InputStream in = rs.getBinaryStream(i); //need to fetch to see if it is empty
                        if (rs.wasNull())
                            break;
                        in.close();
                        sb.append("    <").append(att).append(">").append(binFile).append("</").append(att)
                                .append(">\n");
                    }
                    break;
                case java.sql.Types.CLOB:
                case SQL_LONGNVARCHAR:
                case SQL_NCHAR:
                case SQL_NCLOB:
                case SQL_NVARCHAR:
                case java.sql.Types.LONGVARCHAR:
                case java.sql.Types.VARCHAR:
                    if (!onlyBinaries) {
                        value = rs.getString(i);
                        if (rs.wasNull())
                            break;
                        att = md.getColumnName(i).toLowerCase();
                        sb.append("    <").append(att).append('>');
                        escape(sb, value);
                        sb.append("</").append(att).append(">\n");
                    }
                    break;
                }
            }
            if (!onlyBinaries)
                sb.append("  </").append(xmlTag).append(">\n");
        } else {
            if (!onlyBinaries)
                sb.append("/>\n");
        }
        if (!onlyBinaries)
            write(out, sb);
    }
}

From source file:com.netscape.cms.publish.publishers.FileBasedPublisher.java

/**
 * Publishes an object to the LDAP directory.
 *
 * @param conn an LDAP connection
 *            (null if LDAP publishing is not enabled)
 * @param dn dn of the ldap entry to publish cert
 *            (null if LDAP publishing is not enabled)
 * @param object object to publish
 *            (java.security.cert.X509Certificate or,
 *            java.security.cert.X509CRL)
 */
public void publish(LDAPConnection conn, String dn, Object object) throws ELdapException {
    CMS.debug("FileBasedPublisher: publish");

    try {
        if (object instanceof X509Certificate) {
            X509Certificate cert = (X509Certificate) object;
            BigInteger sno = cert.getSerialNumber();
            String name = mDir + File.separator + "cert-" + sno.toString();
            if (mDerAttr) {
                FileOutputStream fos = null;
                try {
                    String fileName = name + ".der";
                    fos = new FileOutputStream(fileName);
                    fos.write(cert.getEncoded());
                } finally {
                    if (fos != null)
                        fos.close();
                }
            }
            if (mB64Attr) {
                String fileName = name + ".b64";
                PrintStream ps = null;
                Base64OutputStream b64 = null;
                FileOutputStream fos = null;
                try {
                    fos = new FileOutputStream(fileName);
                    ByteArrayOutputStream output = new ByteArrayOutputStream();
                    b64 = new Base64OutputStream(new PrintStream(new FilterOutputStream(output)));
                    b64.write(cert.getEncoded());
                    b64.flush();
                    ps = new PrintStream(fos);
                    ps.print(output.toString("8859_1"));
                } finally {
                    if (ps != null) {
                        ps.close();
                    }
                    if (b64 != null) {
                        b64.close();
                    }
                    if (fos != null)
                        fos.close();
                }
            }
        } else if (object instanceof X509CRL) {
            X509CRL crl = (X509CRL) object;
            String[] namePrefix = getCrlNamePrefix(crl, mTimeStamp.equals("GMT"));
            String baseName = mDir + File.separator + namePrefix[0];
            String tempFile = baseName + ".temp";
            ZipOutputStream zos = null;
            byte[] encodedArray = null;
            File destFile = null;
            String destName = null;
            File renameFile = null;

            if (mDerAttr) {
                FileOutputStream fos = null;
                try {
                    fos = new FileOutputStream(tempFile);
                    encodedArray = crl.getEncoded();
                    fos.write(encodedArray);
                } finally {
                    if (fos != null)
                        fos.close();
                }
                if (mZipCRL) {
                    try {
                        zos = new ZipOutputStream(new FileOutputStream(baseName + ".zip"));
                        zos.setLevel(mZipLevel);
                        zos.putNextEntry(new ZipEntry(baseName + ".der"));
                        zos.write(encodedArray, 0, encodedArray.length);
                        zos.closeEntry();
                    } finally {
                        if (zos != null)
                            zos.close();
                    }
                }
                destName = baseName + ".der";
                destFile = new File(destName);

                if (destFile.exists()) {
                    destFile.delete();
                }
                renameFile = new File(tempFile);
                renameFile.renameTo(destFile);

                if (mLatestCRL) {
                    String linkExt = ".";
                    if (mLinkExt != null && mLinkExt.length() > 0) {
                        linkExt += mLinkExt;
                    } else {
                        linkExt += "der";
                    }
                    String linkName = mDir + File.separator + namePrefix[1] + linkExt;
                    createLink(linkName, destName);
                    if (mZipCRL) {
                        linkName = mDir + File.separator + namePrefix[1] + ".zip";
                        createLink(linkName, baseName + ".zip");
                    }
                }
            }

            // output base64 file
            if (mB64Attr == true) {
                if (encodedArray == null)
                    encodedArray = crl.getEncoded();
                FileOutputStream fos = null;
                try {
                    fos = new FileOutputStream(tempFile);
                    fos.write(Utils.base64encode(encodedArray, true).getBytes());
                } finally {
                    if (fos != null)
                        fos.close();
                }
                destName = baseName + ".b64";
                destFile = new File(destName);

                if (destFile.exists()) {
                    destFile.delete();
                }
                renameFile = new File(tempFile);
                renameFile.renameTo(destFile);
            }
            purgeExpiredFiles();
            purgeExcessFiles();
        }
    } catch (IOException e) {
        mLogger.log(ILogger.EV_SYSTEM, ILogger.S_OTHER, ILogger.LL_FAILURE,
                CMS.getLogMessage("PUBLISH_FILE_PUBLISHER_ERROR", e.toString()));
    } catch (CertificateEncodingException e) {
        mLogger.log(ILogger.EV_SYSTEM, ILogger.S_OTHER, ILogger.LL_FAILURE,
                CMS.getLogMessage("PUBLISH_FILE_PUBLISHER_ERROR", e.toString()));
    } catch (CRLException e) {
        mLogger.log(ILogger.EV_SYSTEM, ILogger.S_OTHER, ILogger.LL_FAILURE,
                CMS.getLogMessage("PUBLISH_FILE_PUBLISHER_ERROR", e.toString()));
    }
}

From source file:edu.harvard.iq.dvn.ingest.dsb.impl.DvnRGraphServiceImpl.java

private void addZipEntry(RConnection c, ZipOutputStream zout, String inputFileName, String outputFileName)
        throws IOException {
    RFileInputStream tmpin = c.openFile(inputFileName);
    byte[] dataBuffer = new byte[8192];
    int i = 0;

    ZipEntry e = new ZipEntry(outputFileName);
    zout.putNextEntry(e);

    while ((i = tmpin.read(dataBuffer)) > 0) {
        zout.write(dataBuffer, 0, i);
        zout.flush();
    }
    tmpin.close();
    zout.closeEntry();
}