Example usage for java.util.zip ZipOutputStream write

Introduction

On this page you can find usage examples for java.util.zip.ZipOutputStream.write(byte[] b, int off, int len), taken from the source files listed below.

Prototype

public synchronized void write(byte[] b, int off, int len) throws IOException 

Document

Writes an array of bytes to the current ZIP entry data.
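
As a minimal, self-contained sketch (not taken from any of the projects listed below), write(byte[], int, int) is typically called in a copy loop that streams a source into the current ZIP entry; the file names here are placeholders:

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class ZipWriteExample {
    public static void main(String[] args) throws IOException {
        try (InputStream in = new FileInputStream("input.txt"); // placeholder input file
                ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("output.zip"))) {
            zos.putNextEntry(new ZipEntry("input.txt"));
            byte[] buffer = new byte[4096];
            int len;
            while ((len = in.read(buffer)) != -1) {
                zos.write(buffer, 0, len); // write only the len bytes actually read
            }
            zos.closeEntry();
        }
    }
}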

Usage

From source file:com.pr7.logging.CustomDailyRollingFileAppender.java

/**
 * Compresses the passed file to a .zip file, stores the .zip in the same
 * directory as the passed file, and then deletes the original, leaving only
 * the .zipped archive.
 * 
 * @param file
 */
private void zipAndDelete(File file) throws IOException {
    if (!file.getName().endsWith(".zip")) {
        System.out.println("zipAndDelete file = " + file.getName());
        String folderName = "archive";
        File folder = new File(file.getParent(), folderName);
        if (!folder.exists()) {
            if (folder.mkdir()) {
                System.out.println("Create " + folderName + " success.");
            } else {
                System.out.println("Create " + folderName + " failed.");
            }
        }

        File zipFile = new File(folder, file.getName() + ".zip");
        FileInputStream fis = new FileInputStream(file);
        FileOutputStream fos = new FileOutputStream(zipFile);
        ZipOutputStream zos = new ZipOutputStream(fos);
        ZipEntry zipEntry = new ZipEntry(file.getName());
        zos.putNextEntry(zipEntry);

        byte[] buffer = new byte[4096];
        while (true) {
            int bytesRead = fis.read(buffer);
            if (bytesRead == -1)
                break;
            else {
                zos.write(buffer, 0, bytesRead);
            }
        }
        zos.closeEntry();
        fis.close();
        zos.close();
        file.delete();
    }
}
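
One caveat with the example above: if an exception is thrown during the copy, the streams are never closed. A sketch of the same compress-and-delete pattern using try-with-resources (the body below is illustrative and not part of the original source; the 4 KB buffer size is kept from the example):

private void zipAndDelete(File file) throws IOException {
    if (file.getName().endsWith(".zip")) {
        return; // already an archive, nothing to do
    }
    File folder = new File(file.getParent(), "archive");
    if (!folder.exists() && !folder.mkdir()) {
        throw new IOException("Could not create archive folder " + folder);
    }
    File zipFile = new File(folder, file.getName() + ".zip");
    // try-with-resources closes both streams even if the copy fails
    try (FileInputStream fis = new FileInputStream(file);
            ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipFile))) {
        zos.putNextEntry(new ZipEntry(file.getName()));
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = fis.read(buffer)) != -1) {
            zos.write(buffer, 0, bytesRead);
        }
        zos.closeEntry();
    }
    if (!file.delete()) {
        file.deleteOnExit(); // fall back if the original cannot be removed right away
    }
}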

From source file:com.fujitsu.dc.test.unit.cell.LogTest.java

private void createZip(ZipOutputStream zos, File[] files) throws IOException {
    byte[] buf = new byte[1024];
    for (File file : files) {
        ZipEntry entry = new ZipEntry(file.getName());
        zos.putNextEntry(entry);

        // open, copy and close each file's stream before moving on to the next entry
        InputStream is = new BufferedInputStream(new FileInputStream(file));
        try {
            int len = 0;
            while ((len = is.read(buf)) != -1) {
                zos.write(buf, 0, len);
            }
        } finally {
            IOUtils.closeQuietly(is);
        }
    }
}

From source file:it.cnr.icar.eric.common.Utility.java

public static ZipOutputStream createZipOutputStream(String baseDir, String[] relativeFilePaths, OutputStream os)
        throws FileNotFoundException, IOException {
    if (baseDir.startsWith("file:/")) {
        baseDir = baseDir.substring(5);
    }
    ZipOutputStream zipoutputstream = new ZipOutputStream(os);

    zipoutputstream.setMethod(ZipOutputStream.STORED);

    for (int i = 0; i < relativeFilePaths.length; i++) {
        File file = new File(baseDir + FILE_SEPARATOR + relativeFilePaths[i]);

        byte[] buffer = new byte[1000];

        int n;

        FileInputStream fis;

        // Calculate the CRC-32 value. Because this stream uses the STORED method,
        //   the entry's size and CRC must be known before putNextEntry is called.

        CRC32 crc32 = new CRC32();

        fis = new FileInputStream(file);

        while ((n = fis.read(buffer)) > -1) {
            crc32.update(buffer, 0, n);
        }

        fis.close();

        // Create a zip entry.

        ZipEntry zipEntry = new ZipEntry(relativeFilePaths[i]);

        zipEntry.setSize(file.length());
        zipEntry.setTime(file.lastModified());
        zipEntry.setCrc(crc32.getValue());

        // Add the zip entry and associated data.

        zipoutputstream.putNextEntry(zipEntry);

        fis = new FileInputStream(file);

        while ((n = fis.read(buffer)) > -1) {
            zipoutputstream.write(buffer, 0, n);
        }

        fis.close();

        zipoutputstream.closeEntry();
    }

    return zipoutputstream;
}
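
The double read above (once to compute the CRC, once to copy the data) is what makes STORED entries possible: ZipOutputStream rejects a STORED entry whose size and CRC-32 have not been set before putNextEntry. A minimal sketch of that requirement for data already held in memory (the method and entry names below are illustrative, not from the project above):

// Sketch: writing a single uncompressed (STORED) entry from an in-memory byte array.
static void writeStoredEntry(ZipOutputStream zipOut, String entryName, byte[] data) throws IOException {
    CRC32 crc32 = new CRC32();
    crc32.update(data, 0, data.length);

    ZipEntry entry = new ZipEntry(entryName);
    entry.setMethod(ZipEntry.STORED); // store without compression
    entry.setSize(data.length);       // mandatory for STORED entries
    entry.setCrc(crc32.getValue());   // mandatory for STORED entries

    zipOut.putNextEntry(entry);       // throws ZipException if size or CRC is missing
    zipOut.write(data, 0, data.length);
    zipOut.closeEntry();
}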

From source file:fr.cirad.mgdb.exporting.individualoriented.DARwinExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles,
        boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor,
        Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    GenotypingProject aProject = mongoTemplate.findOne(
            new Query(Criteria.where(GenotypingProject.FIELDNAME_PLOIDY_LEVEL).exists(true)),
            GenotypingProject.class);
    if (aProject == null)
        LOG.warn("Unable to find a project containing ploidy level information! Assuming ploidy level is 2.");

    int ploidy = aProject == null ? 2 : aProject.getPloidyLevel();

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size()
            + "individuals";

    StringBuffer donFileContents = new StringBuffer(
            "@DARwin 5.0 - DON -" + LINE_SEPARATOR + individualExportFiles.size() + "\t" + 1 + LINE_SEPARATOR
                    + "N" + "\t" + "individual" + LINE_SEPARATOR);

    int count = 0;
    String missingGenotype = "";
    for (int j = 0; j < ploidy; j++)
        missingGenotype += "\tN";

    zos.putNextEntry(new ZipEntry(exportName + ".var"));
    zos.write(("@DARwin 5.0 - ALLELIC - " + ploidy + LINE_SEPARATOR + individualExportFiles.size() + "\t"
            + markerCount * ploidy + LINE_SEPARATOR + "N").getBytes());

    DBCursor markerCursorCopy = markerCursor.copy(); // dunno how expensive this is, but seems safer than keeping all IDs in memory at any time

    short nProgress = 0, nPreviousProgress = 0;
    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    markerCursorCopy.batchSize(nChunkSize);

    int nMarkerIndex = 0;
    while (markerCursorCopy.hasNext()) {
        DBObject exportVariant = markerCursorCopy.next();
        Comparable markerId = (Comparable) exportVariant.get("_id");

        if (markerSynonyms != null) {
            Comparable syn = markerSynonyms.get(markerId);
            if (syn != null)
                markerId = syn;
        }
        for (int j = 0; j < ploidy; j++)
            zos.write(("\t" + markerId).getBytes());
    }

    TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>();
    ArrayList<String> distinctAlleles = new ArrayList<String>(); // the index of each allele will be used as its code
    int i = 0;
    for (File f : individualExportFiles) {
        BufferedReader in = new BufferedReader(new FileReader(f));
        try {
            String individualId, line = in.readLine(); // read sample id

            if (line != null)
                individualId = line;
            else
                throw new Exception("Unable to read first line of temp export file " + f.getName());

            donFileContents.append(++count + "\t" + individualId + LINE_SEPARATOR);

            zos.write((LINE_SEPARATOR + count).getBytes());
            nMarkerIndex = 0;

            while ((line = in.readLine()) != null) {
                List<String> genotypes = MgdbDao.split(line, "|");
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                for (String genotype : genotypes) {
                    if (genotype.length() == 0)
                        continue; /* skip missing genotypes */

                    int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                    if (gtCount > highestGenotypeCount) {
                        highestGenotypeCount = gtCount;
                        mostFrequentGenotype = genotype;
                    }
                    genotypeCounts.put(genotype, gtCount);
                }

                if (genotypeCounts.size() > 1) {
                    warningFileWriter.write("- Dissimilar genotypes found for variant __" + nMarkerIndex
                            + "__, individual " + individualId + ". Exporting most frequent: "
                            + mostFrequentGenotype + "\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String codedGenotype = "";
                if (mostFrequentGenotype != null)
                    for (String allele : mostFrequentGenotype.split(" ")) {
                        if (!distinctAlleles.contains(allele))
                            distinctAlleles.add(allele);
                        codedGenotype += "\t" + distinctAlleles.indexOf(allele);
                    }
                else
                    codedGenotype = missingGenotype.replaceAll("N", "-1"); // missing data is coded as -1
                zos.write(codedGenotype.getBytes());

                nMarkerIndex++;
            }
        } catch (Exception e) {
            LOG.error("Error exporting data", e);
            progress.setError("Error exporting data: " + e.getClass().getSimpleName()
                    + (e.getMessage() != null ? " - " + e.getMessage() : ""));
            return;
        } finally {
            in.close();
        }

        if (progress.hasAborted())
            return;

        nProgress = (short) (++i * 100 / individualExportFiles.size());
        if (nProgress > nPreviousProgress) {
            //            LOG.debug("============= doDARwinExport (" + i + "): " + nProgress + "% =============");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }

        if (!f.delete()) {
            f.deleteOnExit();
            LOG.info("Unable to delete tmp export file " + f.getAbsolutePath());
        }
    }

    zos.putNextEntry(new ZipEntry(exportName + ".don"));
    zos.write(donFileContents.toString().getBytes());

    // now read variant names for those that induced warnings
    nMarkerIndex = 0;
    markerCursor.batchSize(nChunkSize);
    while (markerCursor.hasNext()) {
        DBObject exportVariant = markerCursor.next();
        if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) {
            Comparable markerId = (Comparable) exportVariant.get("_id");

            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(markerId);
                if (syn != null)
                    markerId = syn;
            }
            for (int j = 0; j < ploidy; j++)
                zos.write(("\t" + markerId).getBytes());

            problematicMarkerIndexToNameMap.put(nMarkerIndex, markerId);
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet())
                sLine = sLine.replaceAll("__" + aMarkerIndex + "__",
                        problematicMarkerIndexToNameMap.get(aMarkerIndex).toString());
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file:com.aurel.track.lucene.util.FileUtil.java

private static void zipFiles(File dirZip, ZipOutputStream zipOut, String rootPath) {
    try {
        // get a listing of the directory content
        File[] dirList = dirZip.listFiles();
        byte[] readBuffer = new byte[2156];
        int bytesIn;
        // loop through dirList, and zip the files
        for (int i = 0; i < dirList.length; i++) {
            File f = dirList[i];
            if (f.isDirectory()) {
                // if the File object is a directory, call this
                // function again to add its content recursively
                String filePath = f.getAbsolutePath();
                zipFiles(new File(filePath), zipOut, rootPath);
                // loop again
                continue;
            }
            // if we reached here, the File object f was not a directory
            // create a FileInputStream on top of f
            FileInputStream fis = new FileInputStream(f);
            // create a new zip entry
            ZipEntry anEntry = new ZipEntry(
                    f.getAbsolutePath().substring(rootPath.length() + 1, f.getAbsolutePath().length()));
            // place the zip entry in the ZipOutputStream object
            zipOut.putNextEntry(anEntry);
            // now write the content of the file to the ZipOutputStream
            while ((bytesIn = fis.read(readBuffer)) != -1) {
                zipOut.write(readBuffer, 0, bytesIn);
            }
            // close the Stream
            fis.close();
        }

    } catch (Exception e) {
        LOGGER.error(ExceptionUtils.getStackTrace(e));
    }
}

From source file:com.gatf.executor.report.ReportHandler.java

/**
 * Zips the files in the given directory whose names end with one of the supplied filters,
 * writing the archive into that same directory.
 * 
 * @param directory the directory whose files are zipped
 * @param fileFilters file-name suffixes to include
 * @param zipFileName name of the zip file to create inside the directory
 */
public static void zipDirectory(File directory, final String[] fileFilters, String zipFileName) {
    try {
        if (!directory.exists() || !directory.isDirectory()) {
            directory.mkdirs();
            logger.info("Invalid Directory provided for zipping...");
            return;
        }
        File zipFile = new File(directory, zipFileName);
        FileOutputStream fos = new FileOutputStream(zipFile);
        ZipOutputStream zos = new ZipOutputStream(fos);

        File[] files = directory.listFiles(new FilenameFilter() {
            public boolean accept(File folder, String name) {
                // accept the file if its name ends with any of the filters
                for (String fileFilter : fileFilters) {
                    if (name.toLowerCase().endsWith(fileFilter)) {
                        return true;
                    }
                }
                return false;
            }
        });

        for (File file : files) {
            FileInputStream fis = new FileInputStream(file);
            ZipEntry zipEntry = new ZipEntry(file.getName());
            zos.putNextEntry(zipEntry);

            byte[] bytes = new byte[1024];
            int length;
            while ((length = fis.read(bytes)) >= 0) {
                zos.write(bytes, 0, length);
            }

            zos.closeEntry();
            fis.close();
        }

        zos.close();
        fos.close();
    } catch (IOException ioe) {
        logger.severe(ExceptionUtils.getStackTrace(ioe));
        return;
    }
}

From source file:fr.cirad.mgdb.exporting.markeroriented.HapMapExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    ArrayList<String> individualList = new ArrayList<String>();
    for (int i = 0; i < sampleIDs.size(); i++) {
        Individual individual = individuals.get(i);
        if (!individualList.contains(individual.getId())) {
            individualList.add(individual.getId());
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".hapmap"));
    String header = "rs#" + "\t" + "alleles" + "\t" + "chrom" + "\t" + "pos" + "\t" + "strand" + "\t"
            + "assembly#" + "\t" + "center" + "\t" + "protLSID" + "\t" + "assayLSID" + "\t" + "panelLSID" + "\t"
            + "QCcode";
    zos.write(header.getBytes());
    for (int i = 0; i < individualList.size(); i++) {
        zos.write(("\t" + individualList.get(i)).getBytes());
    }
    zos.write((LINE_SEPARATOR).getBytes());

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    short nProgress = 0, nPreviousProgress = 0;
    long nLoadedMarkerCount = 0;

    while (markerCursor == null || markerCursor.hasNext()) {
        int nLoadedMarkerCountInLoop = 0;
        Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
        boolean fStartingNewChunk = true;
        markerCursor.batchSize(nChunkSize);
        while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
            DBObject exportVariant = markerCursor.next();
            DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
            markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                    refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                            + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
            nLoadedMarkerCountInLoop++;
            fStartingNewChunk = false;
        }

        List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
        LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                mongoTemplate, sampleIDs, currentMarkers, true,
                null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
        for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
        {
            Comparable variantId = variant.getId();
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(variantId);
                if (syn != null)
                    variantId = syn;
            }

            boolean fIsSNP = variant.getType().equals(Type.SNP.toString());
            byte[] missingGenotype = ("\t" + "NN").getBytes();

            String[] chromAndPos = markerChromosomalPositions.get(variant.getId()).split(":");
            zos.write(((variantId == null ? variant.getId() : variantId) + "\t"
                    + StringUtils.join(variant.getKnownAlleleList(), "/") + "\t" + chromAndPos[0] + "\t"
                    + Long.parseLong(chromAndPos[1]) + "\t" + "+").getBytes());
            for (int j = 0; j < 6; j++)
                zos.write(("\t" + "NA").getBytes());

            Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
            Collection<VariantRunData> runs = variantsAndRuns.get(variant);
            if (runs != null)
                for (VariantRunData run : runs)
                    for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                        SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                        String gtCode = run.getSampleGenotypes().get(sampleIndex).getCode();
                        String individualId = individuals
                                .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                .getId();
                        List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                        if (storedIndividualGenotypes == null) {
                            storedIndividualGenotypes = new ArrayList<String>();
                            individualGenotypes.put(individualId, storedIndividualGenotypes);
                        }
                        storedIndividualGenotypes.add(gtCode);
                        gqValueForSampleId.put(individualId,
                                (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ));
                        dpValueForSampleId.put(individualId,
                                (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP));
                    }

            int writtenGenotypeCount = 0;
            for (String individualId : individualList /* we use this list because it has the proper ordering */) {
                int individualIndex = individualList.indexOf(individualId);
                while (writtenGenotypeCount < individualIndex - 1) {
                    zos.write(missingGenotype);
                    writtenGenotypeCount++;
                }

                List<String> genotypes = individualGenotypes.get(individualId);
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                if (genotypes != null)
                    for (String genotype : genotypes) {
                        if (genotype.length() == 0)
                            continue; /* skip missing genotypes */

                        Integer gqValue = gqValueForSampleId.get(individualId);
                        if (gqValue != null && gqValue < nMinimumGenotypeQuality)
                            continue; /* skip this sample because its GQ is under the threshold */

                        Integer dpValue = dpValueForSampleId.get(individualId);
                        if (dpValue != null && dpValue < nMinimumReadDepth)
                            continue; /* skip this sample because its DP is under the threshold */

                        int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                        if (gtCount > highestGenotypeCount) {
                            highestGenotypeCount = gtCount;
                            mostFrequentGenotype = genotype;
                        }
                        genotypeCounts.put(genotype, gtCount);
                    }

                byte[] exportedGT = mostFrequentGenotype == null ? missingGenotype
                        : ("\t" + StringUtils.join(variant.getAllelesFromGenotypeCode(mostFrequentGenotype),
                                fIsSNP ? "" : "/")).getBytes();
                zos.write(exportedGT);
                writtenGenotypeCount++;

                if (genotypeCounts.size() > 1)
                    warningFileWriter.write("- Dissimilar genotypes found for variant "
                            + (variantId == null ? variant.getId() : variantId) + ", individual " + individualId
                            + ". Exporting most frequent: " + new String(exportedGT) + "\n");
            }

            while (writtenGenotypeCount < individualList.size()) {
                zos.write(missingGenotype);
                writtenGenotypeCount++;
            }
            zos.write((LINE_SEPARATOR).getBytes());
        }

        if (progress.hasAborted())
            return;

        nLoadedMarkerCount += nLoadedMarkerCountInLoop;
        nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
        if (nProgress > nPreviousProgress) {
            //            if (nProgress%5 == 0)
            //               LOG.info("========================= exportData: " + nProgress + "% =========================" + (System.currentTimeMillis() - before)/1000 + "s");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file:com.yunmel.syncretic.utils.io.IOUtils.java

/**
 * Streams the given files to the servlet response as a single zip archive.
 * 
 * @param downQuene map of files to the entry names they should have inside the archive
 * @param response servlet response that receives the zip stream
 * @throws IOException
 */
public static void zipDownLoad(Map<File, String> downQuene, HttpServletResponse response) throws IOException {

    ServletOutputStream out = response.getOutputStream();
    ZipOutputStream zipout = new ZipOutputStream(out);
    ZipEntry entry = null;
    zipout.setLevel(1);
    // zipout.setEncoding("GBK");
    if (downQuene != null && downQuene.size() > 0) {
        for (Entry<File, String> fileInfo : downQuene.entrySet()) {
            File file = fileInfo.getKey();
            try {
                String filename = new String(fileInfo.getValue().getBytes(), "GBK");
                entry = new ZipEntry(filename);
                entry.setSize(file.length());
                zipout.putNextEntry(entry);
            } catch (IOException e) {
                // Logger.getLogger(FileUtil.class).warn(":", e);
            }
            BufferedInputStream fr = new BufferedInputStream(new FileInputStream(fileInfo.getKey()));
            int len;
            byte[] buffer = new byte[1024];
            while ((len = fr.read(buffer)) != -1)
                zipout.write(buffer, 0, len);
            fr.close();
        }
    }

    zipout.finish();
    zipout.flush();
    // out.flush();
}
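
The helper above writes the archive straight to the response stream but leaves the HTTP headers to the caller; a typical caller sets the content type and a Content-Disposition header first so the browser offers the stream as a zip download. A sketch of such a caller, assuming the helper is exposed as IOUtils.zipDownLoad as the source file name suggests (the servlet method, file and entry names are illustrative):

// Inside a servlet (javax.servlet.http) handler; the names below are placeholders.
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
    response.setContentType("application/zip");
    response.setHeader("Content-Disposition", "attachment; filename=\"export.zip\"");

    Map<File, String> downQuene = new LinkedHashMap<File, String>();
    downQuene.put(new File("/tmp/report.csv"), "report.csv"); // file -> entry name inside the zip

    IOUtils.zipDownLoad(downQuene, response); // writes and finishes the archive on the response stream
}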

From source file:edu.ku.brc.specify.rstools.GoogleEarthExporter.java

/**
 * @param outputFile the KML file to repackage; it becomes doc.kml inside the KMZ
 * @param defaultIconFile optional icon copied into the files/ directory of the KMZ
 */
protected void createKMZ(final File outputFile, final File defaultIconFile) throws IOException {
    // now we have the KML in outputFile
    // we need to create a KMZ (zip file containing doc.kml and other files)

    // create a buffer for reading the files
    byte[] buf = new byte[1024];
    int len;

    // create the KMZ file
    File outputKMZ = File.createTempFile("sp6-export-", ".kmz");
    ZipOutputStream out = new ZipOutputStream(new FileOutputStream(outputKMZ));

    // add the doc.kml file to the ZIP
    FileInputStream in = new FileInputStream(outputFile);
    // add ZIP entry to output stream
    out.putNextEntry(new ZipEntry("doc.kml"));
    // copy the bytes
    while ((len = in.read(buf)) > 0) {
        out.write(buf, 0, len);
    }
    // complete the entry
    out.closeEntry();
    in.close();

    // add a "files" directory to the KMZ file
    ZipEntry filesDir = new ZipEntry("files/");
    out.putNextEntry(filesDir);
    out.closeEntry();

    if (defaultIconFile != null) {
        File iconTmpFile = defaultIconFile;
        if (false) {
            // Shrink File
            ImageIcon icon = new ImageIcon(defaultIconFile.getAbsolutePath());
            BufferedImage bimage = new BufferedImage(icon.getIconWidth(), icon.getIconHeight(),
                    BufferedImage.TYPE_INT_ARGB);
            Graphics g = bimage.createGraphics();
            g.drawImage(icon.getImage(), 0, 0, null);
            g.dispose();
            BufferedImage scaledBI = GraphicsUtils.getScaledInstance(bimage, 16, 16, true);
            iconTmpFile = File.createTempFile("sp6-export-icon-scaled", ".png");
            ImageIO.write(scaledBI, "PNG", iconTmpFile);
        }

        // add the specify32.png file (default icon file) to the ZIP (in the "files" directory)
        in = new FileInputStream(iconTmpFile);
        // add ZIP entry to output stream
        out.putNextEntry(new ZipEntry("files/specify32.png"));
        // copy the bytes
        while ((len = in.read(buf)) > 0) {
            out.write(buf, 0, len);
        }
        // complete the entry
        out.closeEntry();
        in.close();
    }

    // complete the ZIP file
    out.close();

    // now put the KMZ file where the KML output was
    FileUtils.copyFile(outputKMZ, outputFile);

    outputKMZ.delete();
}

From source file:com.ephesoft.dcma.util.FileUtils.java

/**
 * This method zips the contents of Directory specified into a zip file whose name is provided.
 *
 * @param dir {@link String}
 * @param zipfile {@link String}
 * @param excludeBatchXml boolean
 * @throws IOException 
 * @throws IllegalArgumentException in case of error
 */
public static void zipDirectory(final String dir, final String zipfile, final boolean excludeBatchXml)
        throws IOException, IllegalArgumentException {
    // Check that the directory is a directory, and get its contents
    File directory = new File(dir);
    if (!directory.isDirectory()) {
        throw new IllegalArgumentException("Not a directory:  " + dir);
    }
    String[] entries = directory.list();
    byte[] buffer = new byte[UtilConstants.BUFFER_CONST]; // Create a buffer for copying
    int bytesRead;

    ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zipfile));

    for (int index = 0; index < entries.length; index++) {
        if (excludeBatchXml && entries[index].contains(IUtilCommonConstants.BATCH_XML)) {
            continue;
        }
        File file = new File(directory, entries[index]);
        if (file.isDirectory()) {
            continue;// Ignore directory
        }
        FileInputStream input = new FileInputStream(file); // Stream to read file
        ZipEntry entry = new ZipEntry(file.getName()); // Make a ZipEntry
        out.putNextEntry(entry); // Store entry
        bytesRead = input.read(buffer);
        while (bytesRead != -1) {
            out.write(buffer, 0, bytesRead);
            bytesRead = input.read(buffer);
        }
        if (input != null) {
            input.close();
        }
    }
    if (out != null) {
        out.close();
    }
}