Example usage for java.util.zip ZipOutputStream putNextEntry

List of usage examples for java.util.zip ZipOutputStream putNextEntry

Introduction

On this page you can find example usages of java.util.zip ZipOutputStream putNextEntry.

Prototype

public void putNextEntry(ZipEntry e) throws IOException 

Source Link

Document

Begins writing a new ZIP file entry and positions the stream to the start of the entry data.

Usage

From source file:fr.cirad.mgdb.exporting.markeroriented.GFFExportHandler.java

/**
 * Exports the variants referenced by {@code markerCursor} as a GFF3 document inside a ZIP
 * archive written to {@code outputStream}.
 *
 * The archive contains, in order: any pre-built entries supplied via {@code readyToExportFiles},
 * the GFF3 data itself ("&lt;module&gt;_&lt;n&gt;variants_&lt;m&gt;individuals.gff3"), and — only when
 * warnings were produced — a "-REMARKS.txt" entry listing them.
 *
 * @param outputStream destination for the ZIP archive (closed on successful completion)
 * @param sModule module / database name used to look up the MongoTemplate
 * @param sampleIDs samples whose genotypes are exported
 * @param progress progress indicator; the export stops early if it reports aborted
 * @param markerCursor cursor over the variants to export
 * @param markerSynonyms optional variant-id-to-synonym map (may be null)
 * @param nMinimumGenotypeQuality genotypes with GQ below this threshold are skipped
 * @param nMinimumReadDepth genotypes with DP below this threshold are skipped
 * @param readyToExportFiles optional extra entries to copy into the archive (may be null)
 * @throws Exception on any database or I/O failure
 */
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
            inputStream.close(); // was leaked in the original version
        }

    // Warnings are buffered in a temp file so we only add a REMARKS entry when needed.
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    // Build the distinct, properly ordered list of individual ids for the requested samples.
    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    ArrayList<String> individualList = new ArrayList<String>();
    for (int i = 0; i < sampleIDs.size(); i++) {
        Individual individual = individuals.get(i);
        if (!individualList.contains(individual.getId())) {
            individualList.add(individual.getId());
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".gff3"));
    String header = "##gff-version 3" + LINE_SEPARATOR;
    zos.write(header.getBytes());

    // Variant type -> Sequence Ontology accession, written in the GFF3 "type" column.
    TreeMap<String, String> typeToOntology = new TreeMap<String, String>();
    typeToOntology.put(Type.SNP.toString(), "SO:0000694");
    typeToOntology.put(Type.INDEL.toString(), "SO:1000032");
    typeToOntology.put(Type.MIXED.toString(), "SO:0001059");
    typeToOntology.put(Type.SYMBOLIC.toString(), "SO:0000109");
    typeToOntology.put(Type.MNP.toString(), "SO:0001059");

    // Size DB batches so one chunk stays around nMaxChunkSizeInMb of raw documents.
    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    short nProgress = 0, nPreviousProgress = 0;
    long nLoadedMarkerCount = 0;

    while (markerCursor.hasNext()) {
        int nLoadedMarkerCountInLoop = 0;
        Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
        boolean fStartingNewChunk = true;
        markerCursor.batchSize(nChunkSize);
        while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
            DBObject exportVariant = markerCursor.next();
            DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
            markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                    refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                            + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
            nLoadedMarkerCountInLoop++;
            fStartingNewChunk = false;
        }

        List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
        LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                mongoTemplate, sampleIDs, currentMarkers, true,
                null); // query mongo db for matching genotypes
        for (VariantData variant : variantsAndRuns.keySet()) // read data and write results
        {
            Comparable variantId = variant.getId();
            List<String> variantDataOrigin = new ArrayList<String>();

            Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
            List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
            if (chromAndPos.size() == 0)
                LOG.warn("Chromosomal position not found for marker " + variantId);
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(variantId);
                if (syn != null)
                    variantId = syn;
            }

            // Collect per-individual genotype codes, skipping those below the GQ / DP thresholds.
            Collection<VariantRunData> runs = variantsAndRuns.get(variant);
            if (runs != null)
                for (VariantRunData run : runs)
                    for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                        SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                        String individualId = individuals
                                .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                .getId();

                        Integer gq = null;
                        try {
                            gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                        } catch (Exception ignored) {
                            // GQ is optional metadata; treat any extraction failure as "absent"
                        }
                        if (gq != null && gq < nMinimumGenotypeQuality)
                            continue;

                        Integer dp = null;
                        try {
                            dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                        } catch (Exception ignored) {
                            // DP is optional metadata; treat any extraction failure as "absent"
                        }
                        if (dp != null && dp < nMinimumReadDepth)
                            continue;

                        String gtCode = sampleGenotype.getCode();
                        List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                        if (storedIndividualGenotypes == null) {
                            storedIndividualGenotypes = new ArrayList<String>();
                            individualGenotypes.put(individualId, storedIndividualGenotypes);
                        }
                        storedIndividualGenotypes.add(gtCode);
                    }

            // GFF3 fixed columns: seqid, source, type, start, end, score, strand, phase.
            zos.write((chromAndPos.get(0) + "\t" + StringUtils.join(variantDataOrigin, ";") /*source*/ + "\t"
                    + typeToOntology.get(variant.getType()) + "\t" + Long.parseLong(chromAndPos.get(1)) + "\t"
                    + Long.parseLong(chromAndPos.get(1)) + "\t" + "." + "\t" + "+" + "\t" + "." + "\t")
                            .getBytes());
            Comparable syn = markerSynonyms == null ? null : markerSynonyms.get(variant.getId());
            zos.write(("ID=" + variant.getId() + ";" + (syn != null ? "Name=" + syn + ";" : "") + "alleles="
                    + StringUtils.join(variant.getKnownAlleleList(), "/") + ";" + "refallele="
                    + variant.getKnownAlleleList().get(0) + ";").getBytes());

            for (int j = 0; j < individualList
                    .size(); j++ /* we use this list because it has the proper ordering */) {

                NumberFormat nf = NumberFormat.getInstance(Locale.US);
                nf.setMaximumFractionDigits(4);
                HashMap<String, Integer> compt1 = new HashMap<String, Integer>();
                int highestGenotypeCount = 0;
                int sum = 0;

                String individualId = individualList.get(j);
                List<String> genotypes = individualGenotypes.get(individualId);
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // tracks dissimilar genotypes

                // Find the most frequent genotype while accumulating per-allele counts in compt1.
                String mostFrequentGenotype = null;
                if (genotypes != null)
                    for (String genotype : genotypes) {
                        if (genotype.length() == 0)
                            continue; /* skip missing genotypes */

                        int count = 0;
                        for (String t : variant.getAllelesFromGenotypeCode(genotype)) {
                            for (String t1 : variant.getKnownAlleleList()) {
                                if (t.equals(t1) && !(compt1.containsKey(t1))) {
                                    count++;
                                    compt1.put(t1, count);
                                } else if (t.equals(t1) && compt1.containsKey(t1)) {
                                    if (compt1.get(t1) != 0) {
                                        count++;
                                        compt1.put(t1, count);
                                    } else
                                        compt1.put(t1, count);
                                } else if (!(compt1.containsKey(t1))) {
                                    compt1.put(t1, 0);
                                }
                            }
                        }
                        for (int countValue : compt1.values()) {
                            sum += countValue;
                        }

                        int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                        if (gtCount > highestGenotypeCount) {
                            highestGenotypeCount = gtCount;
                            mostFrequentGenotype = genotype;
                        }
                        genotypeCounts.put(genotype, gtCount);
                    }

                List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                        : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);

                if (alleles.size() != 0) {
                    zos.write(("acounts=" + individualId + ":").getBytes());

                    for (String knowAllelesCompt : compt1.keySet()) {
                        zos.write(
                                (knowAllelesCompt + " " + nf.format(compt1.get(knowAllelesCompt) / (float) sum)
                                        + " " + compt1.get(knowAllelesCompt) + " ").getBytes());
                    }
                    zos.write((alleles.size() + ";").getBytes());
                }
                if (genotypeCounts.size() > 1) {
                    // Several distinct genotypes for the same individual: record a warning.
                    Comparable sVariantId = markerSynonyms != null ? markerSynonyms.get(variant.getId())
                            : variant.getId();
                    warningFileWriter.write("- Dissimilar genotypes found for variant "
                            + (sVariantId == null ? variant.getId() : sVariantId) + ", individual "
                            + individualId + ". Exporting most frequent: " + StringUtils.join(alleles, ",")
                            + "\n");
                }
            }
            zos.write((LINE_SEPARATOR).getBytes());
        }

        if (progress.hasAborted()) {
            // Clean up the temporary warning resources before bailing out
            // (the original leaked both the writer and the temp file here).
            warningFileWriter.close();
            warningFile.delete();
            return;
        }

        nLoadedMarkerCount += nLoadedMarkerCountInLoop;
        nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
        if (nProgress > nPreviousProgress) {
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        try {
            String sLine;
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                // FIX: the original called in.readLine() a second time here,
                // silently dropping every other warning line from the archive.
                nWarningCount++;
            }
        } finally {
            in.close();
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file:org.pdfgal.pdfgalweb.utils.impl.ZipUtilsImpl.java

/**
 * Adds a new file to the {@link ZipOutputStream}.
 *
 * The file on disk is first renamed so that the ZIP entry carries the original
 * (user-visible) file name rather than the full temporary path; the renamed file
 * is deleted once it has been copied into the archive.
 *
 * @param fileName path of the file to add; must contain {@code originalFileName}
 * @param zos target ZIP stream
 * @param originalFileName original name; the entry is named from the portion of
 *        {@code fileName} starting at this substring
 * @throws FileNotFoundException if the (renamed) file cannot be opened
 * @throws IOException if the rename fails or any write fails
 */
private void addToZipFile(final String fileName, final ZipOutputStream zos, final String originalFileName)
        throws IOException {

    // File is opened.
    final File file = new File(fileName);

    // File is renamed.
    final String newName = fileName.substring(fileName.indexOf(originalFileName), fileName.length());
    final File renamedFile = new File(newName);
    if (!file.renameTo(renamedFile) && !renamedFile.exists()) {
        // FIX: the rename result was previously ignored, so a failure only
        // surfaced later as a confusing FileNotFoundException.
        throw new IOException("Could not rename '" + fileName + "' to '" + newName + "'");
    }

    // File is included into ZIP. try-with-resources guarantees the stream is
    // closed even when writing to the archive fails (the original leaked it).
    try (FileInputStream fis = new FileInputStream(renamedFile)) {
        final ZipEntry zipEntry = new ZipEntry(newName);
        zos.putNextEntry(zipEntry);

        final byte[] bytes = new byte[1024];
        int length;
        while ((length = fis.read(bytes)) >= 0) {
            zos.write(bytes, 0, length);
        }

        zos.closeEntry();
    }

    // File is deleted
    this.fileUtils.delete(newName);
}

From source file:org.openremote.beehive.configuration.www.UsersAPI.java

/**
 * Writes {@code file} into {@code zipOutput} as a single entry whose name is the
 * file's path relative to {@code basePath}; entry size and timestamp mirror the file.
 *
 * @throws IOException if the file cannot be read or the entry cannot be written
 */
private void writeZipEntry(ZipOutputStream zipOutput, File file, java.nio.file.Path basePath)
        throws IOException {
    ZipEntry entry = new ZipEntry(basePath.relativize(file.toPath()).toString());
    entry.setSize(file.length());
    entry.setTime(file.lastModified());
    zipOutput.putNextEntry(entry);

    // FIX: the original passed an anonymous FileInputStream to IOUtils.copy and
    // never closed it, leaking one file handle per entry written.
    try (FileInputStream in = new FileInputStream(file)) {
        IOUtils.copy(in, zipOutput);
    }

    zipOutput.flush();
    zipOutput.closeEntry();
}

From source file:com.qwazr.tools.ArchiverTool.java

/**
 * Copies the file at {@code filePath} into {@code zos} under the entry name
 * {@code entryName}, closing the source stream quietly when done.
 *
 * @throws FileNotFoundException when the source file does not exist
 * @throws IOException on any other I/O failure
 */
public void addToZipFile(String entryName, String filePath, ZipOutputStream zos) throws IOException {

    File source = new File(filePath);
    if (!source.exists())
        throw new FileNotFoundException("The file does not exists: " + source.getPath());
    FileInputStream input = new FileInputStream(source);
    try {
        zos.putNextEntry(new ZipEntry(entryName));
        IOUtils.copy(input, zos);
        zos.closeEntry();
    } finally {
        // Close quietly: a close failure must not mask an exception from the copy.
        IOUtils.closeQuietly(input);
    }
}

From source file:com.asual.summer.bundle.BundleDescriptorMojo.java

/**
 * Recursively adds the contents of {@code directory} to {@code zos}. Entry names
 * are paths relative to {@code base}, using '/' as the separator; directory
 * entries end with '/'.
 *
 * @throws IOException if a file cannot be read or an entry cannot be written
 */
private void zip(File directory, File base, ZipOutputStream zos) throws IOException {
    File[] files = directory.listFiles();
    if (files == null) {
        // FIX: listFiles() returns null when the path is not a listable
        // directory; the original would have thrown a NullPointerException.
        return;
    }
    byte[] buffer = new byte[8192];
    for (File file : files) {
        String name = file.getPath().replace(File.separatorChar, '/')
                .substring(base.getPath().length() + 1);
        if (file.isDirectory()) {
            if (!name.endsWith("/")) {
                name = name + "/"; // ZIP convention: directory entries end with '/'
            }
            ZipEntry entry = new ZipEntry(name);
            zos.putNextEntry(entry);
            zip(file, base, zos);
        } else {
            // FIX: try-with-resources — the original leaked the stream when a
            // read or write threw before in.close() was reached.
            try (FileInputStream in = new FileInputStream(file)) {
                ZipEntry entry = new ZipEntry(name);
                zos.putNextEntry(entry);
                int read;
                while (-1 != (read = in.read(buffer))) {
                    zos.write(buffer, 0, read);
                }
            }
        }
    }
}

From source file:dpfmanager.shell.interfaces.console.CommonController.java

/**
 * Recursively compresses the contents of {@code sourceDir} into {@code out};
 * entry names are {@code sourceDir} paths with the {@code rootDir} prefix removed.
 * NOTE(review): callers appear to pass a sourceDir ending with File.separator,
 * since file names are concatenated directly — confirm at call sites.
 *
 * @throws IOException if a file cannot be read or written into the archive
 */
private void compressDirectoryToZipfile(String rootDir, String sourceDir, ZipOutputStream out)
        throws IOException, FileNotFoundException {
    File[] children = new File(sourceDir).listFiles();
    if (children == null) {
        // FIX: listFiles() returns null for a non-listable path; the original
        // would have thrown a NullPointerException here.
        return;
    }
    for (File file : children) {
        if (file.isDirectory()) {
            compressDirectoryToZipfile(rootDir, sourceDir + file.getName() + File.separator, out);
        } else {
            ZipEntry entry = new ZipEntry(sourceDir.replace(rootDir, "") + file.getName());
            out.putNextEntry(entry);

            FileInputStream in = new FileInputStream(sourceDir + file.getName());
            try {
                IOUtils.copy(in, out);
            } finally {
                // FIX: close in a finally block — the original skipped the
                // close (and leaked the stream) whenever the copy threw.
                IOUtils.closeQuietly(in);
            }
        }
    }
}

From source file:com.wavemaker.tools.project.StageDeploymentManager.java

/**
 * Assembles an EAR archive containing the given WAR, the project's
 * application.xml (under META-INF/), and a generated MANIFEST.MF.
 * Any I/O failure is wrapped in a {@link WMRuntimeException}.
 *
 * @param properties must supply EAR_FILE_NAME_PROPERTY (target EAR file) and
 *        BUILD_WEBAPPROOT_PROPERTY (folder containing WEB-INF/application.xml)
 * @param warFile the WAR to embed as the first archive entry
 */
protected void assembleEar(Map<String, Object> properties, com.wavemaker.tools.io.File warFile) {
    try {
        com.wavemaker.tools.io.File earFile = (com.wavemaker.tools.io.File) properties
                .get(EAR_FILE_NAME_PROPERTY);
        // FIX: try-with-resources — the original leaked every stream when an
        // exception occurred before the manual close() calls were reached.
        try (ZipOutputStream out = new ZipOutputStream(earFile.getContent().asOutputStream())) {
            // 1. the WAR itself
            out.putNextEntry(new ZipEntry(warFile.getName()));
            try (InputStream is = warFile.getContent().asInputStream()) {
                org.apache.commons.io.IOUtils.copy(is, out);
            }
            out.closeEntry();

            // 2. META-INF/application.xml taken from the webapp's WEB-INF folder
            Folder webInf = ((Folder) properties.get(BUILD_WEBAPPROOT_PROPERTY)).getFolder("WEB-INF");
            com.wavemaker.tools.io.File appXml = webInf.getFile("application.xml");
            out.putNextEntry(new ZipEntry("META-INF/" + appXml.getName()));
            try (InputStream is = appXml.getContent().asInputStream()) {
                org.apache.commons.io.IOUtils.copy(is, out);
            }
            out.closeEntry();

            // 3. a minimal manifest (the original also called is.close() here,
            // double-closing the already-closed appXml stream — removed)
            String maniFest = "Manifest-Version: 1.0\n" + "Created-By: WaveMaker Studio (CloudJee Inc.)";
            out.putNextEntry(new ZipEntry("META-INF/MANIFEST.MF"));
            org.apache.commons.io.IOUtils.write(maniFest, out);
            out.closeEntry();
        }
    } catch (IOException ex) {
        throw new WMRuntimeException(ex);
    }
}

From source file:com.liferay.ide.server.remote.AbstractRemoteServerPublisher.java

/**
 * Builds a partial WAR containing the changed resources described by {@code deltas}
 * plus one marker entry per deleted resource, and returns the path of the archive.
 * Publishing errors are logged and swallowed so a (possibly partial) archive path
 * is always returned — preserved from the original best-effort behavior.
 *
 * @param archiveName name used for the temporary partial-war location
 * @param deltas resource changes to process into the archive
 * @param deletePrefix prefix identifying entries that represent deletions
 * @param adjustGMTOffset whether entry timestamps are adjusted for GMT offset
 * @return absolute path of the generated archive file
 */
public IPath publishModuleDelta(String archiveName, IModuleResourceDelta[] deltas, String deletePrefix,
        boolean adjustGMTOffset) throws CoreException {
    IPath path = LiferayServerCore.getTempLocation("partial-war", archiveName); //$NON-NLS-1$

    FileOutputStream outputStream = null;
    ZipOutputStream zip = null;
    File warfile = path.toFile();

    warfile.getParentFile().mkdirs();

    try {
        outputStream = new FileOutputStream(warfile);
        zip = new ZipOutputStream(outputStream);

        Map<ZipEntry, String> deleteEntries = new HashMap<ZipEntry, String>();

        processResourceDeltas(deltas, zip, deleteEntries, deletePrefix, StringPool.EMPTY, adjustGMTOffset);

        // entrySet() avoids a second lookup per key (was keySet() + get()).
        for (Map.Entry<ZipEntry, String> deleteEntry : deleteEntries.entrySet()) {
            zip.putNextEntry(deleteEntry.getKey());
            zip.write(deleteEntry.getValue().getBytes());
        }
    } catch (Exception ex) {
        // Best-effort by design: log and fall through to return the archive path.
        ex.printStackTrace();
    } finally {
        if (zip != null) {
            try {
                zip.close(); // also closes the wrapped outputStream
            } catch (IOException ignored) {
            }
        } else if (outputStream != null) {
            // FIX: if ZipOutputStream construction failed, the raw stream was
            // leaked by the original version.
            try {
                outputStream.close();
            } catch (IOException ignored) {
            }
        }
    }

    return new Path(warfile.getAbsolutePath());
}

From source file:com.metamx.druid.loading.S3SegmentPusher.java

/**
 * Zips the segment's index files, uploads the archive and a descriptor JSON to S3,
 * and returns the segment updated with its size and S3 load spec. Local temporary
 * files and the source index directory are deleted after a successful upload.
 *
 * @param file directory containing the segment's index files
 * @param segment segment metadata to upload and return (with size/loadSpec set)
 * @throws IOException on any I/O or S3 failure (S3 errors are wrapped)
 */
@Override
public DataSegment push(File file, DataSegment segment) throws IOException {
    log.info("Uploading [%s] to S3", file);
    String outputKey = JOINER.join(config.getBaseKey().isEmpty() ? null : config.getBaseKey(),
            segment.getDataSource(),
            String.format("%s_%s", segment.getInterval().getStart(), segment.getInterval().getEnd()),
            segment.getVersion(), segment.getShardSpec().getPartitionNum());

    File indexFilesDir = file;

    long indexSize = 0;
    final File zipOutFile = File.createTempFile("druid", "index.zip");
    ZipOutputStream zipOut = null;
    try {
        zipOut = new ZipOutputStream(new FileOutputStream(zipOutFile));
        // NOTE(review): listFiles() is null when indexFilesDir is not a directory;
        // confirm callers always pass a directory.
        File[] indexFiles = indexFilesDir.listFiles();
        for (File indexFile : indexFiles) {
            log.info("Adding indexFile[%s] with size[%,d].  Total size[%,d]", indexFile, indexFile.length(),
                    indexSize);
            if (indexFile.length() >= Integer.MAX_VALUE) {
                throw new ISE("indexFile[%s] too large [%,d]", indexFile, indexFile.length());
            }
            zipOut.putNextEntry(new ZipEntry(indexFile.getName()));
            // FIX: the original never closed this stream, leaking one file
            // descriptor per index file added to the archive.
            FileInputStream indexIn = new FileInputStream(indexFile);
            try {
                IOUtils.copy(indexIn, zipOut);
            } finally {
                Closeables.closeQuietly(indexIn);
            }
            indexSize += indexFile.length();
        }
    } finally {
        Closeables.closeQuietly(zipOut);
    }

    try {
        S3Object toPush = new S3Object(zipOutFile);

        final String outputBucket = config.getBucket();
        toPush.setBucketName(outputBucket);
        toPush.setKey(outputKey + "/index.zip");

        log.info("Pushing %s.", toPush);
        s3Client.putObject(outputBucket, toPush);

        DataSegment outputSegment = segment.withSize(indexSize).withLoadSpec(ImmutableMap
                .<String, Object>of("type", "s3_zip", "bucket", outputBucket, "key", toPush.getKey()));

        File descriptorFile = File.createTempFile("druid", "descriptor.json");
        StreamUtils.copyToFileAndClose(new ByteArrayInputStream(jsonMapper.writeValueAsBytes(segment)),
                descriptorFile);
        S3Object descriptorObject = new S3Object(descriptorFile);
        descriptorObject.setBucketName(outputBucket);
        descriptorObject.setKey(outputKey + "/descriptor.json");

        log.info("Pushing %s", descriptorObject);
        s3Client.putObject(outputBucket, descriptorObject);

        log.info("Deleting Index File[%s]", indexFilesDir);
        FileUtils.deleteDirectory(indexFilesDir);

        log.info("Deleting zipped index File[%s]", zipOutFile);
        zipOutFile.delete();

        log.info("Deleting descriptor file[%s]", descriptorFile);
        descriptorFile.delete();

        return outputSegment;
    } catch (NoSuchAlgorithmException e) {
        throw new IOException(e);
    } catch (S3ServiceException e) {
        throw new IOException(e);
    }
}

From source file:com.cisco.ca.cstg.pdi.services.ConfigurationServiceImpl.java

/**
 * Streams the file at {@code filePath} into {@code zip} as one entry named
 * after the file itself (directory components stripped).
 *
 * @throws IOException if the file cannot be opened or the entry written
 */
private void addToZipOutputStream(String filePath, ZipOutputStream zip) throws IOException {
    File source = new File(filePath);
    // try-with-resources closes the stream whether or not the copy succeeds.
    try (FileInputStream stream = new FileInputStream(source)) {
        zip.putNextEntry(new ZipEntry(source.getName()));
        byte[] chunk = new byte[1024];
        int read;
        while ((read = stream.read(chunk)) > 0) {
            zip.write(chunk, 0, read);
        }
        zip.flush();
    }
}