Example usage for org.apache.commons.compress.archivers.tar TarArchiveOutputStream finish

List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveOutputStream finish

Introduction

In this page you can find the example usage for org.apache.commons.compress.archivers.tar TarArchiveOutputStream finish.

Prototype

public void finish() throws IOException 

Source Link

Document

Ends the TAR archive without closing the underlying OutputStream.

Usage

From source file:com.mulesoft.jockey.maven.GenerateMojo.java

/**
 * Packages {@code distDir} into {@code <buildDirectory>/<distributionName>.tar.gz}
 * and attaches the archive to the Maven project as a "tar.gz" artifact.
 *
 * @param distDir directory whose contents are archived (via copyArchiveFile)
 * @throws MojoExecutionException if any I/O error occurs while writing the archive
 */
private void createTarGz(File distDir) throws MojoExecutionException {
    File output = new File(buildDirectory, distributionName + ".tar.gz");
    // try-with-resources closes both streams even when writing fails; the
    // original only closed them on the happy path and leaked on exception.
    try (OutputStream out = new FileOutputStream(output);
            TarArchiveOutputStream os = new TarArchiveOutputStream(new GZIPOutputStream(out))) {
        // GNU long-file mode allows entry names longer than the 100-byte POSIX limit.
        os.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        copyArchiveFile(distDir, os, false);
        // Write the tar trailer records; close() then flushes the gzip stream.
        os.finish();
    } catch (IOException e) {
        // Fixed message: this method produces a tar.gz, not a zip.
        throw new MojoExecutionException("Could not create tar.gz file.", e);
    }
    projectHelper.attachArtifact(project, "tar.gz", "", output);
}

From source file:com.espringtran.compressor4j.processor.TarProcessor.java

/**
 * Compress data/*from w w  w  .  j ava  2s  . c  o m*/
 * 
 * @param fileCompressor
 *            FileCompressor object
 * @return
 * @throws Exception
 */
@Override
public byte[] compressData(FileCompressor fileCompressor) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TarArchiveOutputStream aos = new TarArchiveOutputStream(baos);
    try {
        for (BinaryFile binaryFile : fileCompressor.getMapBinaryFile().values()) {
            TarArchiveEntry entry = new TarArchiveEntry(binaryFile.getDesPath());
            entry.setSize(binaryFile.getActualSize());
            aos.putArchiveEntry(entry);
            aos.write(binaryFile.getData());
            aos.closeArchiveEntry();
        }
        aos.flush();
        aos.finish();
    } catch (Exception e) {
        FileCompressor.LOGGER.error("Error on compress data", e);
    } finally {
        aos.close();
        baos.close();
    }
    return baos.toByteArray();
}

From source file:com.espringtran.compressor4j.processor.TarBz2Processor.java

/**
 * Compresses every file registered in the given FileCompressor into a
 * bzip2-compressed TAR (tar.bz2) archive held in memory.
 *
 * @param fileCompressor
 *            FileCompressor object supplying the files to archive
 * @return the raw tar.bz2 bytes; on error this is whatever was written
 *         before the failure (possibly empty or truncated)
 * @throws Exception declared for interface compatibility; I/O errors are
 *         actually logged and swallowed
 */
@Override
public byte[] compressData(FileCompressor fileCompressor) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // Resources close in reverse declaration order (aos, then cos, then baos)
    // even if one close throws — the original finally chain aborted after the
    // first failing close, leaking the remaining streams.
    try (BZip2CompressorOutputStream cos = new BZip2CompressorOutputStream(baos);
            TarArchiveOutputStream aos = new TarArchiveOutputStream(cos)) {
        for (BinaryFile binaryFile : fileCompressor.getMapBinaryFile().values()) {
            TarArchiveEntry entry = new TarArchiveEntry(binaryFile.getDesPath());
            // NOTE(review): assumes getActualSize() == getData().length — confirm.
            entry.setSize(binaryFile.getActualSize());
            aos.putArchiveEntry(entry);
            aos.write(binaryFile.getData());
            aos.closeArchiveEntry();
        }
        aos.flush();
        // Tar trailer; the bzip2 trailer is written when cos closes.
        aos.finish();
    } catch (Exception e) {
        // Best-effort behavior preserved: log and return partial output.
        FileCompressor.LOGGER.error("Error on compress data", e);
    }
    return baos.toByteArray();
}

From source file:com.espringtran.compressor4j.processor.TarGzProcessor.java

/**
 * Compresses every file registered in the given FileCompressor into a
 * gzip-compressed TAR (tar.gz) archive held in memory.
 *
 * @param fileCompressor
 *            FileCompressor object supplying the files to archive
 * @return the raw tar.gz bytes; on error this is whatever was written
 *         before the failure (possibly empty or truncated)
 * @throws Exception declared for interface compatibility; I/O errors are
 *         actually logged and swallowed
 */
@Override
public byte[] compressData(FileCompressor fileCompressor) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // Resources close in reverse declaration order even if one close throws —
    // the original finally chain aborted after the first failing close.
    try (GzipCompressorOutputStream cos = new GzipCompressorOutputStream(baos);
            TarArchiveOutputStream aos = new TarArchiveOutputStream(cos)) {
        for (BinaryFile binaryFile : fileCompressor.getMapBinaryFile().values()) {
            TarArchiveEntry entry = new TarArchiveEntry(binaryFile.getDesPath());
            // NOTE(review): assumes getActualSize() == getData().length — confirm.
            entry.setSize(binaryFile.getActualSize());
            aos.putArchiveEntry(entry);
            aos.write(binaryFile.getData());
            aos.closeArchiveEntry();
        }
        aos.flush();
        // Tar trailer; the gzip trailer is written when cos closes.
        aos.finish();
    } catch (Exception e) {
        // Best-effort behavior preserved: log and return partial output.
        FileCompressor.LOGGER.error("Error on compress data", e);
    }
    return baos.toByteArray();
}

From source file:com.espringtran.compressor4j.processor.XzProcessor.java

/**
 * Compress data/*from w w  w. ja v a  2 s . c  o  m*/
 * 
 * @param fileCompressor
 *            FileCompressor object
 * @return
 * @throws Exception
 */
@Override
public byte[] compressData(FileCompressor fileCompressor) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    XZCompressorOutputStream cos = new XZCompressorOutputStream(baos);
    TarArchiveOutputStream aos = new TarArchiveOutputStream(cos);
    try {
        for (BinaryFile binaryFile : fileCompressor.getMapBinaryFile().values()) {
            TarArchiveEntry entry = new TarArchiveEntry(binaryFile.getDesPath());
            entry.setSize(binaryFile.getActualSize());
            aos.putArchiveEntry(entry);
            aos.write(binaryFile.getData());
            aos.closeArchiveEntry();
        }
        aos.flush();
        aos.finish();
    } catch (Exception e) {
        FileCompressor.LOGGER.error("Error on compress data", e);
    } finally {
        aos.close();
        cos.close();
        baos.close();
    }
    return baos.toByteArray();
}

From source file:com.netflix.spinnaker.halyard.core.registry.v1.LocalDiskProfileReader.java

/**
 * Recursively tars every regular file under {@code profilePath} (following
 * symlinks) into an in-memory archive and returns it as a stream. Entry names
 * are relative to {@code profilePath}.
 *
 * @param profilePath root directory of the profile to archive
 * @return an InputStream over the complete tar archive bytes
 * @throws IOException if walking the tree or writing the archive fails
 */
public InputStream readArchiveProfileFrom(Path profilePath) throws IOException {

    ByteArrayOutputStream os = new ByteArrayOutputStream();
    try (TarArchiveOutputStream tarArchive = new TarArchiveOutputStream(os)) {

        // Files.walk holds directory handles until closed — the original never
        // closed the stream, so wrap it in try-with-resources.
        ArrayList<Path> filePathsToAdd;
        try (java.util.stream.Stream<Path> walk = java.nio.file.Files
                .walk(profilePath, Integer.MAX_VALUE, FileVisitOption.FOLLOW_LINKS)) {
            filePathsToAdd = walk.filter(path -> path.toFile().isFile())
                    .collect(Collectors.toCollection(ArrayList::new));
        }

        for (Path path : filePathsToAdd) {
            TarArchiveEntry tarEntry = new TarArchiveEntry(path.toFile(), profilePath.relativize(path).toString());
            tarArchive.putArchiveEntry(tarEntry);
            // The original leaked one InputStream per file; close each after copying.
            try (InputStream in = Files.newInputStream(path)) {
                IOUtils.copy(in, tarArchive);
            }
            tarArchive.closeArchiveEntry();
        }

        // Write the tar trailer records; close() follows via try-with-resources.
        tarArchive.finish();
    }

    return new ByteArrayInputStream(os.toByteArray());
}

From source file:de.uzk.hki.da.pkg.TarGZArchiveBuilder.java

/**
 * Archives {@code srcFolder} into the tar.gz file {@code destFile}.
 *
 * @param srcFolder     directory to archive
 * @param destFile      target .tar.gz file
 * @param includeFolder if true the folder itself becomes the archive root;
 *                      otherwise only its children are added
 * @throws Exception if the archive cannot be written
 */
public void archiveFolder(File srcFolder, File destFile, boolean includeFolder) throws Exception {

    // try-with-resources closes the whole chain in reverse order. The original
    // finally block dereferenced tOut without a null check, so a failure in any
    // constructor produced an NPE that masked the real exception.
    try (FileOutputStream fOut = new FileOutputStream(destFile);
            BufferedOutputStream bOut = new BufferedOutputStream(fOut);
            GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(bOut);
            TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut)) {

        // GNU long-file mode permits entry names beyond the 100-byte POSIX limit.
        tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        // Named constant instead of the magic number 2 used by the original.
        tOut.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);

        if (includeFolder) {
            addFileToTarGZ(tOut, srcFolder, "");
        } else {
            File[] children = srcFolder.listFiles();
            // listFiles() returns null when srcFolder is not a readable directory.
            if (children != null) {
                for (File child : children) {
                    addFileToTarGZ(tOut, child, "");
                }
            }
        }

        // Write the tar trailer records before the streams are closed.
        tOut.finish();
    }
}

From source file:hudson.gridmaven.gridlayer.HadoopInstance.java

/**
 * This method opens a path and recursively adds all files into tar, then
 * inserts it to HDFS./* w  w w  .  j av  a2s.c  o  m*/
 */
public void tarAndInsert(String directoryPath, String tarGzPath) throws IOException {
    OutputStream fOut = null;
    //FileOutputStream fOut = null;
    BufferedOutputStream bOut = null;
    TarArchiveOutputStream tOut = null;
    //tarGzPath = "test.tar";
    //File f = new File(tarGzPath);
    Path f = new Path(tarGzPath);

    String skipDirectoryPath = "";
    File l = new File(directoryPath);
    if (l.isDirectory())
        skipDirectoryPath = directoryPath;
    try {
        fs.delete(f, true);
        fOut = fs.create(f);
        //fOut = new FileOutputStream(f);
        bOut = new BufferedOutputStream(fOut);
        tOut = new TarArchiveOutputStream(bOut);

        addFileToTar(tOut, directoryPath, "", skipDirectoryPath);

        tOut.finish();
        tOut.close();
        bOut.close();
        fOut.close();
    } catch (Exception e) {
        e.printStackTrace();
        Logger.getLogger(HadoopInstance.class.getName()).log(Level.SEVERE, null, e);
    }
}

From source file:ezbake.protect.ezca.EzCABootstrap.java

/**
 * Writes the certificate material in {@code certs} to
 * {@code <filePath>/<name>.tar.gz}, one tar entry per byte[] field, with the
 * file readable and writable only by its owner.
 *
 * @param name     base name of the tarball (".tar.gz" is appended)
 * @param certs    thrift-style struct whose byte[] fields become entries
 * @param filePath directory in which the tarball is created
 */
public static void createAndWriteTarball(String name, AppCerts certs, String filePath) {
    try {
        File outputFile = new File(filePath, name + ".tar.gz");
        outputFile.createNewFile();
        // Restrict permissions to the owning user before key material is written.
        outputFile.setWritable(false, false);
        outputFile.setWritable(true, true);
        outputFile.setReadable(false, false);
        outputFile.setReadable(true, true);

        // try-with-resources closes both streams on every path. The original
        // leaked fos (and the compressor stream) whenever a later constructor
        // or the compressor factory threw, and only closed the outermost stream.
        try (FileOutputStream fos = new FileOutputStream(outputFile);
                TarArchiveOutputStream os = new TarArchiveOutputStream(new CompressorStreamFactory()
                        .createCompressorOutputStream(CompressorStreamFactory.GZIP, fos))) {

            // For each byte[] field in the app certs, create an entry in the tar archive.
            for (AppCerts._Fields field : AppCerts._Fields.values()) {
                Object o = certs.getFieldValue(field);
                if (o instanceof byte[]) {
                    // Field names use '_' where the entry name needs '.'.
                    String fieldName = field.getFieldName().replace("_", ".");
                    addTarArchiveEntry(os, fieldName, (byte[]) o);
                }
            }

            // Write the tar trailer records; close() then flushes the gzip stream.
            os.finish();
        }
    } catch (FileNotFoundException e) {
        logger.error("Unable to write tarball", e);
    } catch (CompressorException e) {
        logger.error("Error compressing tarball", e);
    } catch (IOException e) {
        logger.error("Error creating output file for tarball", e);
    }
}

From source file:com.netflix.spinnaker.halyard.core.registry.v1.GitProfileReader.java

/**
 * Recursively tars every regular file under the profile directory for the
 * given artifact/version/profile (following symlinks) into an in-memory
 * archive, preserving POSIX file permissions on each entry.
 *
 * @param artifactName artifact whose profile is read
 * @param version      artifact version
 * @param profileName  profile to archive
 * @return an InputStream over the complete tar archive bytes
 * @throws IOException if walking the tree or writing the archive fails
 */
@Override
public InputStream readArchiveProfile(String artifactName, String version, String profileName)
        throws IOException {
    Path profilePath = Paths.get(profilePath(artifactName, version, profileName));

    ByteArrayOutputStream os = new ByteArrayOutputStream();
    try (TarArchiveOutputStream tarArchive = new TarArchiveOutputStream(os)) {

        // Files.walk holds directory handles until closed — the original never
        // closed the stream, so wrap it in try-with-resources.
        ArrayList<Path> filePathsToAdd;
        try (java.util.stream.Stream<Path> walk = java.nio.file.Files
                .walk(profilePath, Integer.MAX_VALUE, FileVisitOption.FOLLOW_LINKS)) {
            filePathsToAdd = walk.filter(path -> path.toFile().isFile())
                    .collect(Collectors.toCollection(ArrayList::new));
        }

        for (Path path : filePathsToAdd) {
            TarArchiveEntry tarEntry = new TarArchiveEntry(path.toFile(), profilePath.relativize(path).toString());
            // Carry the source file's POSIX permissions into the tar entry mode.
            int permissions = FileModeUtils.getFileMode(Files.getPosixFilePermissions(path));
            permissions = FileModeUtils.setFileBit(permissions);
            tarEntry.setMode(permissions);
            tarArchive.putArchiveEntry(tarEntry);
            // The original leaked one InputStream per file; close each after copying.
            try (InputStream in = Files.newInputStream(path)) {
                IOUtils.copy(in, tarArchive);
            }
            tarArchive.closeArchiveEntry();
        }

        // Write the tar trailer records; close() follows via try-with-resources.
        tarArchive.finish();
    }

    return new ByteArrayInputStream(os.toByteArray());
}