Example usage for java.util.zip GZIPOutputStream GZIPOutputStream

List of usage examples for java.util.zip GZIPOutputStream GZIPOutputStream

Introduction

On this page you can find an example usage for java.util.zip GZIPOutputStream GZIPOutputStream.

Prototype

public GZIPOutputStream(OutputStream out) throws IOException 

Source Link

Document

Creates a new output stream with a default buffer size.

Usage

From source file:gobblin.metastore.ZkStateStore.java

@Override
public void putAll(String storeName, String tableName, Collection<T> states) throws IOException {
    // Serializes every state into one contiguous byte buffer (GZIP-compressed
    // when compressedValues is set) and stores the whole buffer under the
    // given store/table name via putData.
    try (ByteArrayOutputStream byteArrayOs = new ByteArrayOutputStream();
            OutputStream os = compressedValues ? new GZIPOutputStream(byteArrayOs) : byteArrayOs;
            DataOutputStream dataOutput = new DataOutputStream(os)) {

        for (T state : states) {
            addStateToDataOutputStream(dataOutput, state);
        }

        // Close explicitly BEFORE reading the buffer: when compression is on,
        // the GZIP trailer is only written on close, so toByteArray() would
        // otherwise return a truncated stream. The outer try-with-resources
        // close that follows is then a harmless second close.
        dataOutput.close();
        putData(storeName, tableName, byteArrayOs.toByteArray());
    }
}

From source file:com.anrisoftware.prefdialog.spreadsheetimportdialog.dialog.SpreadsheetImportDialogWorker.java

/**
 * Returns the current layout of the dialog as a GZIP-compressed byte array.
 * <p>
 * On failure the error is logged (not rethrown) and the previously captured
 * layout, if any, is returned.
 *
 * @return the compressed layout bytes, or the last successfully captured
 *         layout when saving fails.
 */
public synchronized byte[] getCurrentLayout() {
    notNull(importDialog, "importDialog=null");
    SpreadsheetImportDialog dialog = importDialog.get();
    notNull(dialog, "dialog=null");
    try {
        ByteArrayOutputStream stream = new ByteArrayOutputStream(1024);
        // The gzip stream must be closed before reading the buffer: the GZIP
        // trailer is only emitted on close, so calling toByteArray() while the
        // stream is still open yields a truncated, undecodable payload.
        try (GZIPOutputStream zstream = new GZIPOutputStream(stream)) {
            dialog.saveLayout("default", zstream);
        }
        currentLayout = stream.toByteArray();
    } catch (LayoutException | IOException e) {
        log.errorSaveLayout(dialog, e);
    }
    return currentLayout;
}

From source file:com.oltpbenchmark.util.ResultUploader.java

/**
 * Collects benchmark artifacts (configuration, samples, summary, DB
 * parameters/metrics, and gzipped CSV timings) into temp files and uploads
 * them as a single multipart POST to {@code uploadUrl}.
 * <p>
 * I/O and configuration errors are logged and swallowed; they do not abort
 * the benchmark run.
 *
 * @param activeTXTypes the transaction types whose timings are exported
 * @throws ParseException propagated from result processing
 */
public void uploadResult(List<TransactionType> activeTXTypes) throws ParseException {
    try {
        // Stage each artifact in its own temp file before the multipart upload.
        File expConfigFile = File.createTempFile("expconfig", ".tmp");
        File samplesFile = File.createTempFile("samples", ".tmp");
        File summaryFile = File.createTempFile("summary", ".tmp");
        File paramsFile = File.createTempFile("params", ".tmp");
        File metricsFile = File.createTempFile("metrics", ".tmp");
        File csvDataFile = File.createTempFile("csv", ".gz");

        // try-with-resources guarantees each stream is flushed and closed even
        // if a writer throws (the original leaked streams on exception).
        try (PrintStream out = new PrintStream(new FileOutputStream(expConfigFile))) {
            writeBenchmarkConf(out);
        }
        try (PrintStream out = new PrintStream(new FileOutputStream(paramsFile))) {
            writeDBParameters(out);
        }
        try (PrintStream out = new PrintStream(new FileOutputStream(metricsFile))) {
            writeDBMetrics(out);
        }
        try (PrintStream out = new PrintStream(new FileOutputStream(samplesFile))) {
            results.writeCSV2(out);
        }
        try (PrintStream out = new PrintStream(new FileOutputStream(summaryFile))) {
            writeSummary(out);
        }
        // Closing the PrintStream also closes (and thereby finishes) the
        // underlying GZIPOutputStream, writing the gzip trailer.
        try (PrintStream out = new PrintStream(new GZIPOutputStream(new FileOutputStream(csvDataFile)))) {
            results.writeAllCSVAbsoluteTiming(activeTXTypes, out);
        }

        try (CloseableHttpClient httpclient = HttpClients.createDefault()) {
            HttpPost httppost = new HttpPost(uploadUrl);

            HttpEntity reqEntity = MultipartEntityBuilder.create().addTextBody("upload_code", uploadCode)
                    .addPart("sample_data", new FileBody(samplesFile))
                    .addPart("raw_data", new FileBody(csvDataFile))
                    .addPart("db_parameters_data", new FileBody(paramsFile))
                    .addPart("db_metrics_data", new FileBody(metricsFile))
                    .addPart("benchmark_conf_data", new FileBody(expConfigFile))
                    .addPart("summary_data", new FileBody(summaryFile)).build();

            httppost.setEntity(reqEntity);

            LOG.info("executing request " + httppost.getRequestLine());
            try (CloseableHttpResponse response = httpclient.execute(httppost)) {
                HttpEntity resEntity = response.getEntity();
                LOG.info(IOUtils.toString(resEntity.getContent()));
                EntityUtils.consume(resEntity);
            }
        }
    } catch (IOException | ConfigurationException e) {
        // Log with the full stack trace instead of printStackTrace().
        LOG.error("Could not upload benchmark results: " + e.getMessage(), e);
    }
}

From source file:edu.ucsd.xmlrpc.xmlrpc.server.XmlRpcStreamServer.java

/** Called to prepare the output stream. Typically used for enabling
 * compression, or similar filters./*from  w ww .  java  2s  .co m*/
 * @param pConnection The connection object.
 */
protected OutputStream getOutputStream(ServerStreamConnection pConnection, XmlRpcStreamRequestConfig pConfig,
        OutputStream pStream) throws IOException {
    if (pConfig.isEnabledForExtensions() && pConfig.isGzipRequesting()) {
        return new GZIPOutputStream(pStream);
    } else {
        return pStream;
    }
}

From source file:com.seer.datacruncher.utils.generic.CommonUtils.java

/**
 * GZIP-compresses the UTF-8 bytes of the given string.
 *
 * @param str the string to compress; may be {@code null} or empty
 * @return the gzipped bytes, or {@code null} when {@code str} is null or empty
 * @throws IOException if compression fails
 */
public synchronized static byte[] gzipEncode(String str) throws IOException {
    if (str == null || str.isEmpty()) {
        return null;
    }
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    // try-with-resources guarantees the stream is finished (gzip trailer
    // written) and closed even if write() throws; the original leaked the
    // stream on exception.
    try (GZIPOutputStream gzip = new GZIPOutputStream(buffer)) {
        // StandardCharsets.UTF_8 avoids the checked UnsupportedEncodingException
        // path of getBytes(String) and cannot be misspelled.
        gzip.write(str.getBytes(StandardCharsets.UTF_8));
    }
    return buffer.toByteArray();
}

From source file:com.github.jasonruckman.gzip.AbstractBenchmark.java

/**
 * Serializes the sample data through a Sidney writer into a gzipped byte
 * array.
 *
 * @return the gzip-compressed serialized form of {@code sampleData}
 * @throws RuntimeException wrapping any serialization or I/O failure
 */
protected byte[] doWriteSidney() {
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // try-with-resources closes the gzip stream (writing the trailer)
        // even when a write fails, instead of leaking it on exception.
        try (GZIPOutputStream gzos = new GZIPOutputStream(baos)) {
            Writer<T> writer = safeWriter.get();
            writer.open(gzos);
            for (T item : sampleData) {
                writer.write(item);
            }
            // Close the Sidney writer first so it can flush its own buffers
            // into the gzip stream before the trailer is written.
            writer.close();
        }
        return baos.toByteArray();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:ch.entwine.weblounge.maven.S3DeployMojo.java

/**
 * Uploads the configured resources to the S3 bucket, gzipping JavaScript
 * and CSS assets when requested, then waits for all transfers to finish.
 *
 * @see org.apache.maven.plugin.Mojo#execute()
 */
public void execute() throws MojoExecutionException, MojoFailureException {

    // Setup AWS S3 client
    AWSCredentials credentials = new BasicAWSCredentials(awsAccessKey, awsSecretKey);
    AmazonS3Client uploadClient = new AmazonS3Client(credentials);
    TransferManager transfers = new TransferManager(credentials);

    // Make sure key prefix does not start with a slash but has one at the
    // end
    if (keyPrefix.startsWith("/"))
        keyPrefix = keyPrefix.substring(1);
    if (!keyPrefix.endsWith("/"))
        keyPrefix = keyPrefix + "/";

    // Keep track of how much data has been transferred
    long totalBytesTransferred = 0L;
    int items = 0;
    Queue<Upload> uploads = new LinkedBlockingQueue<Upload>();

    try {
        // Check if S3 bucket exists
        getLog().debug("Checking whether bucket " + bucket + " exists");
        if (!uploadClient.doesBucketExist(bucket)) {
            getLog().error("Desired bucket '" + bucket + "' does not exist!");
            return;
        }

        getLog().debug("Collecting files to transfer from " + resources.getDirectory());
        List<File> res = getResources();
        for (File file : res) {
            // Make path of resource relative to resources directory
            String filename = file.getName();
            String extension = FilenameUtils.getExtension(filename);
            String path = file.getPath().substring(resources.getDirectory().length());
            String key = concat("/", keyPrefix, path).substring(1);

            // Delete old file version in bucket
            getLog().debug("Removing existing object at " + key);
            uploadClient.deleteObject(bucket, key);

            // Setup meta data
            ObjectMetadata meta = new ObjectMetadata();
            meta.setCacheControl("public, max-age=" + String.valueOf(valid * 3600));

            FileInputStream fis = null;
            GZIPOutputStream gzipos = null;
            final File fileToUpload;

            if (gzip && ("js".equals(extension) || "css".equals(extension))) {
                // Compress page assets before upload and flag the encoding so
                // browsers decompress them transparently.
                try {
                    fis = new FileInputStream(file);
                    File gzFile = File.createTempFile(file.getName(), null);
                    gzipos = new GZIPOutputStream(new FileOutputStream(gzFile));
                    IOUtils.copy(fis, gzipos);
                    fileToUpload = gzFile;
                    meta.setContentEncoding("gzip");
                    if ("js".equals(extension))
                        meta.setContentType("text/javascript");
                    if ("css".equals(extension))
                        meta.setContentType("text/css");
                } catch (FileNotFoundException e) {
                    getLog().error(e);
                    continue;
                } catch (IOException e) {
                    getLog().error(e);
                    continue;
                } finally {
                    // closeQuietly finishes the gzip stream before the file is
                    // handed to the uploader.
                    IOUtils.closeQuietly(fis);
                    IOUtils.closeQuietly(gzipos);
                }
            } else {
                fileToUpload = file;
            }

            // Do a random check for existing errors before starting the next upload
            if (erroneousUpload != null)
                break;

            // Create put object request
            long bytesToTransfer = fileToUpload.length();
            totalBytesTransferred += bytesToTransfer;
            PutObjectRequest request = new PutObjectRequest(bucket, key, fileToUpload);
            request.setProgressListener(new UploadListener(credentials, bucket, key, bytesToTransfer));
            request.setMetadata(meta);

            // Schedule put object request
            getLog().info(
                    "Uploading " + key + " (" + FileUtils.byteCountToDisplaySize((int) bytesToTransfer) + ")");
            Upload upload = transfers.upload(request);
            uploads.add(upload);
            items++;
        }
    } catch (AmazonServiceException e) {
        // Keep the stack trace in the build log, not just the message.
        getLog().error("Uploading resources failed: " + e.getMessage(), e);
    } catch (AmazonClientException e) {
        getLog().error("Uploading resources failed: " + e.getMessage(), e);
    }

    // Wait for uploads to be finished
    String currentUpload = null;
    try {
        Thread.sleep(1000);
        getLog().info("Waiting for " + uploads.size() + " uploads to finish...");
        while (!uploads.isEmpty()) {
            Upload upload = uploads.poll();
            currentUpload = upload.getDescription().substring("Uploading to ".length());
            if (TransferState.InProgress.equals(upload.getState()))
                getLog().debug("Waiting for upload " + currentUpload + " to finish");
            upload.waitForUploadResult();
        }
    } catch (AmazonServiceException e) {
        // Preserve the cause so the underlying AWS error is visible.
        throw new MojoExecutionException("Error while uploading " + currentUpload, e);
    } catch (AmazonClientException e) {
        throw new MojoExecutionException("Error while uploading " + currentUpload, e);
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
        getLog().debug("Interrupted while waiting for upload to finish");
    }

    // Check for errors that happened outside of the actual uploading
    if (erroneousUpload != null) {
        throw new MojoExecutionException("Error while uploading " + erroneousUpload);
    }

    getLog().info("Deployed " + items + " files ("
            + FileUtils.byteCountToDisplaySize((int) totalBytesTransferred) + ") to s3://" + bucket);
}

From source file:gov.nih.nci.firebird.test.FirebirdFileFactory.java

/**
 * Creates a {@link FirebirdFile} whose content is 1&nbsp;KB of unique,
 * GZIP-compressed test data.
 *
 * @return the new file populated with compressed data
 * @throws IllegalStateException if the in-memory compression fails
 *         (should never happen)
 */
public FirebirdFile create() {
    try {
        // NOTE(review): getBytes() uses the platform default charset; the
        // generated test string is presumably ASCII-safe — confirm.
        byte[] data = getUniqueString(1024).getBytes();
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        // Write directly to the gzip stream — no intermediate
        // ByteArrayInputStream/IOUtils.copy needed for an in-memory byte[].
        // try-with-resources also surfaces close/finish failures instead of
        // closeQuietly() silently swallowing them (truncated gzip data).
        try (GZIPOutputStream zout = new GZIPOutputStream(bout)) {
            zout.write(data);
        }
        return create(bout.toByteArray());
    } catch (IOException e) {
        throw new IllegalStateException("This shouldn't have happened", e);
    }
}

From source file:backtype.storm.utils.Utils.java

/**
 * GZIP-compresses the given bytes.
 *
 * @param data the bytes to compress; must not be {@code null}
 * @return the gzipped representation of {@code data}
 * @throws RuntimeException wrapping any {@link IOException} from compression
 */
public static byte[] gzip(byte[] data) {
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        // try-with-resources guarantees close() runs (writing the gzip
        // trailer) before toByteArray(), and releases the stream even if
        // write() throws — the original leaked it on exception.
        try (GZIPOutputStream out = new GZIPOutputStream(bos)) {
            out.write(data);
        }
        return bos.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:edu.cornell.med.icb.goby.alignments.UpgradeTo1_9_6.java

/**
 * Rewrites the {@code basename + ".header"} file with an upgraded version
 * string, backing up the original first. The header is read either gzipped
 * (current format) or uncompressed (legacy 1.4-), and is always written
 * back gzipped.
 *
 * @param basename the alignment basename whose header is upgraded
 * @throws IOException if the header cannot be read, moved, or rewritten
 */
private void upgradeHeaderVersion(String basename) throws IOException {
    InputStream headerStream;
    try {
        headerStream = new GZIPInputStream(new RepositionableInputStream(basename + ".header"));
    } catch (IOException e) {
        // try not compressed for compatibility with 1.4-:
        LOG.trace("falling back to legacy 1.4- uncompressed header.");

        headerStream = new FileInputStream(basename + ".header");
    }
    final Alignments.AlignmentHeader header;
    try {
        // accept very large header messages, since these may contain query identifiers:
        final CodedInputStream codedInput = CodedInputStream.newInstance(headerStream);
        codedInput.setSizeLimit(Integer.MAX_VALUE);
        header = Alignments.AlignmentHeader.parseFrom(codedInput);
    } finally {
        // Close before moving the file below: the original leaked this stream,
        // and moving a still-open file fails on some platforms (e.g. Windows).
        headerStream.close();
    }

    Alignments.AlignmentHeader.Builder upgradedHeader = Alignments.AlignmentHeader.newBuilder(header);
    upgradedHeader.setVersion(VersionUtils.getImplementationVersion(UpgradeTo1_9_6.class));
    FileUtils.moveFile(new File(basename + ".header"),
            new File(makeBackFilename(basename + ".header", ".bak")));
    GZIPOutputStream headerOutput = new GZIPOutputStream(new FileOutputStream(basename + ".header"));
    try {
        upgradedHeader.build().writeTo(headerOutput);
    } finally {
        headerOutput.close();
    }
}