Example usage for java.util.zip GZIPOutputStream GZIPOutputStream

List of usage examples for java.util.zip GZIPOutputStream GZIPOutputStream

Introduction

On this page you can find example usage of the java.util.zip GZIPOutputStream(OutputStream) constructor.

Prototype

public GZIPOutputStream(OutputStream out) throws IOException 

Source Link

Document

Creates a new output stream with a default buffer size.

Usage

From source file:com.google.api.ads.adwords.awreporting.processors.onmemory.ReportProcessorOnMemoryTest.java

/**
 * Returns the gzip-compressed contents of the CSV report file for the given report type,
 * caching the result in {@code reportDataMap} so each file is read and compressed at most once.
 *
 * @param reportType the report type whose backing CSV file should be loaded
 * @return the gzip-compressed report bytes
 * @throws Exception if the report file cannot be read or compressed
 */
private byte[] getReporDatafromCsv(ReportDefinitionReportType reportType) throws Exception {
    byte[] reportData = reportDataMap.get(reportType);
    if (reportData == null) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream(2048);
        // try-with-resources closes both streams on every path; the original
        // leaked the FileInputStream unconditionally.
        try (FileInputStream fis = new FileInputStream(getReportDataFileName(reportType));
                GZIPOutputStream gzipOut = new GZIPOutputStream(baos)) {
            FileUtil.copy(fis, gzipOut);
        }
        // The gzip stream is closed here, so its trailer has been flushed and
        // the byte array is a complete gzip document.
        reportData = baos.toByteArray();
        reportDataMap.put(reportType, reportData);
    }
    return reportData;
}

From source file:co.aikar.timings.TimingsExport.java

/**
 * Serializes the collected timing history to JSON, gzip-compresses it, and POSTs it
 * to the timings web service. On the expected 302 response, the generated report URL
 * (from the Location header) is announced to the listeners; any other status or an
 * IOException is reported as an error. Listeners are always notified via
 * {@code done(timingsURL)}, with a null URL on failure.
 *
 * <p>Fix over the original: the gzip request stream is closed via try-with-resources,
 * so it is no longer leaked when the write or the connection fails mid-request.
 */
@Override
public void run() {
    out.put("data", toArrayMapper(history, TimingHistory::export));

    String response = null;
    String timingsURL = null;
    try {
        HttpURLConnection con = (HttpURLConnection) new URL("http://timings.aikar.co/post").openConnection();
        con.setDoOutput(true);
        String hostName = "BrokenHost";
        try {
            hostName = InetAddress.getLocalHost().getHostName();
        } catch (Exception ignored) {
            // Host name is cosmetic (User-Agent only); keep the placeholder.
        }
        con.setRequestProperty("User-Agent", "Paper/" + Bukkit.getServerName() + "/" + hostName);
        con.setRequestMethod("POST");
        con.setInstanceFollowRedirects(false);

        // The instance-initializer block raises the deflater to compression level 7.
        try (OutputStream request = new GZIPOutputStream(con.getOutputStream()) {
            {
                this.def.setLevel(7);
            }
        }) {
            request.write(JSONValue.toJSONString(out).getBytes("UTF-8"));
        }

        response = getResponse(con);

        // The service signals success by redirecting to the generated report.
        if (con.getResponseCode() != 302) {
            listeners.sendMessage(
                    ChatColor.RED + "Upload Error: " + con.getResponseCode() + ": " + con.getResponseMessage());
            listeners.sendMessage(ChatColor.RED + "Check your logs for more information");
            if (response != null) {
                Bukkit.getLogger().log(Level.SEVERE, response);
            }
            return;
        }

        timingsURL = con.getHeaderField("Location");
        listeners.sendMessage(ChatColor.GREEN + "View Timings Report: " + timingsURL);

        if (response != null && !response.isEmpty()) {
            Bukkit.getLogger().log(Level.INFO, "Timing Response: " + response);
        }
    } catch (IOException ex) {
        listeners.sendMessage(ChatColor.RED + "Error uploading timings, check your logs for more information");
        if (response != null) {
            Bukkit.getLogger().log(Level.SEVERE, response);
        }
        Bukkit.getLogger().log(Level.SEVERE, "Could not paste timings", ex);
    } finally {
        // Always notify listeners, even on failure (timingsURL stays null then).
        this.listeners.done(timingsURL);
    }
}

From source file:de.dentrassi.pm.deb.aspect.internal.RepoBuilder.java

/**
 * Gzip-compresses the given byte array entirely in memory.
 *
 * @param data raw bytes to compress
 * @return the gzip-compressed representation of {@code data}
 * @throws IOException if compression fails
 */
private byte[] compressGzip(final byte[] data) throws IOException {
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    // Closing the gzip stream flushes its trailer into the buffer.
    try (final GZIPOutputStream gzip = new GZIPOutputStream(buffer)) {
        gzip.write(data);
    }
    return buffer.toByteArray();
}

From source file:tr.com.turkcellteknoloji.turkcellupdater.Utilities.java

/**
 * Gzip-compresses the UTF-8 encoding of the given string.
 *
 * @param string text to compress
 * @return the gzip-compressed bytes
 * @throws IOException if compression fails
 */
static byte[] compress(String string) throws IOException {
    // Pre-size the buffer using the input length as a rough capacity hint.
    ByteArrayOutputStream os = new ByteArrayOutputStream(string.length());
    // try-with-resources: the original left the gzip stream (and os) open if
    // write() threw; closing also flushes the gzip trailer before we read the bytes.
    try (GZIPOutputStream gos = new GZIPOutputStream(os)) {
        gos.write(string.getBytes("UTF-8"));
    }
    return os.toByteArray();
}

From source file:com.dilmus.dilshad.scabi.core.DComputeSync.java

/**
 * Submits BeanShell source to the compute node for synchronous execution and
 * returns the node's JSON reply.
 *
 * <p>The payload (compute-unit sizing, JSON input, the script, plus any pending
 * jar attachments) is gzip-compressed and POSTed to /Compute/Execute/BshCode.
 *
 * @param bshSource BeanShell source code to execute remotely
 * @return the response body as a JSON string, or a DMJson error document when
 *         the response carried no entity
 * @throws ClientProtocolException on HTTP protocol errors
 * @throws IOException if the payload cannot be built or the request fails
 */
public String executeCode(String bshSource) throws ClientProtocolException, IOException {
    HttpPost postRequest = new HttpPost("/Compute/Execute/BshCode");

    // Assemble the request document.
    DMJson payload = new DMJson("TotalComputeUnit", "" + m_TU)
            .add("SplitComputeUnit", "" + m_SU)
            .add("JsonInput", "" + m_jsonStrInput)
            .add("BshSource", bshSource);

    log.debug("executeCode() m_jarFilePathList.size() : {}", m_jarFilePathList.size());
    if (!m_jarFilePathList.isEmpty()) {
        payload = addJars(payload);
        m_jarFilePathList.clear();
    }

    log.debug("executeCode() m_isComputeUnitJarsSet : {}", m_isComputeUnitJarsSet);
    if (m_isComputeUnitJarsSet) {
        payload = addComputeUnitJars(payload);
        m_isComputeUnitJarsSet = false;
        m_dcl = null;
    }

    // The body is sent gzip-compressed; advertise that in both directions.
    postRequest.addHeader("Content-Encoding", "gzip");
    postRequest.addHeader("Accept-Encoding", "gzip");

    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    try (GZIPOutputStream gzip = new GZIPOutputStream(compressed)) {
        gzip.write(payload.toString().getBytes("UTF-8"));
    }
    byte[] gzipBytes = compressed.toByteArray();
    compressed.close();
    postRequest.setEntity(new ByteArrayEntity(gzipBytes));

    log.debug("executeCode() executing request to " + m_target + "/Compute/Execute/BshCode");

    HttpResponse httpResponse = m_httpClient.execute(m_target, postRequest);
    HttpEntity entity = httpResponse.getEntity();

    String jsonString = null;
    if (entity != null) {
        jsonString = EntityUtils.toString(entity);
        log.debug("executeCode() jsonString : {}", jsonString);
    }
    if (jsonString == null) {
        return DMJson.error("null");
    }
    return jsonString;
}

From source file:com.eucalyptus.blockstorage.S3SnapshotTransfer.java

/**
 * Compresses the snapshot and uploads it to a bucket in objectstorage gateway as a single or multipart upload based on the configuration in
 * {@link StorageInfo}. Bucket name should be configured before invoking this method. It can be looked up and initialized by {@link #prepareForUpload()} or
 * explicitly set using {@link #setBucketName(String)}
 *
 * <p>The snapshot is read in READ_BUFFER_SIZE chunks, gzip-compressed into an in-memory buffer, and spooled to temp files of roughly partSize
 * compressed bytes each. If more than one part is produced, parts are handed to an UploadPartTask through a bounded queue and the upload is
 * finalized by a CompleteMpuTask; otherwise the single compressed file is uploaded as one object.
 *
 * @param sourceFileName
 *            absolute path to the snapshot on the file system
 */
@Override
public void upload(String sourceFileName) throws SnapshotTransferException {
    validateInput(); // Validate input
    loadTransferConfig(); // Load the transfer configuration parameters from database
    SnapshotProgressCallback progressCallback = new SnapshotProgressCallback(snapshotId); // Setup the progress callback

    Boolean error = Boolean.FALSE;
    ArrayBlockingQueue<SnapshotPart> partQueue = null;
    SnapshotPart part = null;
    SnapshotUploadInfo snapUploadInfo = null;
    Future<List<PartETag>> uploadPartsFuture = null;
    Future<String> completeUploadFuture = null;

    byte[] buffer = new byte[READ_BUFFER_SIZE];
    Long readOffset = 0L; // offset into the uncompressed source where the current part starts
    Long bytesRead = 0L; // uncompressed bytes read into the current part
    Long bytesWritten = 0L; // compressed bytes written to the current part file
    int len;
    int partNumber = 1;

    try {
        // Get the uncompressed file size for uploading as metadata
        Long uncompressedSize = getFileSize(sourceFileName);

        // Setup the snapshot and part entities.
        snapUploadInfo = SnapshotUploadInfo.create(snapshotId, bucketName, keyName);
        Path zipFilePath = Files.createTempFile(keyName + '-', '-' + String.valueOf(partNumber));
        part = SnapshotPart.createPart(snapUploadInfo, zipFilePath.toString(), partNumber, readOffset);

        FileInputStream inputStream = new FileInputStream(sourceFileName);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        GZIPOutputStream gzipStream = new GZIPOutputStream(baos);
        FileOutputStream outputStream = new FileOutputStream(zipFilePath.toString());

        try {
            LOG.debug("Reading snapshot " + snapshotId + " and compressing it to disk in chunks of size "
                    + partSize + " bytes or greater");
            while ((len = inputStream.read(buffer)) > 0) {
                bytesRead += len;
                gzipStream.write(buffer, 0, len);

                // Spool compressed bytes to the part file while the part is under partSize;
                // once it would reach partSize, seal this part and start the next one.
                if ((bytesWritten + baos.size()) < partSize) {
                    baos.writeTo(outputStream);
                    bytesWritten += baos.size();
                    baos.reset();
                } else {
                    gzipStream.close();
                    baos.writeTo(outputStream); // Order is important. Closing the gzip stream flushes stuff
                    bytesWritten += baos.size();
                    baos.reset();
                    outputStream.close();

                    if (partNumber > 1) {// Update the part status
                        part = part.updateStateCreated(bytesWritten, bytesRead, Boolean.FALSE);
                    } else {// Initialize multipart upload only once after the first part is created
                        LOG.info("Uploading snapshot " + snapshotId
                                + " to objectstorage using multipart upload");
                        progressCallback.setUploadSize(uncompressedSize);
                        uploadId = initiateMulitpartUpload(uncompressedSize);
                        snapUploadInfo = snapUploadInfo.updateUploadId(uploadId);
                        part = part.updateStateCreated(uploadId, bytesWritten, bytesRead, Boolean.FALSE);
                        partQueue = new ArrayBlockingQueue<SnapshotPart>(queueSize);
                        uploadPartsFuture = Threads.enqueue(serviceConfig, UploadPartTask.class, poolSize,
                                new UploadPartTask(partQueue, progressCallback));
                    }

                    // Check for the future task before adding part to the queue.
                    if (uploadPartsFuture != null && uploadPartsFuture.isDone()) {
                        // This task shouldn't be done until the last part is added. If it is done at this point, then something might have gone wrong
                        throw new SnapshotUploadPartException(
                                "Error uploading parts, aborting part creation process. Check previous log messages for the exact error");
                    }

                    // Add part to the queue
                    partQueue.put(part);

                    // Prep the metadata for the next part
                    readOffset += bytesRead;
                    bytesRead = 0L;
                    bytesWritten = 0L;

                    // Setup the part entity for next part
                    zipFilePath = Files.createTempFile(keyName + '-', '-' + String.valueOf((++partNumber)));
                    part = SnapshotPart.createPart(snapUploadInfo, zipFilePath.toString(), partNumber,
                            readOffset);

                    // Start a fresh gzip stream so each part file is a complete gzip document.
                    gzipStream = new GZIPOutputStream(baos);
                    outputStream = new FileOutputStream(zipFilePath.toString());
                }
            }

            // Flush the final (possibly short) part to its file.
            gzipStream.close();
            baos.writeTo(outputStream);
            bytesWritten += baos.size();
            baos.reset();
            outputStream.close();
            inputStream.close();

            // Update the part status
            part = part.updateStateCreated(bytesWritten, bytesRead, Boolean.TRUE);

            // Update the snapshot upload info status
            snapUploadInfo = snapUploadInfo.updateStateCreatedParts(partNumber);
        } catch (Exception e) {
            LOG.error("Failed to upload " + snapshotId + " due to: ", e);
            error = Boolean.TRUE;
            throw new SnapshotTransferException("Failed to upload " + snapshotId + " due to: ", e);
        } finally {
            // NOTE(review): on the success path gzipStream/outputStream were already
            // closed above; the repeated close() here is expected to be a no-op.
            if (inputStream != null) {
                inputStream.close();
            }
            if (gzipStream != null) {
                gzipStream.close();
            }
            if (outputStream != null) {
                outputStream.close();
            }
            baos.reset();
        }

        if (partNumber > 1) {
            // Check for the future task before adding the last part to the queue.
            if (uploadPartsFuture != null && uploadPartsFuture.isDone()) {
                // This task shouldn't be done until the last part is added. If it is done at this point, then something might have gone wrong
                throw new SnapshotUploadPartException(
                        "Error uploading parts, aborting part upload process. Check previous log messages for the exact error");
            }
            // Add the last part to the queue
            partQueue.put(part);
            // Kick off the completion task
            completeUploadFuture = Threads.enqueue(serviceConfig, CompleteMpuTask.class, poolSize,
                    new CompleteMpuTask(uploadPartsFuture, snapUploadInfo, partNumber));
        } else {
            try {
                LOG.info("Uploading snapshot " + snapshotId
                        + " to objectstorage as a single object. Compressed size of snapshot (" + bytesWritten
                        + " bytes) is less than minimum part size (" + partSize
                        + " bytes) for multipart upload");
                PutObjectResult putResult = uploadSnapshotAsSingleObject(zipFilePath.toString(), bytesWritten,
                        uncompressedSize, progressCallback);
                markSnapshotAvailable();
                try {
                    part = part.updateStateUploaded(putResult.getETag());
                    snapUploadInfo = snapUploadInfo.updateStateUploaded(putResult.getETag());
                } catch (Exception e) {
                    // Best-effort: the object is already uploaded; only the DB status update failed.
                    LOG.debug("Failed to update status in DB for " + snapUploadInfo);
                }
                LOG.info("Uploaded snapshot " + snapshotId + " to objectstorage");
            } catch (Exception e) {
                error = Boolean.TRUE;
                LOG.error("Failed to upload snapshot " + snapshotId + " due to: ", e);
                throw new SnapshotTransferException("Failed to upload snapshot " + snapshotId + " due to: ", e);
            } finally {
                deleteFile(zipFilePath);
            }
        }
    } catch (SnapshotTransferException e) {
        error = Boolean.TRUE;
        throw e;
    } catch (Exception e) {
        error = Boolean.TRUE;
        LOG.error("Failed to upload snapshot " + snapshotId + " due to: ", e);
        throw new SnapshotTransferException("Failed to upload snapshot " + snapshotId + " due to: ", e);
    } finally {
        // On any failure, abort the multipart upload and cancel outstanding tasks.
        if (error) {
            abortUpload(snapUploadInfo);
            if (uploadPartsFuture != null && !uploadPartsFuture.isDone()) {
                uploadPartsFuture.cancel(true);
            }
            if (completeUploadFuture != null && !completeUploadFuture.isDone()) {
                completeUploadFuture.cancel(true);
            }
        }
    }
}

From source file:com.st.maven.debian.DebianPackageMojo.java

/**
 * Writes the Debian control archive (control.tar.gz) into the enclosing ar output:
 * a "./" root entry, the rendered control file, and the four maintainer scripts
 * (preinst, postinst, prerm, postrm) rendered from their templates.
 *
 * @param config template model used to render the control file and scripts
 * @param output the ar archive to append the gzipped tar to
 * @throws MojoExecutionException if rendering or archiving fails
 */
private void fillControlTar(Config config, ArFileOutputStream output) throws MojoExecutionException {
    TarArchiveOutputStream tar = null;
    try {
        tar = new TarArchiveOutputStream(new GZIPOutputStream(new ArWrapper(output)));
        tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);

        // Debian expects a leading "./" root directory entry.
        TarArchiveEntry rootDir = new TarArchiveEntry("./");
        tar.putArchiveEntry(rootDir);
        tar.closeArchiveEntry();

        // The control file keeps the archive default mode; maintainer scripts must be executable.
        writeTarEntry(tar, "./control", processTemplate(freemarkerConfig, config, "control.ftl"), null);
        writeTarEntry(tar, "./preinst", processTemplate("preinst", freemarkerConfig, config,
                combine("preinst.ftl", BASE_DIR + File.separator + "preinst", false)), 0755);
        writeTarEntry(tar, "./postinst", processTemplate("postinst", freemarkerConfig, config,
                combine("postinst.ftl", BASE_DIR + File.separator + "postinst", true)), 0755);
        writeTarEntry(tar, "./prerm", processTemplate("prerm", freemarkerConfig, config,
                combine("prerm.ftl", BASE_DIR + File.separator + "prerm", false)), 0755);
        writeTarEntry(tar, "./postrm", processTemplate("postrm", freemarkerConfig, config,
                combine("postrm.ftl", BASE_DIR + File.separator + "postrm", false)), 0755);
    } catch (Exception e) {
        throw new MojoExecutionException("unable to create control tar", e);
    } finally {
        if (tar != null) {
            try {
                tar.close();
            } catch (IOException e) {
                getLog().error("unable to finish tar", e);
            }
        }
    }
}

/**
 * Adds one file entry to the tar stream.
 *
 * @param tar  destination archive
 * @param name entry name ("./"-prefixed, as Debian expects)
 * @param data entry contents
 * @param mode permission bits (e.g. 0755), or null to keep the archive default
 */
private static void writeTarEntry(TarArchiveOutputStream tar, String name, byte[] data, Integer mode)
        throws IOException {
    TarArchiveEntry entry = new TarArchiveEntry(name);
    entry.setSize(data.length);
    if (mode != null) {
        entry.setMode(mode);
    }
    tar.putArchiveEntry(entry);
    tar.write(data);
    tar.closeArchiveEntry();
}

From source file:playground.christoph.evacuation.analysis.AgentsInMunicipalityEventsHandler.java

/**
 * Writes the initial resident/household statistics to
 * "&lt;outputFile&gt;_statistics.txt.gz" as gzip-compressed, separator-delimited text:
 * total resident people, total resident households, and one row per household
 * HHTP value with its household count.
 *
 * <p>Fix over the original: the writer chain is now closed in a finally block,
 * so the open file handle is no longer leaked when a write fails.
 *
 * @throws RuntimeException wrapping any IOException raised while writing
 */
public void printInitialStatistics() {
    try {
        fos = new FileOutputStream(outputFile + "_statistics.txt.gz");
        gzos = new GZIPOutputStream(fos);
        osw = new OutputStreamWriter(gzos, charset);
        bw = new BufferedWriter(osw);
        try {
            bw.write("residental people");
            bw.write(separator);
            bw.write(String.valueOf(this.residentAgents.size()));
            bw.write(newLine);

            bw.write("residental households");
            bw.write(separator);
            bw.write(String.valueOf(this.residentHouseholds.size()));
            bw.write(newLine);

            // HHTP statistics: header row, then one row per HHTP value.
            bw.write(newLine);
            bw.write("household HHTP");
            bw.write(separator);
            bw.write("number of households");
            bw.write(newLine);
            for (Entry<Integer, List<Id>> entry : this.residentHouseholdHHTPs.entrySet()) {
                bw.write(String.valueOf(entry.getKey()));
                bw.write(separator);
                bw.write(String.valueOf(entry.getValue().size()));
                bw.write(newLine);
            }
        } finally {
            // Closing the outermost writer flushes and closes the whole chain;
            // the remaining closes are harmless no-ops kept for symmetry.
            bw.close();
            osw.close();
            gzos.close();
            fos.close();
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:net.sf.mzmine.project.impl.StorableScan.java

/**
 * Opens a buffered writer for the given path, transparently gzip-compressing
 * the output when the file name ends in ".gz".
 *
 * @param path destination file path
 * @return a buffered writer over the (possibly compressed) file
 * @throws IOException if the file cannot be opened
 */
private BufferedWriter openFile(String path) throws IOException {
    final OutputStreamWriter sink;
    if (path.endsWith(".gz")) {
        sink = new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(path)));
    } else {
        // FileWriter is an OutputStreamWriter over the file with the default charset.
        sink = new FileWriter(path);
    }
    return new BufferedWriter(sink);
}

From source file:com.dilmus.dilshad.scabi.core.async.DComputeNoBlock.java

/**
 * Submits BeanShell source to the compute node for asynchronous execution.
 *
 * <p>The payload (compute-unit sizing, JSON input, the script, plus any pending
 * jar attachments) is gzip-compressed and POSTed to /Compute/Execute/BshCode;
 * the response is returned as a future rather than awaited.
 *
 * @param bshSource BeanShell source code to execute remotely
 * @return a future resolving to the raw HTTP response
 * @throws ClientProtocolException on HTTP protocol errors
 * @throws IOException if the payload cannot be built or the request fails
 */
public Future<HttpResponse> executeCode(String bshSource) throws ClientProtocolException, IOException {
    HttpPost postRequest = new HttpPost("/Compute/Execute/BshCode");

    // Assemble the request document.
    DMJson payload = new DMJson("TotalComputeUnit", "" + m_TU)
            .add("SplitComputeUnit", "" + m_SU)
            .add("JsonInput", "" + m_jsonStrInput)
            .add("BshSource", bshSource);

    log.debug("executeCode() m_jarFilePathList.size() : {}", m_jarFilePathList.size());
    if (!m_jarFilePathList.isEmpty()) {
        payload = addJars(payload);
        m_jarFilePathList.clear();
    }

    log.debug("executeCode() m_isComputeUnitJarsSet : {}", m_isComputeUnitJarsSet);
    if (m_isComputeUnitJarsSet) {
        payload = addComputeUnitJars(payload);
        m_isComputeUnitJarsSet = false;
        m_dcl = null;
    }

    // The body is sent gzip-compressed; advertise that in both directions.
    postRequest.addHeader("Content-Encoding", "gzip");
    postRequest.addHeader("Accept-Encoding", "gzip");

    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    try (GZIPOutputStream gzip = new GZIPOutputStream(compressed)) {
        gzip.write(payload.toString().getBytes("UTF-8"));
    }
    byte[] gzipBytes = compressed.toByteArray();
    compressed.close();
    postRequest.setEntity(new ByteArrayEntity(gzipBytes));

    log.debug("executeCode() executing request to " + m_target + "/Compute/Execute/BshCode");
    incCountRequests();
    return m_httpClient.execute(m_target, postRequest, null);
}