Example usage for java.util.zip GZIPOutputStream GZIPOutputStream

Introduction

On this page you can find usage examples for the java.util.zip GZIPOutputStream(OutputStream) constructor.

Prototype

public GZIPOutputStream(OutputStream out) throws IOException 

Document

Creates a new output stream with a default buffer size.
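Before the project-specific examples, here is a minimal, self-contained sketch of this constructor in use; it compresses a byte array in memory. The class and helper names below are illustrative only and do not come from any of the projects listed under Usage.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

public class GzipExample {

    // Compresses the given bytes with gzip, using the default buffer size.
    static byte[] gzip(byte[] input) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        // try-with-resources closes the stream, which also writes the gzip trailer
        try (GZIPOutputStream gzos = new GZIPOutputStream(bos)) {
            gzos.write(input);
        }
        return bos.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        byte[] compressed = gzip("hello gzip".getBytes(StandardCharsets.UTF_8));
        System.out.println("Compressed length: " + compressed.length);
    }
}

Note that the GZIPOutputStream must be closed (or finish() must be called) before the target stream is read; otherwise the gzip trailer is missing and the output cannot be decompressed. Several of the examples below rely on this by closing or finishing the stream before calling toByteArray().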

Usage

From source file:de.qaware.chronix.converter.common.Compression.java

/**
 * Compresses the given stream using gzip.
 *
 * @param stream the input stream
 * @return a byte[] with the compressed data from the stream
 */
public static byte[] compressFromStream(InputStream stream) {

    if (stream == null) {
        LOGGER.debug("Stream is null. Returning null.");
        return null;

    }

    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    OutputStream zippedStream = null;
    try {

        zippedStream = new GZIPOutputStream(byteArrayOutputStream);

        int nRead;
        byte[] data = new byte[16384];

        while ((nRead = stream.read(data, 0, data.length)) != -1) {
            zippedStream.write(data, 0, nRead);
        }
        zippedStream.flush();
        byteArrayOutputStream.flush();

    } catch (IOException e) {
        LOGGER.error("Exception occurred while compressing gzip stream.", e);
        return null;
    } finally {
        IOUtils.closeQuietly(zippedStream);
        IOUtils.closeQuietly(byteArrayOutputStream);
    }
    return byteArrayOutputStream.toByteArray();
}

From source file:edu.umn.cs.spatialHadoop.nasa.HDFRasterLayer.java

@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    out.writeLong(timestamp);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    GZIPOutputStream gzos = new GZIPOutputStream(baos);
    ByteBuffer bbuffer = ByteBuffer.allocate(getHeight() * 2 * 8 + 8);
    bbuffer.putInt(getWidth());
    bbuffer.putInt(getHeight());
    gzos.write(bbuffer.array(), 0, bbuffer.position());
    for (int x = 0; x < getWidth(); x++) {
        bbuffer.clear();
        for (int y = 0; y < getHeight(); y++) {
            bbuffer.putLong(sum[x][y]);
            bbuffer.putLong(count[x][y]);
        }
        gzos.write(bbuffer.array(), 0, bbuffer.position());
    }
    gzos.close();

    byte[] serializedData = baos.toByteArray();
    out.writeInt(serializedData.length);
    out.write(serializedData);
}

From source file:me.vertretungsplan.parser.DSBMobileParser.java

private static String encode(String input) throws IOException {
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    OutputStream gzipOs = new GZIPOutputStream(os);
    gzipOs.write(input.getBytes(ENCODING));
    gzipOs.close();
    byte[] outputBytes = os.toByteArray();
    return Base64.encodeBase64String(outputBytes);
}

From source file:GZIPUtils.java

/**
 * Returns a gzipped copy of the input array.
 */
public static final byte[] zip(byte[] in) {
    try {
        // compress using GZIPOutputStream 
        ByteArrayOutputStream byteOut = new ByteArrayOutputStream(in.length / EXPECTED_COMPRESSION_RATIO);

        GZIPOutputStream outStream = new GZIPOutputStream(byteOut);

        try {
            outStream.write(in);
        } catch (Exception e) {
            // write errors are deliberately swallowed; any bytes already written are returned
        }

        try {
            outStream.close();
        } catch (IOException e) {
            // close errors are deliberately swallowed
        }

        return byteOut.toByteArray();

    } catch (IOException e) {

        return null;
    }
}

From source file:com.intuit.tank.persistence.databases.MetricsCalculator.java

/**
 * @param jobId
 * @param start
 * @param end
 * {@inheritDoc}
 */
public void retrieveAndCalculateTimingData(@Nonnull String jobId, Date start, Date end) {
    MethodTimer mt = new MethodTimer(LOG, this.getClass(), "retrieveAndCalculateSummaryTimingCsv");
    int period = 15;
    Writer csvFile = null;
    CSVWriter csvWriter = null;
    InputStream is = null;
    try {
        ResultsReader resultsReader = ReportingFactory.getResultsReader();
        String fileName = "timing_" + new TankConfig().getInstanceName() + "_" + jobId + ".csv.gz";
        File f = File.createTempFile("timing", ".csv.gz");
        csvFile = new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(f)));
        csvWriter = new CSVWriter(csvFile);
        int count = 0;
        Object nextToken = null;
        csvWriter.writeNext(FIELDS);
        do {
            PagedTimingResults results = resultsReader.getPagedTimingResults(jobId, nextToken);
            for (TankResult result : results.getResults()) {
                count++;
                String[] entryArray = getCsvArray(result);
                csvWriter.writeNext(entryArray);
                if (count % 1000 == 0) {
                    csvWriter.flush();
                }

                double d = result.getResponseTime();
                if (!skipDate(result.getTimeStamp(), start, end)) {
                    DescriptiveStatistics statistics = summaryResults.get(result.getRequestName());
                    if (statistics == null) {
                        statistics = new DescriptiveStatistics();
                        summaryResults.put(result.getRequestName(), statistics);
                    }
                    statistics.addValue(d);
                }
                if (result.getTimeStamp() != null) {
                    Date periodDate = TimeUtil.normalizeToPeriod(period, result.getTimeStamp());
                    DescriptiveStatistics bucketStats = getBucketStats(result.getRequestName(), period,
                            periodDate);
                    bucketStats.addValue(d);
                }

            }
            nextToken = results.getNextToken();
        } while (nextToken != null);
        csvWriter.flush();
        csvWriter.close();
        csvWriter = null;
        IOUtils.closeQuietly(csvFile);
        FileStorage fileStorage = FileStorageFactory.getFileStorage(new TankConfig().getTimingDir(), false);
        FileData fd = new FileData("", fileName);
        is = new FileInputStream(f);
        fileStorage.storeFileData(fd, is);
        mt.endAndLog();
        LOG.info("Processed " + count + " total items for job " + jobId);
    } catch (Exception e) {
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        }
        throw new RuntimeException(e);
    } finally {
        if (csvWriter != null) {
            try {
                csvWriter.close();
            } catch (IOException e) {
                // swallow
                LOG.warn("Error closing csv file: " + e);
            }
        }
        IOUtils.closeQuietly(csvFile);
        IOUtils.closeQuietly(is);
    }
}

From source file:net.solarnetwork.node.support.JsonHttpClientSupport.java

/**
 * Perform a JSON HTTP request.
 * 
 * @param url
 *        the URL to make the request to
 * @param method
 *        the HTTP method, e.g. {@link HttpClientSupport#HTTP_METHOD_GET}
 * @param data
 *        the optional data to marshall to JSON and upload as the request
 *        content
 * @return the InputStream for the HTTP response
 * @throws IOException
 *         if any IO error occurs
 */
protected final InputStream doJson(String url, String method, Object data) throws IOException {
    URLConnection conn = getURLConnection(url, method, JSON_MIME_TYPE);
    if (data != null) {
        conn.setRequestProperty("Content-Type", JSON_MIME_TYPE + ";charset=UTF-8");
        if (compress) {
            conn.setRequestProperty("Content-Encoding", "gzip");
        }
        OutputStream out = conn.getOutputStream();
        if (compress) {
            out = new GZIPOutputStream(out);
        }

        if (log.isDebugEnabled()) {
            log.debug("Posting JSON data: {}",
                    objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(data));
        }
        objectMapper.writeValue(out, data);
        out.flush();
        out.close();
    }

    return getInputStreamFromURLConnection(conn);
}

From source file:ezbake.deployer.cli.commands.SSLCertsCommand.java

@Override
public void call() throws IOException, TException {
    String[] args = globalParameters.unparsedArgs;
    minExpectedArgs(2, args, this);
    String securityId = args[0];
    String filePath = args[1];

    List<ArtifactDataEntry> certs = new ArrayList<>();
    EzSecurityRegistration.Client client = null;
    ThriftClientPool pool = poolSupplier.get();
    try {
        client = pool.getClient(EzSecurityRegistrationConstants.SERVICE_NAME,
                EzSecurityRegistration.Client.class);

        AppCerts s = client.getAppCerts(
                getSecurityToken(pool.getSecurityId(EzSecurityRegistrationConstants.SERVICE_NAME)), securityId);
        for (AppCerts._Fields fields : AppCerts._Fields.values()) {
            Object o = s.getFieldValue(fields);
            if (o instanceof byte[]) {
                String fieldName = fields.getFieldName().replace("_", ".");
                TarArchiveEntry tae = new TarArchiveEntry(
                        new File(new File(SSL_CONFIG_DIRECTORY, securityId), fieldName));
                certs.add(new ArtifactDataEntry(tae, (byte[]) o));
            }
        }

        ArchiveStreamFactory asf = new ArchiveStreamFactory();
        FileOutputStream fos = new FileOutputStream(filePath);
        GZIPOutputStream gzs = new GZIPOutputStream(fos);
        try (TarArchiveOutputStream aos = (TarArchiveOutputStream) asf
                .createArchiveOutputStream(ArchiveStreamFactory.TAR, gzs)) {
            aos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);

            for (ArtifactDataEntry entry : certs) {
                aos.putArchiveEntry(entry.getEntry());
                IOUtils.write(entry.getData(), aos);
                aos.closeArchiveEntry();
            }
            aos.finish();
            gzs.finish();
        } catch (ArchiveException ex) {
            throw new DeploymentException(ex.getMessage());
        } finally {
            IOUtils.closeQuietly(fos);
        }
    } finally {
        pool.returnToPool(client);
    }
}

From source file:NGzipCompressingEntity.java

public void writeTo(final OutputStream outstream) throws IOException {
    if (outstream == null) {
        throw new IllegalArgumentException("Output stream may not be null");
    }
    System.out.println("Writing gzip");
    GZIPOutputStream gzip = new GZIPOutputStream(outstream);
    InputStream in = wrappedEntity.getContent();
    byte[] tmp = new byte[2048];
    int l;
    while ((l = in.read(tmp)) != -1) {
        gzip.write(tmp, 0, l);
    }
    gzip.close();
}

From source file:com.panet.imeta.trans.steps.blockingstep.BlockingStep.java

private boolean addBuffer(RowMetaInterface rowMeta, Object[] r) {
    if (r != null) {
        data.buffer.add(r); // Save row
    }

    // Time to write to disk: buffer in core is full!
    if (data.buffer.size() == meta.getCacheSize() // Buffer is full: dump to disk 
            || (data.files.size() > 0 && r == null && data.buffer.size() > 0) // No more records: join from disk 
    ) {
        // Then write them to disk...
        DataOutputStream dos;
        GZIPOutputStream gzos;
        int p;

        try {
            FileObject fileObject = KettleVFS.createTempFile(meta.getPrefix(), ".tmp",
                    environmentSubstitute(meta.getDirectory()));

            data.files.add(fileObject); // Remember the files!
            OutputStream outputStream = KettleVFS.getOutputStream(fileObject, false);
            if (meta.getCompress()) {
                gzos = new GZIPOutputStream(new BufferedOutputStream(outputStream));
                dos = new DataOutputStream(gzos);
            } else {
                dos = new DataOutputStream(outputStream);
                gzos = null;
            }

            // How many records do we have?
            dos.writeInt(data.buffer.size());

            for (p = 0; p < data.buffer.size(); p++) {
                // Just write the data, nothing else
                rowMeta.writeData(dos, (Object[]) data.buffer.get(p));
            }
            // Close temp-file
            dos.close(); // close data stream
            if (gzos != null) {
                gzos.close(); // close gzip stream
            }
            outputStream.close(); // close file stream
        } catch (Exception e) {
            logError("Error processing tmp-file: " + e.toString());
            return false;
        }

        data.buffer.clear();
    }

    return true;
}

From source file:com.ery.ertc.estorm.util.GZIPUtils.java

/**
 * Returns a gzipped copy of the input array.
 */
public static final byte[] zip(byte[] in) {
    try {
        // compress using GZIPOutputStream
        ByteArrayOutputStream byteOut = new ByteArrayOutputStream(in.length / EXPECTED_COMPRESSION_RATIO);

        GZIPOutputStream outStream = new GZIPOutputStream(byteOut);

        try {
            outStream.write(in);
        } catch (Exception e) {
            LOG.error("Failed to get outStream.write input", e);
        }

        try {
            outStream.close();
        } catch (IOException e) {
            LOG.error("Failed to implement outStream.close", e);
        }

        return byteOut.toByteArray();

    } catch (IOException e) {
        LOG.error("Failed with IOException", e);
        return null;
    }
}