Usage examples for the java.util.zip.Deflater constructor
public Deflater(int level)
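The level argument ranges from 0 (Deflater.NO_COMPRESSION) to 9 (Deflater.BEST_COMPRESSION); Deflater.DEFAULT_COMPRESSION (-1) leaves the choice to zlib. Before the project examples, here is a minimal, self-contained sketch of the constructor on its own, looping deflate() so the output is not limited to one fixed buffer (class, method, and variable names are illustrative only):

import java.io.ByteArrayOutputStream;
import java.util.zip.Deflater;

public class DeflaterLevelExample {

    // Compresses the given bytes with the requested compression level.
    static byte[] compress(byte[] input, int level) {
        Deflater deflater = new Deflater(level);
        try {
            deflater.setInput(input);
            deflater.finish();                          // no more input will follow

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] buffer = new byte[4096];
            while (!deflater.finished()) {              // drain until the compressed stream is complete
                int n = deflater.deflate(buffer);
                out.write(buffer, 0, n);
            }
            return out.toByteArray();
        } finally {
            deflater.end();                             // release the native zlib memory
        }
    }

    public static void main(String[] args) {
        byte[] data = "hello hello hello hello".getBytes();
        System.out.println(compress(data, Deflater.BEST_COMPRESSION).length + " compressed bytes");
    }
}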
From source file: org.graylog.plugins.beats.BeatsFrameDecoderTest.java

private ChannelBuffer buildCompressedFrame(byte[] payload, int compressionLevel) {
    final Deflater deflater = new Deflater(compressionLevel);
    deflater.setInput(payload);
    deflater.finish();

    final byte[] compressedPayload = new byte[1024];
    final int compressedPayloadLength = deflater.deflate(compressedPayload);
    deflater.end();

    final ChannelBuffer buffer = ChannelBuffers.buffer(6 + compressedPayloadLength);
    buffer.writeByte('2');
    buffer.writeByte('C');
    // Compressed payload length
    buffer.writeInt(compressedPayloadLength);
    // Compressed payload
    buffer.writeBytes(compressedPayload, 0, compressedPayloadLength);
    return buffer;
}
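Note that this test helper deflates into a single fixed 1024-byte buffer, which is fine for the small payloads used in the test; inputs whose compressed form could exceed the buffer would need deflate() called in a loop until finished() reports true, as in the sketch after the constructor signature above.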
From source file: org.diorite.impl.world.io.anvil.AnvilRegion.java

private NbtOutputStream getOutputStream(final int x, final int z, final byte version) throws IOException {
    this.checkBounds(x, z);
    if (version == VERSION_GZIP) {
        return new NbtOutputStream(new BufferedOutputStream(new GZIPOutputStream(new ChunkBuffer(x, z))));
    }
    if (version == VERSION_DEFLATE) {
        return new NbtOutputStream(new BufferedOutputStream(
                new DeflaterOutputStream(new ChunkBuffer(x, z), new Deflater(Deflater.BEST_SPEED))));
    }
    throw new RuntimeException("Unknown version: " + version);
}
From source file: PNGDecoder.java

/**
 * Main encoding method (stays blocked till encoding is finished).
 *
 * @param image BufferedImage to encode
 * @throws IOException IOException
 */
public void encode(BufferedImage image) throws IOException {
    int width = image.getWidth(null);
    int height = image.getHeight(null);
    final byte id[] = { -119, 80, 78, 71, 13, 10, 26, 10, 0, 0, 0, 13 };
    write(id);
    crc.reset();
    write("IHDR".getBytes());
    write(width);
    write(height);
    byte head[] = null;
    switch (mode) {
    case BW_MODE:
        head = new byte[] { 1, 0, 0, 0, 0 };
        break;
    case GREYSCALE_MODE:
        head = new byte[] { 8, 0, 0, 0, 0 };
        break;
    case COLOR_MODE:
        head = new byte[] { 8, 2, 0, 0, 0 };
        break;
    }
    write(head);
    write((int) crc.getValue());
    ByteArrayOutputStream compressed = new ByteArrayOutputStream(65536);
    BufferedOutputStream bos = new BufferedOutputStream(
            new DeflaterOutputStream(compressed, new Deflater(9)));
    int pixel;
    int color;
    int colorset;
    switch (mode) {
    case BW_MODE:
        int rest = width % 8;
        int bytes = width / 8;
        for (int y = 0; y < height; y++) {
            bos.write(0);
            for (int x = 0; x < bytes; x++) {
                colorset = 0;
                for (int sh = 0; sh < 8; sh++) {
                    pixel = image.getRGB(x * 8 + sh, y);
                    color = ((pixel >> 16) & 0xff);
                    color += ((pixel >> 8) & 0xff);
                    color += (pixel & 0xff);
                    colorset <<= 1;
                    if (color >= 3 * 128)
                        colorset |= 1;
                }
                bos.write((byte) colorset);
            }
            if (rest > 0) {
                colorset = 0;
                for (int sh = 0; sh < width % 8; sh++) {
                    pixel = image.getRGB(bytes * 8 + sh, y);
                    color = ((pixel >> 16) & 0xff);
                    color += ((pixel >> 8) & 0xff);
                    color += (pixel & 0xff);
                    colorset <<= 1;
                    if (color >= 3 * 128)
                        colorset |= 1;
                }
                colorset <<= 8 - rest;
                bos.write((byte) colorset);
            }
        }
        break;
    case GREYSCALE_MODE:
        for (int y = 0; y < height; y++) {
            bos.write(0);
            for (int x = 0; x < width; x++) {
                pixel = image.getRGB(x, y);
                color = ((pixel >> 16) & 0xff);
                color += ((pixel >> 8) & 0xff);
                color += (pixel & 0xff);
                bos.write((byte) (color / 3));
            }
        }
        break;
    case COLOR_MODE:
        for (int y = 0; y < height; y++) {
            bos.write(0);
            for (int x = 0; x < width; x++) {
                pixel = image.getRGB(x, y);
                bos.write((byte) ((pixel >> 16) & 0xff));
                bos.write((byte) ((pixel >> 8) & 0xff));
                bos.write((byte) (pixel & 0xff));
            }
        }
        break;
    }
    bos.close();
    write(compressed.size());
    crc.reset();
    write("IDAT".getBytes());
    write(compressed.toByteArray());
    write((int) crc.getValue());
    write(0);
    crc.reset();
    write("IEND".getBytes());
    write((int) crc.getValue());
    out.close();
}
From source file: com.nary.Debug.java

public static void saveClass(OutputStream _out, Object _class, boolean _compress) throws IOException {
    if (_compress) {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream OOS = new ObjectOutputStream(bos);
        OOS.writeObject(_class);
        byte[] dataArray = bos.toByteArray();
        byte[] test = new byte[dataArray.length];

        // this is where the byte array gets compressed to
        Deflater def = new Deflater(Deflater.BEST_COMPRESSION);
        def.setInput(dataArray);
        def.finish();
        def.deflate(test);

        _out.write(test, 0, def.getTotalOut());
    } else {
        ObjectOutputStream OS = new ObjectOutputStream(_out);
        OS.writeObject(_class);
    }
}
From source file: PngEncoder.java

/**
 * Write the image data into the pngBytes array.
 * This will write one or more PNG "IDAT" chunks. In order
 * to conserve memory, this method grabs as many rows as will
 * fit into 32K bytes, or the whole image; whichever is less.
 *
 * @return true if no errors; false if error grabbing pixels
 */
protected boolean writeImageData() {
    int rowsLeft = this.height;  // number of rows remaining to write
    int startRow = 0;            // starting row to process this time through
    int nRows;                   // how many rows to grab at a time
    byte[] scanLines;            // the scan lines to be compressed
    int scanPos;                 // where we are in the scan lines
    int startPos;                // where this line's actual pixels start (used for filtering)
    byte[] compressedLines;      // the resultant compressed lines
    int nCompressed;             // how big is the compressed area?
    //int depth;                 // color depth ( handle only 8 or 32 )
    PixelGrabber pg;

    this.bytesPerPixel = (this.encodeAlpha) ? 4 : 3;

    Deflater scrunch = new Deflater(this.compressionLevel);
    ByteArrayOutputStream outBytes = new ByteArrayOutputStream(1024);
    DeflaterOutputStream compBytes = new DeflaterOutputStream(outBytes, scrunch);
    try {
        while (rowsLeft > 0) {
            nRows = Math.min(32767 / (this.width * (this.bytesPerPixel + 1)), rowsLeft);
            nRows = Math.max(nRows, 1);

            int[] pixels = new int[this.width * nRows];
            pg = new PixelGrabber(this.image, 0, startRow, this.width, nRows, pixels, 0, this.width);
            try {
                pg.grabPixels();
            } catch (Exception e) {
                System.err.println("interrupted waiting for pixels!");
                return false;
            }
            if ((pg.getStatus() & ImageObserver.ABORT) != 0) {
                System.err.println("image fetch aborted or errored");
                return false;
            }

            /*
             * Create a data chunk. scanLines adds "nRows" for
             * the filter bytes.
             */
            scanLines = new byte[this.width * nRows * this.bytesPerPixel + nRows];

            if (this.filter == FILTER_SUB) {
                this.leftBytes = new byte[16];
            }
            if (this.filter == FILTER_UP) {
                this.priorRow = new byte[this.width * this.bytesPerPixel];
            }

            scanPos = 0;
            startPos = 1;
            for (int i = 0; i < this.width * nRows; i++) {
                if (i % this.width == 0) {
                    scanLines[scanPos++] = (byte) this.filter;
                    startPos = scanPos;
                }
                scanLines[scanPos++] = (byte) ((pixels[i] >> 16) & 0xff);
                scanLines[scanPos++] = (byte) ((pixels[i] >> 8) & 0xff);
                scanLines[scanPos++] = (byte) ((pixels[i]) & 0xff);
                if (this.encodeAlpha) {
                    scanLines[scanPos++] = (byte) ((pixels[i] >> 24) & 0xff);
                }
                if ((i % this.width == this.width - 1) && (this.filter != FILTER_NONE)) {
                    if (this.filter == FILTER_SUB) {
                        filterSub(scanLines, startPos, this.width);
                    }
                    if (this.filter == FILTER_UP) {
                        filterUp(scanLines, startPos, this.width);
                    }
                }
            }

            /*
             * Write these lines to the output area
             */
            compBytes.write(scanLines, 0, scanPos);

            startRow += nRows;
            rowsLeft -= nRows;
        }
        compBytes.close();

        /*
         * Write the compressed bytes
         */
        compressedLines = outBytes.toByteArray();
        nCompressed = compressedLines.length;

        this.crc.reset();
        this.bytePos = writeInt4(nCompressed, this.bytePos);
        this.bytePos = writeBytes(IDAT, this.bytePos);
        this.crc.update(IDAT);
        this.bytePos = writeBytes(compressedLines, nCompressed, this.bytePos);
        this.crc.update(compressedLines, 0, nCompressed);

        this.crcValue = this.crc.getValue();
        this.bytePos = writeInt4((int) this.crcValue, this.bytePos);
        scrunch.finish();
        scrunch.end();
        return true;
    } catch (IOException e) {
        System.err.println(e.toString());
        return false;
    }
}
From source file: PngEncoder.java

/**
 * Writes the IDAT (Image data) chunks to the output stream.
 *
 * @param out  the OutputStream to write the chunk to
 * @param csum the Checksum that is updated as data is written
 *             to the passed-in OutputStream
 * @throws IOException if a problem is encountered writing the output
 */
private void writeIdatChunks(OutputStream out, Checksum csum) throws IOException {
    int rowWidth = width * outputBpp; // size of image data in a row in bytes.
    int row = 0;

    Deflater deflater = new Deflater(compressionLevel);
    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
    DeflaterOutputStream defOut = new DeflaterOutputStream(byteOut, deflater);

    byte[] filteredPixelQueue = new byte[rowWidth];

    // Output Pixel Queues
    byte[][] outputPixelQueue = new byte[2][rowWidth];
    Arrays.fill(outputPixelQueue[1], (byte) 0);
    int outputPixelQueueRow = 0;
    int outputPixelQueuePrevRow = 1;

    while (row < height) {
        if (filter == null) {
            defOut.write(0);
            translator.translate(outputPixelQueue[outputPixelQueueRow], row);
            defOut.write(outputPixelQueue[outputPixelQueueRow], 0, rowWidth);
        } else {
            defOut.write(filter.getType());
            translator.translate(outputPixelQueue[outputPixelQueueRow], row);
            filter.filter(filteredPixelQueue, outputPixelQueue[outputPixelQueueRow],
                    outputPixelQueue[outputPixelQueuePrevRow], outputBpp);
            defOut.write(filteredPixelQueue, 0, rowWidth);
        }

        ++row;
        outputPixelQueueRow = row & 1;
        outputPixelQueuePrevRow = outputPixelQueueRow ^ 1;
    }
    defOut.finish();
    byteOut.close();

    writeInt(out, byteOut.size());
    csum.reset();
    out.write(IDAT);
    byteOut.writeTo(out);
    writeInt(out, (int) csum.getValue());
}
From source file: org.pentaho.reporting.libraries.base.util.PngEncoder.java

/**
 * Write the image data into the pngBytes array. This will write one or more PNG "IDAT" chunks. In order to conserve
 * memory, this method grabs as many rows as will fit into 32K bytes, or the whole image; whichever is less.
 *
 * @return true if no errors; false if error grabbing pixels
 */
protected boolean writeImageData() {
    this.bytesPerPixel = (this.encodeAlpha) ? 4 : 3;

    final Deflater scrunch = new Deflater(this.compressionLevel);
    final ByteArrayOutputStream outBytes = new ByteArrayOutputStream(1024);
    final DeflaterOutputStream compBytes = new DeflaterOutputStream(outBytes, scrunch);
    try {
        int startRow = 0;            // starting row to process this time through
        //noinspection SuspiciousNameCombination
        int rowsLeft = this.height;  // number of rows remaining to write
        while (rowsLeft > 0) {
            final int nRows = Math.max(Math.min(32767 / (this.width * (this.bytesPerPixel + 1)), rowsLeft), 1);

            final int[] pixels = new int[this.width * nRows];
            final PixelGrabber pg = new PixelGrabber(this.image, 0, startRow, this.width, nRows,
                    pixels, 0, this.width);
            try {
                pg.grabPixels();
            } catch (Exception e) {
                logger.error("interrupted waiting for pixels!", e);
                return false;
            }
            if ((pg.getStatus() & ImageObserver.ABORT) != 0) {
                logger.error("image fetch aborted or errored");
                return false;
            }

            /*
             * Create a data chunk. scanLines adds "nRows" for
             * the filter bytes.
             */
            final byte[] scanLines = new byte[this.width * nRows * this.bytesPerPixel + nRows];

            if (this.filter == PngEncoder.FILTER_SUB) {
                this.leftBytes = new byte[16];
            }
            if (this.filter == PngEncoder.FILTER_UP) {
                this.priorRow = new byte[this.width * this.bytesPerPixel];
            }

            int scanPos = 0;
            int startPos = 1;
            for (int i = 0; i < this.width * nRows; i++) {
                if (i % this.width == 0) {
                    scanLines[scanPos++] = (byte) this.filter;
                    startPos = scanPos;
                }
                scanLines[scanPos++] = (byte) ((pixels[i] >> 16) & 0xff);
                scanLines[scanPos++] = (byte) ((pixels[i] >> 8) & 0xff);
                scanLines[scanPos++] = (byte) ((pixels[i]) & 0xff);
                if (this.encodeAlpha) {
                    scanLines[scanPos++] = (byte) ((pixels[i] >> 24) & 0xff);
                }
                if ((i % this.width == this.width - 1) && (this.filter != PngEncoder.FILTER_NONE)) {
                    if (this.filter == PngEncoder.FILTER_SUB) {
                        filterSub(scanLines, startPos, this.width);
                    }
                    if (this.filter == PngEncoder.FILTER_UP) {
                        filterUp(scanLines, startPos, this.width);
                    }
                }
            }

            /*
             * Write these lines to the output area
             */
            compBytes.write(scanLines, 0, scanPos);

            startRow += nRows;
            rowsLeft -= nRows;
        }
        compBytes.close();

        /*
         * Write the compressed bytes
         */
        final byte[] compressedLines = outBytes.toByteArray();
        final int nCompressed = compressedLines.length;

        this.crc.reset();
        this.bytePos = writeInt4(nCompressed, this.bytePos);
        this.bytePos = writeBytes(PngEncoder.IDAT, this.bytePos);
        this.crc.update(PngEncoder.IDAT);
        this.bytePos = writeBytes(compressedLines, nCompressed, this.bytePos);
        this.crc.update(compressedLines, 0, nCompressed);

        this.crcValue = this.crc.getValue();
        this.bytePos = writeInt4((int) this.crcValue, this.bytePos);
        return true;
    } catch (IOException e) {
        logger.error("Failed to write PNG Data", e);
        return false;
    } finally {
        scrunch.finish();
        scrunch.end();
    }
}
From source file: org.ajax4jsf.resource.ResourceBuilderImpl.java

protected byte[] encrypt(byte[] src) {
    try {
        Deflater compressor = new Deflater(Deflater.BEST_SPEED);
        byte[] compressed = new byte[src.length + 100];
        compressor.setInput(src);
        compressor.finish();
        int totalOut = compressor.deflate(compressed);
        byte[] zipsrc = new byte[totalOut];
        System.arraycopy(compressed, 0, zipsrc, 0, totalOut);
        compressor.end();
        return codec.encode(zipsrc);
    } catch (Exception e) {
        throw new FacesException("Error encode resource data", e);
    }
}
From source file: org.apache.hadoop.hive.ql.exec.Utilities.java

private static Path setBaseWork(Configuration conf, BaseWork w, Path hiveScratchDir, String name,
        boolean useCache) {
    Kryo kryo = SerializationUtilities.borrowKryo();
    try {
        setPlanPath(conf, hiveScratchDir);

        Path planPath = getPlanPath(conf, name);
        setHasWork(conf, name);

        OutputStream out = null;

        final long serializedSize;
        final String planMode;
        if (HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
            // add it to the conf
            ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
            try {
                out = new DeflaterOutputStream(byteOut, new Deflater(Deflater.BEST_SPEED));
                SerializationUtilities.serializePlan(kryo, w, out);
                out.close();
                out = null;
            } finally {
                IOUtils.closeStream(out);
            }
            final String serializedPlan = Base64.encodeBase64String(byteOut.toByteArray());
            serializedSize = serializedPlan.length();
            planMode = "RPC";
            conf.set(planPath.toUri().getPath(), serializedPlan);
        } else {
            // use the default file system of the conf
            FileSystem fs = planPath.getFileSystem(conf);
            try {
                out = fs.create(planPath);
                SerializationUtilities.serializePlan(kryo, w, out);
                out.close();
                out = null;
                long fileLen = fs.getFileStatus(planPath).getLen();
                serializedSize = fileLen;
                planMode = "FILE";
            } finally {
                IOUtils.closeStream(out);
            }

            // Serialize the plan to the default hdfs instance
            // Except for hadoop local mode execution where we should be
            // able to get the plan directly from the cache
            if (useCache && !ShimLoader.getHadoopShims().isLocalMode(conf)) {
                // Set up distributed cache
                if (!DistributedCache.getSymlink(conf)) {
                    DistributedCache.createSymlink(conf);
                }
                String uriWithLink = planPath.toUri().toString() + "#" + name;
                DistributedCache.addCacheFile(new URI(uriWithLink), conf);

                // set replication of the plan file to a high number. we use the same
                // replication factor as used by the hadoop jobclient for job.xml etc.
                short replication = (short) conf.getInt("mapred.submit.replication", 10);
                fs.setReplication(planPath, replication);
            }
        }

        LOG.info("Serialized plan (via {}) - name: {} size: {}", planMode, w.getName(),
                humanReadableByteCount(serializedSize));

        // Cache the plan in this process
        gWorkMap.get(conf).put(planPath, w);
        return planPath;
    } catch (Exception e) {
        String msg = "Error caching " + name + ": " + e;
        LOG.error(msg, e);
        throw new RuntimeException(msg, e);
    } finally {
        SerializationUtilities.releaseKryo(kryo);
    }
}
From source file: com.enonic.esl.xml.XMLTool.java

public static byte[] documentToDeflatedBytes(Document doc) {
    ByteArrayOutputStream out = new ByteArrayOutputStream(4096);
    Deflater deflater = new Deflater(Deflater.BEST_COMPRESSION);
    DeflaterOutputStream dos = new DeflaterOutputStream(out, deflater);

    printDocument(dos, doc, null, 0, false);

    try {
        dos.close();
    } catch (IOException e) {
        throw new XMLToolException("Failed to close deflater output stream", e);
    }

    return out.toByteArray();
}
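The example above hands its own Deflater to the DeflaterOutputStream. In the JDK, DeflaterOutputStream.close() only calls end() on a deflater it created itself, so a caller-supplied Deflater keeps its native zlib buffers until end() is invoked or the object is collected. Below is a hedged sketch of the same idea with explicit cleanup; it assumes the printDocument helper and XMLToolException seen above and is not the Enonic implementation:

// Sketch only: same compression, but the Deflater is always ended and the
// stream closed, even if writing the document fails part-way.
public static byte[] documentToDeflatedBytesSafely(Document doc) {
    ByteArrayOutputStream out = new ByteArrayOutputStream(4096);
    Deflater deflater = new Deflater(Deflater.BEST_COMPRESSION);
    try (DeflaterOutputStream dos = new DeflaterOutputStream(out, deflater)) {
        printDocument(dos, doc, null, 0, false);   // assumed helper from the class above
    } catch (IOException e) {
        throw new XMLToolException("Failed to deflate document", e);
    } finally {
        deflater.end();                            // close() does not end() a caller-supplied Deflater
    }
    return out.toByteArray();
}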