List of usage examples for java.io DataInputStream readFully
public final void readFully(byte b[], int off, int len) throws IOException
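readFully(byte[] b, int off, int len) blocks until exactly len bytes have been read into b starting at offset off; it throws EOFException if the stream ends before len bytes arrive and IOException on other I/O errors. Before the source-file examples below, here is a minimal sketch of the typical length-prefixed read pattern; the class name and the file record.bin are hypothetical, chosen only for illustration.

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.FileInputStream;
import java.io.IOException;

public class ReadFullyExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical file layout: a 4-byte big-endian length prefix followed by that many payload bytes.
        try (DataInputStream in = new DataInputStream(new FileInputStream("record.bin"))) {
            int len = in.readInt();          // read the length prefix
            byte[] payload = new byte[len];
            // Unlike read(), readFully keeps reading until all 'len' bytes are in the buffer,
            // or throws EOFException if the stream ends early.
            in.readFully(payload, 0, len);
            System.out.println("Read " + payload.length + " bytes");
        } catch (EOFException e) {
            System.err.println("Stream ended before the full record was read");
        }
    }
}

Several of the examples below (readLongUTF, readParametersFromR) use this same length-prefix-then-readFully pattern.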
From source file: org.openamf.io.AMFDeserializer.java
/**
 * This is a hacked version of Java's DataInputStream.readUTF(), which only
 * supports Strings <= 65535 UTF-8-encoded characters.
 */
private Object readLongUTF(DataInputStream in) throws IOException {
    int utflen = in.readInt();
    StringBuffer str = new StringBuffer(utflen);
    byte bytearr[] = new byte[utflen];
    int c, char2, char3;
    int count = 0;
    in.readFully(bytearr, 0, utflen);
    while (count < utflen) {
        c = (int) bytearr[count] & 0xff;
        switch (c >> 4) {
        case 0:
        case 1:
        case 2:
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
            /* 0xxxxxxx */
            count++;
            str.append((char) c);
            break;
        case 12:
        case 13:
            /* 110x xxxx 10xx xxxx */
            count += 2;
            if (count > utflen)
                throw new UTFDataFormatException();
            char2 = (int) bytearr[count - 1];
            if ((char2 & 0xC0) != 0x80)
                throw new UTFDataFormatException();
            str.append((char) (((c & 0x1F) << 6) | (char2 & 0x3F)));
            break;
        case 14:
            /* 1110 xxxx 10xx xxxx 10xx xxxx */
            count += 3;
            if (count > utflen)
                throw new UTFDataFormatException();
            char2 = (int) bytearr[count - 2];
            char3 = (int) bytearr[count - 1];
            if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80))
                throw new UTFDataFormatException();
            str.append((char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | ((char3 & 0x3F) << 0)));
            break;
        default:
            /* 10xx xxxx, 1111 xxxx */
            throw new UTFDataFormatException();
        }
    }
    // The number of chars produced may be less than utflen
    return new String(str);
}
From source file: org.godhuli.rhipe.RHMR.java
public void readParametersFromR(String configfile) throws IOException {
    FileInputStream in = new FileInputStream(configfile);
    DataInputStream fin = new DataInputStream(in);
    byte[] d;
    String key, value;
    int n0 = fin.readInt(), n;
    for (int i = 0; i < n0; i++) {
        // R Writes Null Terminated Strings (when I dont use char2Raw)
        try {
            n = fin.readInt();
            d = new byte[n];
            fin.readFully(d, 0, d.length);
            key = new String(d);
            n = fin.readInt();
            d = new byte[n];
            fin.readFully(d, 0, d.length);
            value = new String(d);
            rhoptions_.put(key, value);
        } catch (EOFException e) {
            throw new IOException(e);
        }
    }
    fin.close();
    if (debug_) {
        Enumeration keys = rhoptions_.keys();
        while (keys.hasMoreElements()) {
            String key0 = (String) keys.nextElement();
            String value0 = (String) rhoptions_.get(key0);
            System.out.println(key0 + "=" + value0);
        }
    }
}
From source file: com.exadel.flamingo.flex.messaging.amf.io.AMF0Deserializer.java
/**
 * This is a hacked version of Java's DataInputStream.readUTF(), which only
 * supports Strings <= 65535 UTF-8-encoded characters.
 */
private Object readLongUTF(DataInputStream in) throws IOException {
    int utflen = in.readInt();
    StringBuffer str = new StringBuffer(utflen);
    byte bytearr[] = new byte[utflen];
    int c, char2, char3;
    int count = 0;
    in.readFully(bytearr, 0, utflen);
    while (count < utflen) {
        c = bytearr[count] & 0xff;
        switch (c >> 4) {
        case 0:
        case 1:
        case 2:
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
            /* 0xxxxxxx */
            count++;
            str.append((char) c);
            break;
        case 12:
        case 13:
            /* 110x xxxx 10xx xxxx */
            count += 2;
            if (count > utflen)
                throw new UTFDataFormatException();
            char2 = bytearr[count - 1];
            if ((char2 & 0xC0) != 0x80)
                throw new UTFDataFormatException();
            str.append((char) (((c & 0x1F) << 6) | (char2 & 0x3F)));
            break;
        case 14:
            /* 1110 xxxx 10xx xxxx 10xx xxxx */
            count += 3;
            if (count > utflen)
                throw new UTFDataFormatException();
            char2 = bytearr[count - 2];
            char3 = bytearr[count - 1];
            if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80))
                throw new UTFDataFormatException();
            str.append((char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | ((char3 & 0x3F) << 0)));
            break;
        default:
            /* 10xx xxxx, 1111 xxxx */
            throw new UTFDataFormatException();
        }
    }
    // The number of chars produced may be less than utflen
    return new String(str);
}
From source file: com.trigger_context.Main_Service.java
private void sendFile(DataOutputStream out, String Path) {
    Log.i(Main_Service.LOG_TAG, "SendFile--Start");
    File infile = new File(Path);
    String FileName = null;
    try {
        FileName = Path.substring(Path.lastIndexOf("/") + 1);
        out.writeUTF(FileName);
        out.writeLong(infile.length());
    } catch (IOException e) {
        Log.i(Main_Service.LOG_TAG, "SendFile--error sending filename length");
    }
    byte[] mybytearray = new byte[(int) infile.length()];
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(infile);
    } catch (FileNotFoundException e1) {
        Log.i(Main_Service.LOG_TAG, "sendFile--Error file not found");
    }
    BufferedInputStream bis = new BufferedInputStream(fis);
    DataInputStream dis = new DataInputStream(bis);
    try {
        dis.readFully(mybytearray, 0, mybytearray.length);
    } catch (IOException e1) {
        Log.i(Main_Service.LOG_TAG, "sendFile--Error while reading bytes from file");
    }
    try {
        out.write(mybytearray, 0, mybytearray.length);
    } catch (IOException e1) {
        Log.i(Main_Service.LOG_TAG, "sendFile--error while sending");
    }
    try {
        dis.close();
        bis.close();
        fis.close();
    } catch (IOException e) {
        Log.i(Main_Service.LOG_TAG, "sendFile--error in closing streams");
    }
}
From source file: org.apache.fop.render.pdf.ImageRawPNGAdapter.java
/** {@inheritDoc} */
public void outputContents(OutputStream out) throws IOException {
    InputStream in = ((ImageRawStream) image).createInputStream();
    try {
        if (numberOfInterleavedComponents == 1 || numberOfInterleavedComponents == 3) {
            // means we have Gray, RGB, or Palette
            IOUtils.copy(in, out);
        } else {
            // means we have Gray + alpha or RGB + alpha
            // TODO: since we have alpha here do this when the alpha channel is extracted
            int numBytes = numberOfInterleavedComponents - 1; // 1 for Gray, 3 for RGB
            int numColumns = image.getSize().getWidthPx();
            InflaterInputStream infStream = new InflaterInputStream(in, new Inflater());
            DataInputStream dataStream = new DataInputStream(infStream);
            int offset = 0;
            int bytesPerRow = numberOfInterleavedComponents * numColumns;
            int filter;
            // here we need to inflate the PNG pixel data, which includes alpha, separate the alpha
            // channel and then deflate the RGB channels back again
            DeflaterOutputStream dos = new DeflaterOutputStream(out, new Deflater());
            while ((filter = dataStream.read()) != -1) {
                byte[] bytes = new byte[bytesPerRow];
                dataStream.readFully(bytes, 0, bytesPerRow);
                dos.write((byte) filter);
                for (int j = 0; j < numColumns; j++) {
                    dos.write(bytes, offset, numBytes);
                    offset += numberOfInterleavedComponents;
                }
                offset = 0;
            }
            dos.close();
        }
    } finally {
        IOUtils.closeQuietly(in);
    }
}
From source file: org.apache.fop.render.pdf.ImageRawPNGAdapter.java
/** {@inheritDoc} */
public void setup(PDFDocument doc) {
    super.setup(doc);
    ColorModel cm = ((ImageRawPNG) this.image).getColorModel();
    if (cm instanceof IndexColorModel) {
        numberOfInterleavedComponents = 1;
    } else {
        // this can be 1 (gray), 2 (gray + alpha), 3 (rgb) or 4 (rgb + alpha)
        // numberOfInterleavedComponents = (cm.hasAlpha() ? 1 : 0) + cm.getNumColorComponents();
        numberOfInterleavedComponents = cm.getNumComponents();
    }
    // set up image compression for non-alpha channel
    FlateFilter flate;
    try {
        flate = new FlateFilter();
        flate.setApplied(true);
        flate.setPredictor(FlateFilter.PREDICTION_PNG_OPT);
        if (numberOfInterleavedComponents < 3) {
            // means palette (1) or gray (1) or gray + alpha (2)
            flate.setColors(1);
        } else {
            // means rgb (3) or rgb + alpha (4)
            flate.setColors(3);
        }
        flate.setColumns(image.getSize().getWidthPx());
        flate.setBitsPerComponent(this.getBitsPerComponent());
    } catch (PDFFilterException e) {
        throw new RuntimeException("FlateFilter configuration error", e);
    }
    this.pdfFilter = flate;
    this.disallowMultipleFilters();
    // Handle transparency channel if applicable; note that for palette images the transparency is
    // not TRANSLUCENT
    if (cm.hasAlpha() && cm.getTransparency() == ColorModel.TRANSLUCENT) {
        doc.getProfile().verifyTransparencyAllowed(image.getInfo().getOriginalURI());
        // TODO: Implement code to combine image with background color if transparency is not allowed
        // here we need to inflate the PNG pixel data, which includes alpha, separate the alpha channel
        // and then deflate it back again
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DeflaterOutputStream dos = new DeflaterOutputStream(baos, new Deflater());
        InputStream in = ((ImageRawStream) image).createInputStream();
        try {
            InflaterInputStream infStream = new InflaterInputStream(in, new Inflater());
            DataInputStream dataStream = new DataInputStream(infStream);
            // offset is the byte offset of the alpha component
            int offset = numberOfInterleavedComponents - 1; // 1 for GA, 3 for RGBA
            int numColumns = image.getSize().getWidthPx();
            int bytesPerRow = numberOfInterleavedComponents * numColumns;
            int filter;
            // read line by line; the first byte holds the filter
            while ((filter = dataStream.read()) != -1) {
                byte[] bytes = new byte[bytesPerRow];
                dataStream.readFully(bytes, 0, bytesPerRow);
                dos.write((byte) filter);
                for (int j = 0; j < numColumns; j++) {
                    dos.write(bytes, offset, 1);
                    offset += numberOfInterleavedComponents;
                }
                offset = numberOfInterleavedComponents - 1;
            }
            dos.close();
        } catch (IOException e) {
            throw new RuntimeException("Error processing transparency channel:", e);
        } finally {
            IOUtils.closeQuietly(in);
        }
        // set up alpha channel compression
        FlateFilter transFlate;
        try {
            transFlate = new FlateFilter();
            transFlate.setApplied(true);
            transFlate.setPredictor(FlateFilter.PREDICTION_PNG_OPT);
            transFlate.setColors(1);
            transFlate.setColumns(image.getSize().getWidthPx());
            transFlate.setBitsPerComponent(this.getBitsPerComponent());
        } catch (PDFFilterException e) {
            throw new RuntimeException("FlateFilter configuration error", e);
        }
        BitmapImage alphaMask = new BitmapImage("Mask:" + this.getKey(), image.getSize().getWidthPx(),
                image.getSize().getHeightPx(), baos.toByteArray(), null);
        alphaMask.setPDFFilter(transFlate);
        alphaMask.disallowMultipleFilters();
        alphaMask.setColorSpace(new PDFDeviceColorSpace(PDFDeviceColorSpace.DEVICE_GRAY));
        softMask = doc.addImage(null, alphaMask).makeReference();
    }
}
From source file: org.apache.fop.render.ps.ImageEncoderPNG.java
/** {@inheritDoc} */
public void writeTo(OutputStream out) throws IOException {
    // TODO: refactor this code with equivalent PDF code
    InputStream in = ((ImageRawStream) image).createInputStream();
    try {
        if (numberOfInterleavedComponents == 1 || numberOfInterleavedComponents == 3) {
            // means we have Gray, RGB, or Palette
            IOUtils.copy(in, out);
        } else {
            // means we have Gray + alpha or RGB + alpha
            int numBytes = numberOfInterleavedComponents - 1; // 1 for Gray, 3 for RGB
            int numColumns = image.getSize().getWidthPx();
            InflaterInputStream infStream = new InflaterInputStream(in, new Inflater());
            DataInputStream dataStream = new DataInputStream(infStream);
            int offset = 0;
            int bytesPerRow = numberOfInterleavedComponents * numColumns;
            int filter;
            // here we need to inflate the PNG pixel data, which includes alpha, separate the alpha
            // channel and then deflate the RGB channels back again
            // TODO: not using the baos below and using the original out instead (as happens in PDF)
            // would be preferable but that does not work with the rest of the postscript code; this
            // needs to be revisited
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DeflaterOutputStream dos = new DeflaterOutputStream(/* out */ baos, new Deflater());
            while ((filter = dataStream.read()) != -1) {
                byte[] bytes = new byte[bytesPerRow];
                dataStream.readFully(bytes, 0, bytesPerRow);
                dos.write((byte) filter);
                for (int j = 0; j < numColumns; j++) {
                    dos.write(bytes, offset, numBytes);
                    offset += numberOfInterleavedComponents;
                }
                offset = 0;
            }
            dos.close();
            IOUtils.copy(new ByteArrayInputStream(baos.toByteArray()), out);
        }
    } finally {
        IOUtils.closeQuietly(in);
    }
}
From source file: org.dcm4che3.tool.jpg2dcm.Jpg2Dcm.java
private void readHeader(Attributes attrs, DataInputStream jpgInput) throws IOException {
    if (jpgInput.read() != FF || jpgInput.read() != SOI || jpgInput.read() != FF) {
        throw new IOException("JPEG stream does not start with FF D8 FF");
    }
    int marker = jpgInput.read();
    int segmLen;
    boolean seenSOF = false;
    buffer[0] = (byte) FF;
    buffer[1] = (byte) SOI;
    buffer[2] = (byte) FF;
    buffer[3] = (byte) marker;
    jpgHeaderLen = 4;
    while (marker != SOS) {
        segmLen = jpgInput.readUnsignedShort();
        if (buffer.length < jpgHeaderLen + segmLen + 2) {
            growBuffer(jpgHeaderLen + segmLen + 2);
        }
        buffer[jpgHeaderLen++] = (byte) (segmLen >>> 8);
        buffer[jpgHeaderLen++] = (byte) segmLen;
        jpgInput.readFully(buffer, jpgHeaderLen, segmLen - 2);
        if ((marker & 0xf0) == SOF && marker != DHT && marker != DAC) {
            seenSOF = true;
            int p = buffer[jpgHeaderLen] & 0xff;
            int y = ((buffer[jpgHeaderLen + 1] & 0xff) << 8) | (buffer[jpgHeaderLen + 2] & 0xff);
            int x = ((buffer[jpgHeaderLen + 3] & 0xff) << 8) | (buffer[jpgHeaderLen + 4] & 0xff);
            int nf = buffer[jpgHeaderLen + 5] & 0xff;
            attrs.setInt(Tag.SamplesPerPixel, VR.US, nf);
            if (nf == 3) {
                attrs.setString(Tag.PhotometricInterpretation, VR.CS, "YBR_FULL_422");
                attrs.setInt(Tag.PlanarConfiguration, VR.US, 0);
            } else {
                attrs.setString(Tag.PhotometricInterpretation, VR.CS, "MONOCHROME2");
            }
            attrs.setInt(Tag.Rows, VR.US, y);
            attrs.setInt(Tag.Columns, VR.US, x);
            attrs.setInt(Tag.BitsAllocated, VR.US, p > 8 ? 16 : 8);
            attrs.setInt(Tag.BitsStored, VR.US, p);
            attrs.setInt(Tag.HighBit, VR.US, p - 1);
            attrs.setInt(Tag.PixelRepresentation, VR.US, 0);
        }
        if (noAPPn & (marker & 0xf0) == APP) {
            jpgLen -= segmLen + 2;
            jpgHeaderLen -= 4;
        } else {
            jpgHeaderLen += segmLen - 2;
        }
        if (jpgInput.read() != FF) {
            throw new IOException("Missing SOS segment in JPEG stream");
        }
        marker = jpgInput.read();
        buffer[jpgHeaderLen++] = (byte) FF;
        buffer[jpgHeaderLen++] = (byte) marker;
    }
    if (!seenSOF) {
        throw new IOException("Missing SOF segment in JPEG stream");
    }
}
From source file: org.dcm4che2.tool.jpg2dcm.Jpg2Dcm.java
private void readHeader(DicomObject attrs, DataInputStream jpgInput) throws IOException {
    if (jpgInput.read() != FF || jpgInput.read() != SOI || jpgInput.read() != FF) {
        throw new IOException("JPEG stream does not start with FF D8 FF");
    }
    int marker = jpgInput.read();
    int segmLen;
    boolean seenSOF = false;
    buffer[0] = (byte) FF;
    buffer[1] = (byte) SOI;
    buffer[2] = (byte) FF;
    buffer[3] = (byte) marker;
    jpgHeaderLen = 4;
    while (marker != SOS) {
        segmLen = jpgInput.readUnsignedShort();
        if (buffer.length < jpgHeaderLen + segmLen + 2) {
            growBuffer(jpgHeaderLen + segmLen + 2);
        }
        buffer[jpgHeaderLen++] = (byte) (segmLen >>> 8);
        buffer[jpgHeaderLen++] = (byte) segmLen;
        jpgInput.readFully(buffer, jpgHeaderLen, segmLen - 2);
        if ((marker & 0xf0) == SOF && marker != DHT && marker != DAC) {
            seenSOF = true;
            int p = buffer[jpgHeaderLen] & 0xff;
            int y = ((buffer[jpgHeaderLen + 1] & 0xff) << 8) | (buffer[jpgHeaderLen + 2] & 0xff);
            int x = ((buffer[jpgHeaderLen + 3] & 0xff) << 8) | (buffer[jpgHeaderLen + 4] & 0xff);
            int nf = buffer[jpgHeaderLen + 5] & 0xff;
            attrs.putInt(Tag.SamplesPerPixel, VR.US, nf);
            if (nf == 3) {
                attrs.putString(Tag.PhotometricInterpretation, VR.CS, "YBR_FULL_422");
                attrs.putInt(Tag.PlanarConfiguration, VR.US, 0);
            } else {
                attrs.putString(Tag.PhotometricInterpretation, VR.CS, "MONOCHROME2");
            }
            attrs.putInt(Tag.Rows, VR.US, y);
            attrs.putInt(Tag.Columns, VR.US, x);
            attrs.putInt(Tag.BitsAllocated, VR.US, p > 8 ? 16 : 8);
            attrs.putInt(Tag.BitsStored, VR.US, p);
            attrs.putInt(Tag.HighBit, VR.US, p - 1);
            attrs.putInt(Tag.PixelRepresentation, VR.US, 0);
        }
        if (noAPPn & (marker & 0xf0) == APP) {
            jpgLen -= segmLen + 2;
            jpgHeaderLen -= 4;
        } else {
            jpgHeaderLen += segmLen - 2;
        }
        if (jpgInput.read() != FF) {
            throw new IOException("Missing SOS segment in JPEG stream");
        }
        marker = jpgInput.read();
        buffer[jpgHeaderLen++] = (byte) FF;
        buffer[jpgHeaderLen++] = (byte) marker;
    }
    if (!seenSOF) {
        throw new IOException("Missing SOF segment in JPEG stream");
    }
}
From source file: com.example.google.play.apkx.SampleDownloaderActivity.java
/**
 * Go through each of the Expansion APK files and open each as a zip file.
 * Calculate the CRC for each file and return false if any fail to match.
 *
 * @return true if XAPKZipFile is successful
 */
void validateXAPKZipFiles() {
    AsyncTask<Object, DownloadProgressInfo, Boolean> validationTask =
            new AsyncTask<Object, DownloadProgressInfo, Boolean>() {

        @Override
        protected void onPreExecute() {
            mDashboard.setVisibility(View.VISIBLE);
            mCellMessage.setVisibility(View.GONE);
            mStatusText.setText(R.string.text_verifying_download);
            mPauseButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    mCancelValidation = true;
                }
            });
            mPauseButton.setText(R.string.text_button_cancel_verify);
            super.onPreExecute();
        }

        @Override
        protected Boolean doInBackground(Object... params) {
            for (XAPKFile xf : xAPKS) {
                String fileName = Helpers.getExpansionAPKFileName(SampleDownloaderActivity.this,
                        xf.mIsMain, xf.mFileVersion);
                if (!Helpers.doesFileExist(SampleDownloaderActivity.this, fileName, xf.mFileSize, false))
                    return false;
                fileName = Helpers.generateSaveFileName(SampleDownloaderActivity.this, fileName);
                ZipResourceFile zrf;
                byte[] buf = new byte[1024 * 256];
                try {
                    zrf = new ZipResourceFile(fileName);
                    ZipEntryRO[] entries = zrf.getAllEntries();
                    /**
                     * First calculate the total compressed length
                     */
                    long totalCompressedLength = 0;
                    for (ZipEntryRO entry : entries) {
                        totalCompressedLength += entry.mCompressedLength;
                    }
                    float averageVerifySpeed = 0;
                    long totalBytesRemaining = totalCompressedLength;
                    long timeRemaining;
                    /**
                     * Then calculate a CRC for every file in the Zip file, comparing it to what is
                     * stored in the Zip directory. Note that for compressed Zip files we must
                     * extract the contents to do this comparison.
                     */
                    for (ZipEntryRO entry : entries) {
                        if (-1 != entry.mCRC32) {
                            long length = entry.mUncompressedLength;
                            CRC32 crc = new CRC32();
                            DataInputStream dis = null;
                            try {
                                dis = new DataInputStream(zrf.getInputStream(entry.mFileName));
                                long startTime = SystemClock.uptimeMillis();
                                while (length > 0) {
                                    int seek = (int) (length > buf.length ? buf.length : length);
                                    dis.readFully(buf, 0, seek);
                                    crc.update(buf, 0, seek);
                                    length -= seek;
                                    long currentTime = SystemClock.uptimeMillis();
                                    long timePassed = currentTime - startTime;
                                    if (timePassed > 0) {
                                        float currentSpeedSample = (float) seek / (float) timePassed;
                                        if (0 != averageVerifySpeed) {
                                            averageVerifySpeed = SMOOTHING_FACTOR * currentSpeedSample
                                                    + (1 - SMOOTHING_FACTOR) * averageVerifySpeed;
                                        } else {
                                            averageVerifySpeed = currentSpeedSample;
                                        }
                                        totalBytesRemaining -= seek;
                                        timeRemaining = (long) (totalBytesRemaining / averageVerifySpeed);
                                        this.publishProgress(new DownloadProgressInfo(totalCompressedLength,
                                                totalCompressedLength - totalBytesRemaining, timeRemaining,
                                                averageVerifySpeed));
                                    }
                                    startTime = currentTime;
                                    if (mCancelValidation)
                                        return true;
                                }
                                if (crc.getValue() != entry.mCRC32) {
                                    Log.e(Constants.TAG, "CRC does not match for entry: " + entry.mFileName);
                                    Log.e(Constants.TAG, "In file: " + entry.getZipFileName());
                                    return false;
                                }
                            } finally {
                                if (null != dis) {
                                    dis.close();
                                }
                            }
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                    return false;
                }
            }
            return true;
        }

        @Override
        protected void onProgressUpdate(DownloadProgressInfo... values) {
            onDownloadProgress(values[0]);
            super.onProgressUpdate(values);
        }

        @Override
        protected void onPostExecute(Boolean result) {
            if (result) {
                mDashboard.setVisibility(View.VISIBLE);
                mCellMessage.setVisibility(View.GONE);
                mStatusText.setText(R.string.text_validation_complete);
                mPauseButton.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        startMovie();
                    }
                });
                mPauseButton.setText(android.R.string.ok);
            } else {
                mDashboard.setVisibility(View.VISIBLE);
                mCellMessage.setVisibility(View.GONE);
                mStatusText.setText(R.string.text_validation_failed);
                mPauseButton.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        finish();
                    }
                });
                mPauseButton.setText(android.R.string.cancel);
            }
            super.onPostExecute(result);
        }
    };
    validationTask.execute(new Object());
}