List of usage examples for java.io DataInputStream read
public final int read(byte b[]) throws IOException
Reads some number of bytes from the contained input stream and stores them into the buffer array b. Returns the total number of bytes read into the buffer, or -1 if the end of the stream has been reached.
Parameter: b - the buffer into which the data is read.
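Before the project-specific examples below, a minimal, self-contained sketch of the typical read loop (the class name and sample data are illustrative, not taken from any of the projects listed): read(byte[]) may fill only part of the buffer on a given call, so callers loop until it returns -1 and use the returned count when consuming the bytes.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class DataInputStreamReadExample {
    public static void main(String[] args) throws IOException {
        byte[] source = "example payload for DataInputStream.read(byte[])".getBytes(StandardCharsets.UTF_8);
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(source))) {
            byte[] buffer = new byte[16];
            int bytesRead;
            // read(byte[]) may return fewer bytes than buffer.length; -1 signals end of stream
            while ((bytesRead = in.read(buffer)) != -1) {
                System.out.println("read " + bytesRead + " bytes: "
                        + new String(buffer, 0, bytesRead, StandardCharsets.UTF_8));
            }
        }
    }
}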
From source file:org.apache.hadoop.mapred.TestShuffleHandler.java
@Test(timeout = 10000)
public void testKeepAlive() throws Exception {
    final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    conf.setBoolean(ShuffleHandler.SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED, true);
    // try setting to -ve keep alive timeout.
    conf.setInt(ShuffleHandler.SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT, -100);
    final LastSocketAddress lastSocketAddress = new LastSocketAddress();
    ShuffleHandler shuffleHandler = new ShuffleHandler() {
        @Override
        protected Shuffle getShuffle(final Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {
                @Override
                protected MapOutputInfo getMapOutputInfo(String base, String mapId, int reduce, String user)
                        throws IOException {
                    return null;
                }

                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request,
                        HttpResponse response, URL requestUri) throws IOException {
                }

                @Override
                protected void populateHeaders(List<String> mapIds, String jobId, String user, int reduce,
                        HttpRequest request, HttpResponse response, boolean keepAliveParam,
                        Map<String, MapOutputInfo> infoMap) throws IOException {
                    // Send some dummy data (populate content length details)
                    ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
                    DataOutputBuffer dob = new DataOutputBuffer();
                    header.write(dob);
                    dob = new DataOutputBuffer();
                    for (int i = 0; i < 100000; ++i) {
                        header.write(dob);
                    }
                    long contentLength = dob.getLength();
                    // for testing purpose;
                    // disable connectionKeepAliveEnabled if keepAliveParam is available
                    if (keepAliveParam) {
                        connectionKeepAliveEnabled = false;
                    }
                    super.setResponseHeaders(response, keepAliveParam, contentLength);
                }

                @Override
                protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch, String user,
                        String mapId, int reduce, MapOutputInfo info) throws IOException {
                    lastSocketAddress.setAddress(ch.getRemoteAddress());
                    HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
                    // send a shuffle header and a lot of data down the channel
                    // to trigger a broken pipe
                    ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
                    DataOutputBuffer dob = new DataOutputBuffer();
                    header.write(dob);
                    ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                    dob = new DataOutputBuffer();
                    for (int i = 0; i < 100000; ++i) {
                        header.write(dob);
                    }
                    return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error());
                        ctx.getChannel().close();
                    }
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, String message, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error());
                        ctx.getChannel().close();
                    }
                }
            };
        }
    };
    shuffleHandler.init(conf);
    shuffleHandler.start();
    String shuffleBaseURL = "http://127.0.0.1:"
            + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY);
    URL url = new URL(shuffleBaseURL + "/mapOutput?job=job_12345_1&reduce=1&" + "map=attempt_12345_1_m_1_0");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
    conn.connect();
    DataInputStream input = new DataInputStream(conn.getInputStream());
    Assert.assertEquals(HttpHeaders.KEEP_ALIVE, conn.getHeaderField(HttpHeaders.CONNECTION));
    Assert.assertEquals("timeout=1", conn.getHeaderField(HttpHeaders.KEEP_ALIVE));
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    ShuffleHeader header = new ShuffleHeader();
    header.readFields(input);
    byte[] buffer = new byte[1024];
    while (input.read(buffer) != -1) {
    }
    SocketAddress firstAddress = lastSocketAddress.getSocketAddres();
    input.close();

    // For keepAlive via URL
    url = new URL(shuffleBaseURL + "/mapOutput?job=job_12345_1&reduce=1&"
            + "map=attempt_12345_1_m_1_0&keepAlive=true");
    conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
    conn.connect();
    input = new DataInputStream(conn.getInputStream());
    Assert.assertEquals(HttpHeaders.KEEP_ALIVE, conn.getHeaderField(HttpHeaders.CONNECTION));
    Assert.assertEquals("timeout=1", conn.getHeaderField(HttpHeaders.KEEP_ALIVE));
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    header = new ShuffleHeader();
    header.readFields(input);
    input.close();
    SocketAddress secondAddress = lastSocketAddress.getSocketAddres();
    Assert.assertNotNull("Initial shuffle address should not be null", firstAddress);
    Assert.assertNotNull("Keep-Alive shuffle address should not be null", secondAddress);
    Assert.assertEquals("Initial shuffle address and keep-alive shuffle " + "address should be the same",
            firstAddress, secondAddress);
}
From source file:org.akaza.openclinica.control.core.SecureController.java
public void dowloadFile(File f, String contentType) throws Exception {
    response.setHeader("Content-disposition", "attachment; filename=\"" + f.getName() + "\";");
    response.setContentType("text/xml");
    response.setHeader("Pragma", "public");
    ServletOutputStream op = response.getOutputStream();
    DataInputStream in = null;
    try {
        response.setContentType("text/xml");
        response.setHeader("Pragma", "public");
        response.setContentLength((int) f.length());

        byte[] bbuf = new byte[(int) f.length()];
        in = new DataInputStream(new FileInputStream(f));

        int length;
        while (in != null && (length = in.read(bbuf)) != -1) {
            op.write(bbuf, 0, length);
        }

        in.close();
        op.flush();
        op.close();
    } catch (Exception ee) {
        ee.printStackTrace();
    } finally {
        if (in != null) {
            in.close();
        }
        if (op != null) {
            op.close();
        }
    }
}
From source file:tor.TorCrypto.java
/**
 * Parses a public key encoded as ASN.1
 *
 * @param rsapublickey ASN.1 Encoded public key
 * @return PublicKey
 */
public static PublicKey asn1GetPublicKey(byte[] rsapublickey) {
    int blobsize = rsapublickey.length;
    DataInputStream dis = null;
    int jint = 0; // int to represent unsigned byte or unsigned short
    int datacount = 0;

    try {
        // --- Try to read the ASN.1 encoded RSAPublicKey blob -------------
        ByteArrayInputStream bis = new ByteArrayInputStream(rsapublickey);
        dis = new DataInputStream(bis);

        if (dis.readByte() != 0x30) // asn.1 encoded starts with 0x30
            return null;

        jint = dis.readUnsignedByte(); // asn.1 is 0x80 plus number of bytes representing data count
        if (jint == 0x81)
            datacount = dis.readUnsignedByte(); // datalength is specified in next byte.
        else if (jint == 0x82) // bytes count for any supported keysize would be at most 2 bytes
            datacount = dis.readUnsignedShort(); // datalength is specified in next 2 bytes
        else
            return null; // all supported publickey byte-sizes can be specified in at most 2 bytes

        if ((jint - 0x80 + 2 + datacount) != blobsize) // sanity check for correct number of remaining bytes
            return null;

        // System.out.println("\nRead outer sequence bytes; validated outer asn.1 consistency ");

        // ------- Next attempt to read Integer sequence for modulus ------
        if (dis.readUnsignedByte() != 0x02) // next byte read must be Integer asn.1 specifier
            return null;
        jint = dis.readUnsignedByte(); // asn.1 is 0x80 plus number of bytes representing data count
        if (jint == 0x81)
            datacount = dis.readUnsignedByte(); // datalength is specified in next byte.
        else if (jint == 0x82) // bytes count for any supported keysize would be at most 2 bytes
            datacount = dis.readUnsignedShort(); // datalength is specified in next 2 bytes
        else
            return null; // all supported publickey modulus byte-sizes can be specified in at most 2 bytes

        // ---- next bytes are big-endian ordered modulus -----
        byte[] modulus = new byte[datacount];
        int modbytes = dis.read(modulus);
        if (modbytes != datacount) // if we can read enough modulus bytes ...
            return null;
        // System.out.println("Read modulus");

        // ------- Next attempt to read Integer sequence for public exponent ------
        if (dis.readUnsignedByte() != 0x02) // next byte read must be Integer asn.1 specifier
            return null;
        datacount = dis.readUnsignedByte(); // size of exponent is specified in one byte
        byte[] exponent = new byte[datacount];
        int expbytes = dis.read(exponent);
        if (expbytes != datacount)
            return null;
        // System.out.println("Read exponent");

        // ----- Finally, create the PublicKey object from modulus and public exponent --------
        RSAPublicKeySpec pubKeySpec = new RSAPublicKeySpec(new BigInteger(1, modulus),
                new BigInteger(1, exponent));
        KeyFactory keyFactory = KeyFactory.getInstance("RSA");
        PublicKey pubKey = keyFactory.generatePublic(pubKeySpec);
        return pubKey;
    } catch (Exception exc) {
        return null;
    } finally {
        try {
            dis.close();
        } catch (Exception exc) {
            /* ignore */
        }
    }
}
From source file:org.commoncrawl.service.listcrawler.CacheManager.java
/**
 * loadCacheItemFromDisk - load a single cache item from disk
 *
 * @param file
 * @param optTargetURL
 * @param location
 * @return
 * @throws IOException
 */
private CacheItem loadCacheItemFromDisk(FileInputStream file, String optTargetURL, long location)
        throws IOException {

    long timeStart = System.currentTimeMillis();

    // and read out the Item Header ...
    CacheItemHeader itemHeader = new CacheItemHeader();
    itemHeader.readHeader(new DataInputStream(file));
    // see if it is valid ...
    if (!Arrays.equals(itemHeader._sync, _header._sync)) {
        LOG.error("### Item Lookup for URL:" + optTargetURL + " Record at:" + location
                + " failed - corrupt sync bytes detected!!!");
    } else {
        CRC32 crc32 = new CRC32();

        // ok deserialize the bytes ...
        CacheItem item = new CacheItem();
        CheckedInputStream checkedStream = new CheckedInputStream(file, crc32);
        DataInputStream itemStream = new DataInputStream(checkedStream);
        item.readFields(itemStream);
        // read the content buffer length
        int contentBufferLen = itemStream.readInt();
        if (contentBufferLen != 0) {
            byte data[] = new byte[contentBufferLen];
            itemStream.read(data);
            item.setContent(new Buffer(data));
        }
        // cache crc
        long crcValueComputed = crc32.getValue();
        // read disk crc
        long crcValueOnDisk = itemStream.readLong();
        // validate
        if (crcValueComputed == crcValueOnDisk) {
            String canonicalURL = URLUtils.canonicalizeURL(item.getUrl(), true);
            if (optTargetURL.length() == 0 || optTargetURL.equals(canonicalURL)) {
                if (isValidCacheItem(item)) {
                    LOG.info("### Item Lookup for URL:" + optTargetURL + " Record at:" + location
                            + " completed in:" + (System.currentTimeMillis() - timeStart));
                    return item;
                } else {
                    LOG.info("### Item Lookup for URL:" + optTargetURL + " Record at:" + location
                            + " failed with invalid result code");
                }
            } else {
                LOG.info("### Item Lookup for URL:" + optTargetURL + " Record at:" + location
                        + " failed with url mismatch. record url:" + item.getUrl());
            }
        } else {
            LOG.error("### Item Lookup for URL:" + optTargetURL + " Record at:" + location
                    + " failed - crc mismatch!!!");
        }
    }
    return null;
}
From source file:org.torproject.android.service.TorService.java
private int initControlConnection(int maxTries, boolean isReconnect) throws Exception, RuntimeException {
    int controlPort = -1;
    int attempt = 0;

    logNotice("Waiting for control port...");

    while (conn == null && attempt++ < maxTries) {
        try {
            controlPort = getControlPort();

            if (controlPort != -1) {
                logNotice("Connecting to control port: " + controlPort);

                torConnSocket = new Socket(IP_LOCALHOST, controlPort);
                torConnSocket.setSoTimeout(CONTROL_SOCKET_TIMEOUT);

                conn = new TorControlConnection(torConnSocket);
                conn.launchThread(true); // is daemon

                break;
            }
        } catch (Exception ce) {
            conn = null;
            // logException( "Error connecting to Tor local control port: " + ce.getMessage(),ce);
        }

        try {
            // logNotice("waiting...");
            Thread.sleep(1000);
        } catch (Exception e) {
        }
    }

    if (conn != null) {
        logNotice("SUCCESS connected to Tor control port.");

        File fileCookie = new File(OrbotApp.appCacheHome, TOR_CONTROL_COOKIE);

        if (fileCookie.exists()) {
            byte[] cookie = new byte[(int) fileCookie.length()];
            DataInputStream fis = new DataInputStream(new FileInputStream(fileCookie));
            fis.read(cookie);
            fis.close();
            conn.authenticate(cookie);

            logNotice("SUCCESS - authenticated to control port.");

            sendCallbackLogMessage(getString(R.string.tor_process_starting) + ' '
                    + getString(R.string.tor_process_complete));

            addEventHandler();

            String torProcId = conn.getInfo("process/pid");

            String confSocks = conn.getInfo("net/listeners/socks");
            StringTokenizer st = new StringTokenizer(confSocks, " ");

            confSocks = st.nextToken().split(":")[1];
            confSocks = confSocks.substring(0, confSocks.length() - 1);
            mPortSOCKS = Integer.parseInt(confSocks);

            return Integer.parseInt(torProcId);
        } else {
            logNotice("Tor authentication cookie does not exist yet");
            conn = null;
        }
    }

    return -1;
}
From source file:org.dcm4che3.tool.jpg2dcm.Jpg2Dcm.java
public void convert(CommandLine cl, File jpgFile, File dcmFile) throws IOException {
    jpgHeaderLen = 0;
    jpgLen = (int) jpgFile.length();
    DataInputStream jpgInput = new DataInputStream(new BufferedInputStream(new FileInputStream(jpgFile)));

    try {
        Attributes attrs = new Attributes();
        try {
            if (cl.hasOption("mpeg") && cl.hasOption("c"))
                attrs = SAXReader.parse(cl.getOptionValue("c"));
            else if (cl.hasOption("c"))
                attrs = SAXReader.parse(cl.getOptionValue("c"));
        } catch (Exception e) {
            throw new FileNotFoundException("Configuration XML file not found");
        }
        attrs.setString(Tag.SpecificCharacterSet, VR.CS, charset);

        if (noAPPn || missingRowsColumnsSamplesPMI(attrs)) {
            readHeader(attrs, jpgInput);
        }
        ensureUS(attrs, Tag.BitsAllocated, 8);
        ensureUS(attrs, Tag.BitsStored,
                attrs.getInt(Tag.BitsAllocated, (buffer[jpgHeaderLen] & 0xff) > 8 ? 16 : 8));
        ensureUS(attrs, Tag.HighBit, attrs.getInt(Tag.BitsStored, (buffer[jpgHeaderLen] & 0xff)) - 1);
        ensureUS(attrs, Tag.PixelRepresentation, 0);
        ensureUID(attrs, Tag.StudyInstanceUID);
        ensureUID(attrs, Tag.SeriesInstanceUID);
        ensureUID(attrs, Tag.SOPInstanceUID);

        Date now = new Date();
        attrs.setDate(Tag.InstanceCreationDate, VR.DA, now);
        attrs.setDate(Tag.InstanceCreationTime, VR.TM, now);

        Attributes fmi = attrs.createFileMetaInformation(transferSyntax);
        DicomOutputStream dos = new DicomOutputStream(dcmFile);
        try {
            dos.writeDataset(fmi, attrs);
            dos.writeHeader(Tag.PixelData, VR.OB, -1);
            if (!cl.hasOption("mpeg")) {
                dos.writeHeader(Tag.Item, null, 0);
                dos.writeHeader(Tag.Item, null, (jpgLen + 1) & ~1);
                dos.write(buffer, 0, jpgHeaderLen);
            }
            int r;
            while ((r = jpgInput.read(buffer)) > 0) {
                dos.write(buffer, 0, r);
            }
            if (!cl.hasOption("mpeg")) {
                if ((jpgLen & 1) != 0) {
                    dos.write(0);
                }
            }
            dos.writeHeader(Tag.SequenceDelimitationItem, null, 0);
        } finally {
            dos.close();
        }
    } finally {
        jpgInput.close();
    }
}
From source file:org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageDecompressor.java
/**
 * Process image file.
 */
private void go() throws IOException {
    long start = System.currentTimeMillis();
    System.out.println("Decompressing image file: " + inputFile + " to " + outputFile);
    DataInputStream in = null;
    DataOutputStream out = null;

    try {
        // setup in
        PositionTrackingInputStream ptis = new PositionTrackingInputStream(
                new FileInputStream(new File(inputFile)));
        in = new DataInputStream(ptis);

        // read header information
        int imgVersion = in.readInt();
        if (!LayoutVersion.supports(Feature.FSIMAGE_COMPRESSION, imgVersion)) {
            System.out.println("Image is not compressed. No output will be produced.");
            return;
        }
        int namespaceId = in.readInt();
        long numFiles = in.readLong();
        long genstamp = in.readLong();

        long imgTxId = -1;
        if (LayoutVersion.supports(Feature.STORED_TXIDS, imgVersion)) {
            imgTxId = in.readLong();
        }
        FSImageCompression compression = FSImageCompression.readCompressionHeader(new Configuration(), in);
        if (compression.isNoOpCompression()) {
            System.out.println("Image is not compressed. No output will be produced.");
            return;
        }
        in = BufferedByteInputStream.wrapInputStream(compression.unwrapInputStream(in),
                FSImage.LOAD_SAVE_BUFFER_SIZE, FSImage.LOAD_SAVE_CHUNK_SIZE);
        System.out.println("Starting decompression.");

        // setup output
        out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(outputFile)));

        // write back the uncompressed information
        out.writeInt(imgVersion);
        out.writeInt(namespaceId);
        out.writeLong(numFiles);
        out.writeLong(genstamp);
        if (LayoutVersion.supports(Feature.STORED_TXIDS, imgVersion)) {
            out.writeLong(imgTxId);
        }
        // no compression
        out.writeBoolean(false);

        // copy the data
        long size = new File(inputFile).length();
        // read in 1MB chunks
        byte[] block = new byte[1024 * 1024];
        while (true) {
            int bytesRead = in.read(block);
            if (bytesRead <= 0)
                break;
            out.write(block, 0, bytesRead);
            printProgress(ptis.getPos(), size);
        }

        out.close();

        long stop = System.currentTimeMillis();
        System.out.println("Input file : " + inputFile + " size: " + size);
        System.out.println("Output file: " + outputFile + " size: " + new File(outputFile).length());
        System.out.println("Decompression completed in " + (stop - start) + " ms.");
    } finally {
        if (in != null)
            in.close();
        if (out != null)
            out.close();
    }
}
From source file:org.psystems.dicom.daemon.Jpg2Dcm.java
public void convert(File jpgFile, File dcmFile) throws IOException {
    jpgHeaderLen = 0;
    jpgLen = (int) jpgFile.length();
    DataInputStream jpgInput = new DataInputStream(new BufferedInputStream(new FileInputStream(jpgFile)));

    try {
        DicomObject attrs = new BasicDicomObject();
        attrs.putString(Tag.SpecificCharacterSet, VR.CS, charset);
        for (Enumeration en = cfg.propertyNames(); en.hasMoreElements();) {
            String key = (String) en.nextElement();
            int[] tagPath = Tag.toTagPath(key);
            int last = tagPath.length - 1;
            VR vr = attrs.vrOf(tagPath[last]);
            if (vr == VR.SQ) {
                attrs.putSequence(tagPath);
            } else {
                attrs.putString(tagPath, vr, cfg.getProperty(key));
            }
        }
        if (noAPPn || missingRowsColumnsSamplesPMI(attrs)) {
            readHeader(attrs, jpgInput);
        }
        ensureUS(attrs, Tag.BitsAllocated, 8);
        ensureUS(attrs, Tag.BitsStored, attrs.getInt(Tag.BitsAllocated));
        ensureUS(attrs, Tag.HighBit, attrs.getInt(Tag.BitsStored) - 1);
        ensureUS(attrs, Tag.PixelRepresentation, 0);
        ensureUID(attrs, Tag.StudyInstanceUID);
        ensureUID(attrs, Tag.SeriesInstanceUID);
        ensureUID(attrs, Tag.SOPInstanceUID);
        Date now = new Date();
        attrs.putDate(Tag.InstanceCreationDate, VR.DA, now);
        attrs.putDate(Tag.InstanceCreationTime, VR.TM, now);
        attrs.initFileMetaInformation(transferSyntax);

        FileOutputStream fos = new FileOutputStream(dcmFile);
        BufferedOutputStream bos = new BufferedOutputStream(fos);
        DicomOutputStream dos = new DicomOutputStream(bos);
        try {
            dos.writeDicomFile(attrs);
            dos.writeHeader(Tag.PixelData, VR.OB, -1);
            dos.writeHeader(Tag.Item, null, 0);
            dos.writeHeader(Tag.Item, null, (jpgLen + 1) & ~1);
            dos.write(buffer, 0, jpgHeaderLen);
            int r;
            while ((r = jpgInput.read(buffer)) > 0) {
                dos.write(buffer, 0, r);
            }
            if ((jpgLen & 1) != 0) {
                dos.write(0);
            }
            dos.writeHeader(Tag.SequenceDelimitationItem, null, 0);
        } finally {
            dos.close();
        }
    } finally {
        jpgInput.close();
    }
}
From source file:org.ramadda.repository.database.DatabaseManager.java
/**
 * _more_
 *
 * @param dos _more_
 *
 * @return _more_
 *
 * @throws Exception _more_
 */
private String readString(DataInputStream dos) throws Exception {
    int length = dos.readInt();
    if (length < 0) {
        return null;
    }
    byte[] bytes = new byte[length];
    dos.read(bytes);

    return new String(bytes);
}
From source file:org.dcm4che2.tool.jpg2dcm.Jpg2Dcm.java
@SuppressWarnings("rawtypes")
public void convert(File jpgFile, File dcmFile, String patientName, String patientID, String studyuid,
        String seriesuid, int n) throws IOException {
    jpgHeaderLen = 0;
    jpgLen = (int) jpgFile.length();
    DataInputStream jpgInput = new DataInputStream(new BufferedInputStream(new FileInputStream(jpgFile)));

    try {
        DicomObject attrs = new BasicDicomObject();
        attrs.putString(Tag.SpecificCharacterSet, VR.CS, charset);
        for (Enumeration en = cfg.propertyNames(); en.hasMoreElements();) {
            String key = (String) en.nextElement();
            int[] tagPath = Tag.toTagPath(key);
            int last = tagPath.length - 1;
            VR vr = attrs.vrOf(tagPath[last]);
            if (vr == VR.SQ) {
                attrs.putSequence(tagPath);
            } else {
                attrs.putString(tagPath, vr, cfg.getProperty(key));
            }
        }
        if (noAPPn || missingRowsColumnsSamplesPMI(attrs)) {
            readHeader(attrs, jpgInput);
        }
        ensureUS(attrs, Tag.BitsAllocated, 8);
        ensureUS(attrs, Tag.BitsStored, attrs.getInt(Tag.BitsAllocated));
        ensureUS(attrs, Tag.HighBit, attrs.getInt(Tag.BitsStored) - 1);
        ensureUS(attrs, Tag.PixelRepresentation, 0);
        // ensureUID(attrs, Tag.StudyInstanceUID);
        // ensureUID(attrs, Tag.SeriesInstanceUID);
        ensureUID(attrs, Tag.SOPInstanceUID);
        Date now = new Date();
        attrs.putDate(Tag.InstanceCreationDate, VR.DA, now);
        attrs.putDate(Tag.InstanceCreationTime, VR.TM, now);
        attrs.initFileMetaInformation(transferSyntax);

        attrs.putDate(Tag.StudyDate, VR.DA, now);
        attrs.putDate(Tag.StudyTime, VR.TM, now);

        // attrs.putString( Tag.PatientName, VR.PN, patientName );
        attrs.putBytes(Tag.PatientName, VR.PN, patientName.getBytes(this.charset));

        attrs.putString(Tag.PatientID, VR.LO, patientID);
        attrs.putString(Tag.StudyInstanceUID, VR.UI, studyuid);
        attrs.putString(Tag.SeriesInstanceUID, VR.UI, seriesuid);
        attrs.putString(Tag.StudyID, VR.SH, "99");
        attrs.putString(Tag.SeriesNumber, VR.IS, "1");
        attrs.putString(Tag.InstanceNumber, VR.IS, String.valueOf(n));

        FileOutputStream fos = new FileOutputStream(dcmFile);
        BufferedOutputStream bos = new BufferedOutputStream(fos);
        DicomOutputStream dos = new DicomOutputStream(bos);
        try {
            dos.writeDicomFile(attrs);
            dos.writeHeader(Tag.PixelData, VR.OB, -1);
            dos.writeHeader(Tag.Item, null, 0);
            dos.writeHeader(Tag.Item, null, (jpgLen + 1) & ~1);
            dos.write(buffer, 0, jpgHeaderLen);
            int r;
            while ((r = jpgInput.read(buffer)) > 0) {
                dos.write(buffer, 0, r);
            }
            if ((jpgLen & 1) != 0) {
                dos.write(0);
            }
            dos.writeHeader(Tag.SequenceDelimitationItem, null, 0);
        } finally {
            dos.close();
        }
    } finally {
        jpgInput.close();
    }
}