List of usage examples for java.io PushbackInputStream unread
public void unread(byte[] b) throws IOException
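Before the examples from real projects, here is a minimal self-contained sketch of the pattern they all share: read a few bytes to sniff the stream, then push them back with unread(byte[] b) so downstream code still sees the stream from its first byte. The GZIP magic-number check and the class name are illustrative only, not taken from the sources below.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;

public class UnreadExample {

    // Sniff the two-byte GZIP magic number, then push the bytes back
    // so the caller can still consume the stream from its first byte.
    static boolean isGzipped(PushbackInputStream in) throws IOException {
        byte[] signature = new byte[2];
        int n = in.read(signature);
        if (n == signature.length) {
            in.unread(signature);       // unread(byte[] b): pushes the whole array back
        } else if (n > 0) {
            in.unread(signature, 0, n); // partial read: push back only what was read
        }
        return n == 2 && (signature[0] & 0xFF) == 0x1F && (signature[1] & 0xFF) == 0x8B;
    }

    public static void main(String[] args) throws IOException {
        byte[] gzipHeader = { (byte) 0x1F, (byte) 0x8B, 8, 0 };
        // the pushback buffer must be at least as large as the longest unread
        PushbackInputStream in = new PushbackInputStream(new ByteArrayInputStream(gzipHeader), 2);
        System.out.println(isGzipped(in)); // true
        System.out.println(in.read());     // 31 (0x1F): the first byte is intact
    }
}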
From source file:edu.mayo.trilliumbridge.webapp.TransformerController.java
/**
 * From http://stackoverflow.com/a/9737529/656853
 */
private InputStream checkForUtf8BOMAndDiscardIfAny(InputStream inputStream) throws IOException {
    PushbackInputStream pushbackInputStream = new PushbackInputStream(new BufferedInputStream(inputStream), 3);
    byte[] bom = new byte[3];
    int bytesRead = pushbackInputStream.read(bom);
    // push the bytes back unless they are exactly the UTF-8 BOM (EF BB BF)
    if (bytesRead > 0
            && !(bytesRead == 3 && bom[0] == (byte) 0xEF && bom[1] == (byte) 0xBB && bom[2] == (byte) 0xBF)) {
        // unread only the bytes actually read; unread(bom) after a short read
        // would push back stale zero bytes from the buffer
        pushbackInputStream.unread(bom, 0, bytesRead);
    }
    return pushbackInputStream;
}
From source file:com.digitalpebble.behemoth.io.warc.HttpResponse.java
private void parseHeaders(PushbackInputStream in, StringBuilder line) throws IOException {
    while (readLine(in, line, true) != 0) {
        // handle HTTP responses with missing blank line after headers
        int pos;
        if (((pos = line.indexOf("<!DOCTYPE")) != -1) || ((pos = line.indexOf("<HTML")) != -1)
                || ((pos = line.indexOf("<html")) != -1)) {
            // push the HTML back onto the stream so it is parsed as body content, not headers
            in.unread(line.substring(pos).getBytes("UTF-8"));
            line.setLength(pos);
            try {
                // TODO: (CM) We don't know the header names here
                // since we're just handling them generically. It would
                // be nice to provide some sort of mapping function here
                // for the returned header names to the standard metadata
                // names in the ParseData class
                processHeaderLine(line);
            } catch (Exception e) {
                // ignore a malformed trailing header line
            }
            return;
        }
        processHeaderLine(line);
    }
}
From source file:in.arun.faces.fo.pdf.FoOutputStream.java
private PdfResult buildPdf(FoOutputStream foOutput) {
    byte[] responseText = foOutput.getBytes();
    if (responseText == null) {
        return null;
    }

    PdfResult result = new PdfResult();

    try {
        PushbackInputStream pbis = new PushbackInputStream(
                new BufferedInputStream(new ByteArrayInputStream(responseText)));
        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        // Skip contentType text/html - Looking for bug fix!
        // pbis.skip(9);
        while (pbis.available() > 0) {
            pbis.mark(1); // no-op: PushbackInputStream does not support mark/reset; unread() below rewinds
            if (pbis.read() == '<') {
                pbis.unread('<');
                break;
            }
        }

        // Transforming XML to PDF
        FopFactory fopFactory = FopFactory.newInstance();
        Fop fop = fopFactory.newFop(MimeConstants.MIME_PDF, baos);
        TransformerFactory tFactory = TransformerFactory.newInstance();
        Transformer transformer = tFactory.newTransformer();
        Source src = new StreamSource(pbis);
        Result res = new SAXResult(fop.getDefaultHandler());
        transformer.transform(src, res);

        result.contentType = MimeConstants.MIME_PDF;
        result.content = baos.toByteArray();
    } catch (IOException | FOPException | TransformerException x) {
        logger.log(Level.SEVERE, "Error while trying to create PDF.", x);
        StringBuilder builder = new StringBuilder();
        builder.append(x.getMessage());
        builder.append("<br/>");
        builder.append("<pre>");
        String formattedFo = new String(responseText);
        // escape the markup so the raw FO source is displayed in the error page
        formattedFo = formattedFo.replaceAll("<", "&lt;");
        formattedFo = formattedFo.replaceAll(">", "&gt;");
        builder.append(formattedFo);
        builder.append("</pre>");
        result.contentType = "text/html";
        result.content = builder.toString().getBytes();
    }
    return result;
}
From source file:com.handywedge.binarystore.store.azure.BinaryStoreManagerImpl.java
@SuppressWarnings("unused") @Override//from w w w. j a v a 2 s .c o m public BinaryInfo upload(StorageInfo storage, BinaryInfo binary, InputStream inStream) throws StoreException { logger.info("ABS update method: start."); logger.debug("" + storage.toString()); logger.debug("?" + binary.toString()); long startSingle = System.currentTimeMillis(); CloudBlobClient bClient = getABSClient(binary.getBucketName(), true); BinaryInfo rtnBinary = new BinaryInfo(); try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); long written = IOUtils.copyLarge(inStream, baos, 0, BINARY_PART_SIZE_5MB); byte[] data = baos.toByteArray(); InputStream awsInputStream = new ByteArrayInputStream(data); CloudBlockBlob blob = bClient.getContainerReference(binary.getBucketName()) .getBlockBlobReference(binary.getFileName()); if (written < BINARY_PART_SIZE_5MB) { BlobOutputStream blobOutputStream = blob.openOutputStream(); int next = awsInputStream.read(); while (next != -1) { blobOutputStream.write(next); next = awsInputStream.read(); } blobOutputStream.close(); blob.downloadAttributes(); BlobProperties properties = blob.getProperties(); properties.setContentType(binary.getContentType()); blob.uploadProperties(); } else { int firstByte = 0; int partNumber = 1; Boolean isFirstChunck = true; Boolean overSizeLimit = false; List<BlockEntry> blockList = new ArrayList<BlockEntry>(); InputStream firstChunck = new ByteArrayInputStream(data); PushbackInputStream chunckableInputStream = new PushbackInputStream(inStream, 1); while (-1 != (firstByte = chunckableInputStream.read())) { long partSize = 0; chunckableInputStream.unread(firstByte); File tempFile = File.createTempFile( UUID.randomUUID().toString().concat("-part").concat(String.valueOf(partNumber)), "tmp"); tempFile.deleteOnExit(); OutputStream os = null; try { os = new BufferedOutputStream(new FileOutputStream(tempFile.getAbsolutePath())); if (isFirstChunck == true) { partSize = IOUtils.copyLarge(firstChunck, os, 0, (BINARY_PART_SIZE_5MB)); isFirstChunck = false; } else { partSize = IOUtils.copyLarge(chunckableInputStream, os, 0, (BINARY_PART_SIZE_5MB)); } written += partSize; if (written > BINARY_PART_SIZE_5MB * 1024) { // 5GB overSizeLimit = true; logger.error("OVERSIZED FILE ({}). 
STARTING ABORT", written); break; } } finally { IOUtils.closeQuietly(os); } FileInputStream chunk = new FileInputStream(tempFile); Boolean isLastPart = -1 == (firstByte = chunckableInputStream.read()); if (!isLastPart) { chunckableInputStream.unread(firstByte); } String blockId = Base64.encodeBase64String( String.format("BlockId%07d", partNumber).getBytes(StandardCharsets.UTF_8)); BlockEntry block = new BlockEntry(blockId); blockList.add(block); blob.uploadBlock(blockId, chunk, partSize); partNumber++; chunk.close(); } blob.commitBlockList(blockList); blob.downloadAttributes(); BlobProperties properties = blob.getProperties(); properties.setContentType(binary.getContentType()); blob.uploadProperties(); logger.debug("commitBlockList."); } if (blob.exists()) { rtnBinary = createReturnBinaryInfo(blob); } else { rtnBinary = binary; } } catch (StorageException se) { throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.UPLOAD_FAIL, se, binary.getFileName()); } catch (URISyntaxException ue) { throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.UPLOAD_FAIL, ue, binary.getFileName()); } catch (FileNotFoundException fe) { throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, fe, binary.getFileName()); } catch (IOException ioe) { throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, ioe, binary.getFileName()); } finally { if (inStream != null) { try { inStream.close(); } catch (Exception e) { } } } long endSingle = System.currentTimeMillis(); logger.info("{} Geted : {} ms\n", binary.getFileName(), (endSingle - startSingle)); logger.info("ABS update method: end."); return rtnBinary; }
From source file:com.nominanuda.web.http.HttpCoreHelper.java
public HttpMessage deserialize(InputStream is) throws IOException, HttpException {
    PushbackInputStream pis = new PushbackInputStream(is, 4);
    byte[] bb = new byte[4];
    Check.illegalargument.assertTrue(4 == pis.read(bb), "premature end of stream");
    pis.unread(bb);
    return bb[0] == 'H' && bb[1] == 'T' && bb[2] == 'T' && bb[3] == 'P'
            ? deserializeResponse(pis)
            : deserializeRequest(pis);
}
From source file:com.temenos.interaction.media.odata.xml.atom.AtomXMLProvider.java
/**
 * Method to verify whether the received stream has content or is empty.
 *
 * @param stream stream to check
 * @return the verified stream, or null if the stream is null or empty
 * @throws IOException
 */
private InputStream verifyContentReceieved(InputStream stream) throws IOException {
    if (stream == null) { // check if it is null
        LOGGER.debug("Request stream received as null");
        return null;
    } else if (stream.markSupported()) { // check if the stream supports mark/reset
        // mark() and read the first byte just to check
        stream.mark(1);
        final int bytesRead = stream.read(new byte[1]);
        if (bytesRead != -1) { // stream not empty
            stream.reset(); // reset the stream as if untouched
            return stream;
        } else { // stream empty
            LOGGER.debug("Request received with empty body");
            return null;
        }
    } else {
        // Panic! this stream does not support mark/reset, try with
        // PushbackInputStream as a last resort
        int bytesRead;
        PushbackInputStream pbs = new PushbackInputStream(stream);
        if ((bytesRead = pbs.read()) != -1) {
            // contents detected, unread and return
            pbs.unread(bytesRead);
            return pbs;
        } else {
            // empty stream detected
            LOGGER.debug("Request received with empty body!");
            return null;
        }
    }
}
From source file:com.digitalpebble.storm.crawler.protocol.http.HttpResponse.java
private void parseHeaders(PushbackInputStream in, StringBuffer line) throws IOException, HttpException {
    while (readLine(in, line, true) != 0) {
        // handle HTTP responses with missing blank line after headers
        int pos;
        if (((pos = line.indexOf("<!DOCTYPE")) != -1) || ((pos = line.indexOf("<HTML")) != -1)
                || ((pos = line.indexOf("<html")) != -1)) {
            in.unread(line.substring(pos).getBytes(StandardCharsets.UTF_8));
            line.setLength(pos);
            try {
                // TODO: (CM) We don't know the header names here
                // since we're just handling them generically. It would
                // be nice to provide some sort of mapping function here
                // for the returned header names to the standard metadata
                // names in the ParseData class
                processHeaderLine(line);
            } catch (Exception e) {
                // fixme: HttpProtocol.LOGGER.warn("Error: ", e);
            }
            return;
        }
        processHeaderLine(line);
    }
}
From source file:com.handywedge.binarystore.store.aws.BinaryStoreManagerImpl.java
@Override
public BinaryInfo upload(StorageInfo storage, BinaryInfo binary, InputStream inStream)
        throws StoreException {
    logger.debug("storage={}", storage);
    logger.debug("binary={}", binary);

    AmazonS3 s3client = getS3Client(binary.getBucketName());

    ObjectMetadata oMetadata = new ObjectMetadata();
    oMetadata.setContentType(binary.getContentType());

    // initiate the multipart upload
    InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(binary.getBucketName(),
            binary.getFileName(), oMetadata);
    InitiateMultipartUploadResult initResponse = s3client.initiateMultipartUpload(initRequest);

    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        long written = IOUtils.copyLarge(inStream, baos, 0, BINARY_PART_SIZE_5MB);
        byte[] data = baos.toByteArray();
        InputStream awsInputStream = new ByteArrayInputStream(data);

        if (written < BINARY_PART_SIZE_5MB) {
            oMetadata.setContentLength(written);
            s3client.putObject(binary.getBucketName(), binary.getFileName(), awsInputStream, oMetadata);
        } else {
            int firstByte = 0;
            int partNumber = 1;
            boolean isFirstChunck = true;
            boolean overSizeLimit = false;
            List<PartETag> partETags = new ArrayList<PartETag>();
            InputStream firstChunck = new ByteArrayInputStream(data);
            PushbackInputStream chunckableInputStream = new PushbackInputStream(inStream, 1);

            long maxSize = BINARY_PART_SIZE_5MB * 1024;
            String maxSizeStr = "5GB";
            String prefix = MDC.get("requestId");
            while (-1 != (firstByte = chunckableInputStream.read())) {
                long partSize = 0;
                // put the probe byte back so the part contains it
                chunckableInputStream.unread(firstByte);

                File tempFile = File.createTempFile(
                        prefix.concat("-part").concat(String.valueOf(partNumber)), null);
                tempFile.deleteOnExit();
                try (OutputStream os = new BufferedOutputStream(
                        new FileOutputStream(tempFile.getAbsolutePath()))) {
                    if (isFirstChunck) {
                        partSize = IOUtils.copyLarge(firstChunck, os, 0, BINARY_PART_SIZE_5MB);
                        isFirstChunck = false;
                    } else {
                        partSize = IOUtils.copyLarge(chunckableInputStream, os, 0, BINARY_PART_SIZE_5MB);
                    }
                    written += partSize;

                    if (written > maxSize) { // 5GB
                        overSizeLimit = true;
                        logger.warn("OVERSIZED FILE ({}). STARTING ABORT", written);
                        break;
                    }
                }

                FileInputStream chunk = new FileInputStream(tempFile);
                // peek one byte ahead to detect the last part, then push it back
                Boolean isLastPart = -1 == (firstByte = chunckableInputStream.read());
                if (!isLastPart) {
                    chunckableInputStream.unread(firstByte);
                }

                oMetadata.setContentLength(partSize);
                UploadPartRequest uploadRequest = new UploadPartRequest().withBucketName(binary.getBucketName())
                        .withKey(binary.getFileName()).withUploadId(initResponse.getUploadId())
                        .withObjectMetadata(oMetadata).withInputStream(chunk).withPartSize(partSize)
                        .withPartNumber(partNumber).withLastPart(isLastPart);

                UploadPartResult result = s3client.uploadPart(uploadRequest);
                partETags.add(result.getPartETag());
                partNumber++;
            }

            if (overSizeLimit) {
                ListMultipartUploadsRequest listRequest = new ListMultipartUploadsRequest(
                        binary.getBucketName());
                MultipartUploadListing listResult = s3client.listMultipartUploads(listRequest);

                int timesIterated = 20;
                // loop and abort all the multipart uploads
                while (listResult.getMultipartUploads().size() != 0 && timesIterated > 0) {
                    s3client.abortMultipartUpload(new AbortMultipartUploadRequest(binary.getBucketName(),
                            binary.getFileName(), initResponse.getUploadId()));
                    Thread.sleep(1000);
                    timesIterated--;
                    listResult = s3client.listMultipartUploads(listRequest);
                    logger.debug("Files that haven't been aborted are: {}",
                            listResult.getMultipartUploads().listIterator().toString());
                }
                if (timesIterated == 0) {
                    logger.warn("Files parts that couldn't be aborted in 20 seconds are:");
                    Iterator<MultipartUpload> multipartUploadIterator = listResult.getMultipartUploads()
                            .iterator();
                    while (multipartUploadIterator.hasNext()) {
                        logger.warn(multipartUploadIterator.next().getKey());
                    }
                }
                throw new StoreException(HttpStatus.SC_REQUEST_TOO_LONG, ErrorClassification.UPLOAD_TOO_LARGE,
                        maxSizeStr);
            } else {
                CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest(
                        binary.getBucketName(), binary.getFileName(), initResponse.getUploadId(), partETags);
                CompleteMultipartUploadResult comMPUResult = s3client.completeMultipartUpload(compRequest);
                logger.debug("CompleteMultipartUploadResult={}", comMPUResult);
            }
        }
    } catch (AmazonServiceException ase) {
        s3client.abortMultipartUpload(new AbortMultipartUploadRequest(binary.getBucketName(),
                binary.getFileName(), initResponse.getUploadId()));
        throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.UPLOAD_FAIL, ase,
                binary.toString());
    } catch (AmazonClientException ace) {
        s3client.abortMultipartUpload(new AbortMultipartUploadRequest(binary.getBucketName(),
                binary.getFileName(), initResponse.getUploadId()));
        throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.UPLOAD_FAIL, ace,
                binary.toString());
    } catch (IOException ioe) {
        throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, ioe,
                binary.toString());
    } catch (InterruptedException itre) {
        throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, itre,
                binary.toString());
    } finally {
        if (inStream != null) {
            try {
                inStream.close();
            } catch (Exception e) {
                // ignore
            }
        }
    }

    return getBinaryInfo(s3client, binary.getBucketName(), binary.getFileName());
}
From source file:com.github.lucapino.sheetmaker.renderer.GmTemplateRenderer.java
private InputStream checkForUtf8BOMAndDiscardIfAny(InputStream inputStream) throws IOException {
    PushbackInputStream pushbackInputStream = new PushbackInputStream(new BufferedInputStream(inputStream), 3);
    byte[] bom = new byte[3];
    int bytesRead = pushbackInputStream.read(bom);
    // push the bytes back unless they are exactly the UTF-8 BOM (EF BB BF)
    if (bytesRead > 0
            && !(bytesRead == 3 && bom[0] == (byte) 0xEF && bom[1] == (byte) 0xBB && bom[2] == (byte) 0xBF)) {
        // unread only the bytes actually read; unread(bom) after a short read
        // would push back stale zero bytes from the buffer
        pushbackInputStream.unread(bom, 0, bytesRead);
    }
    return pushbackInputStream;
}
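A pitfall that cuts across these examples: the pushback buffer size is fixed at construction (a single byte for the one-argument constructor), and unread(byte[] b) throws an IOException when the array does not fit. A minimal illustration follows; the class name and values are hypothetical, not from the sources above.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.PushbackInputStream;

public class PushbackCapacityDemo {
    public static void main(String[] args) throws IOException {
        byte[] data = { 1, 2, 3 };
        // one-argument wrapper: pushback capacity of exactly one byte
        PushbackInputStream in = new PushbackInputStream(new ByteArrayInputStream(data));
        byte[] twoBytes = new byte[2];
        in.read(twoBytes);
        try {
            in.unread(twoBytes); // two bytes cannot fit in a one-byte buffer
        } catch (IOException e) {
            System.out.println("unread failed: " + e.getMessage());
        }
        in.unread(twoBytes[1]); // a single byte fits
        System.out.println(in.read()); // 2
    }
}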