List of usage examples for java.io PushbackInputStream read
public int read() throws IOException
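Before the real-world examples below, here is a minimal, self-contained sketch (class name and sample data are illustrative, not from any of the projects listed) of the read()/unread() round-trip that most of them rely on: read one byte to probe the stream, then push it back so downstream consumers still see the full content.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.PushbackInputStream;

public class PushbackReadDemo {
    public static void main(String[] args) throws IOException {
        // Wrap any InputStream; the default pushback buffer holds one byte.
        PushbackInputStream in = new PushbackInputStream(
                new ByteArrayInputStream("hello".getBytes()));

        int first = in.read();    // consume one byte to peek at it
        if (first == -1) {
            System.out.println("stream is empty");
            return;
        }
        in.unread(first);         // push it back; the stream is "untouched" again

        // Subsequent reads see the full original content: prints "hello".
        int b;
        while ((b = in.read()) != -1) {
            System.out.print((char) b);
        }
    }
}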
From source file:edu.mayo.trilliumbridge.webapp.TransformerController.java
/**
 * From http://stackoverflow.com/a/19137900/656853
 */
private InputStream checkStreamIsNotEmpty(InputStream inputStream) throws IOException {
    if (inputStream == null) {
        throw new UserInputException(NO_CONTENT_ERROR_MSG);
    }
    PushbackInputStream pushbackInputStream = new PushbackInputStream(inputStream);
    int b = pushbackInputStream.read();
    if (b == -1) {
        throw new UserInputException("No file or XML content body sent.");
    }
    pushbackInputStream.unread(b);
    return pushbackInputStream;
}
From source file:com.digitalpebble.behemoth.io.warc.HttpResponse.java
private int parseStatusLine(PushbackInputStream in, StringBuilder line) throws IOException {
    // skip first character if "\n"
    if (peek(in) == '\n') {
        in.read();
    }
    readLine(in, line, false);

    int codeStart = line.indexOf(" ");
    int codeEnd = line.indexOf(" ", codeStart + 1);

    // handle lines with no plaintext result code, ie:
    // "HTTP/1.1 200" vs "HTTP/1.1 200 OK"
    if (codeEnd == -1)
        codeEnd = line.length();

    int code;
    try {
        code = Integer.parseInt(line.substring(codeStart + 1, codeEnd));
    } catch (NumberFormatException e) {
        throw new IOException("bad status line '" + line + "': " + e.getMessage(), e);
    }
    return code;
}
From source file:in.arun.faces.fo.pdf.FoOutputStream.java
private PdfResult buildPdf(FoOutputStream foOutput) {
    byte[] responseText = foOutput.getBytes();
    if (responseText == null) {
        return null;
    }

    PdfResult result = new PdfResult();

    try {
        PushbackInputStream pbis = new PushbackInputStream(
                new BufferedInputStream(new ByteArrayInputStream(responseText)));
        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        // Skip contentType text/html - Looking for bug fix!
        // pbis.skip(9);
        while (pbis.available() > 0) {
            pbis.mark(1);
            if (pbis.read() == '<') {
                pbis.unread('<');
                break;
            }
        }

        // Transforming XML to PDF
        FopFactory fopFactory = FopFactory.newInstance();
        Fop fop = fopFactory.newFop(MimeConstants.MIME_PDF, baos);
        TransformerFactory tFactory = TransformerFactory.newInstance();
        Transformer transformer = tFactory.newTransformer();
        Source src = new StreamSource(pbis);
        Result res = new SAXResult(fop.getDefaultHandler());
        transformer.transform(src, res);

        result.contentType = MimeConstants.MIME_PDF;
        result.content = baos.toByteArray();
    } catch (IOException | FOPException | TransformerException x) {
        logger.log(Level.SEVERE, "Error while trying to create PDF.", x);

        StringBuilder builder = new StringBuilder();
        builder.append(x.getMessage());
        builder.append("<br/>");
        builder.append("<pre>");
        String formattedFo = new String(responseText);
        // escape markup so the raw FO source displays inside the <pre> block
        formattedFo = formattedFo.replaceAll("<", "&lt;");
        formattedFo = formattedFo.replaceAll(">", "&gt;");
        builder.append(formattedFo);
        builder.append("</pre>");
        result.contentType = "text/html";
        result.content = builder.toString().getBytes();
    }
    return result;
}
From source file:com.handywedge.binarystore.store.azure.BinaryStoreManagerImpl.java
@SuppressWarnings("unused") @Override/*from ww w.j av a 2 s . co m*/ public BinaryInfo upload(StorageInfo storage, BinaryInfo binary, InputStream inStream) throws StoreException { logger.info("ABS update method: start."); logger.debug("" + storage.toString()); logger.debug("?" + binary.toString()); long startSingle = System.currentTimeMillis(); CloudBlobClient bClient = getABSClient(binary.getBucketName(), true); BinaryInfo rtnBinary = new BinaryInfo(); try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); long written = IOUtils.copyLarge(inStream, baos, 0, BINARY_PART_SIZE_5MB); byte[] data = baos.toByteArray(); InputStream awsInputStream = new ByteArrayInputStream(data); CloudBlockBlob blob = bClient.getContainerReference(binary.getBucketName()) .getBlockBlobReference(binary.getFileName()); if (written < BINARY_PART_SIZE_5MB) { BlobOutputStream blobOutputStream = blob.openOutputStream(); int next = awsInputStream.read(); while (next != -1) { blobOutputStream.write(next); next = awsInputStream.read(); } blobOutputStream.close(); blob.downloadAttributes(); BlobProperties properties = blob.getProperties(); properties.setContentType(binary.getContentType()); blob.uploadProperties(); } else { int firstByte = 0; int partNumber = 1; Boolean isFirstChunck = true; Boolean overSizeLimit = false; List<BlockEntry> blockList = new ArrayList<BlockEntry>(); InputStream firstChunck = new ByteArrayInputStream(data); PushbackInputStream chunckableInputStream = new PushbackInputStream(inStream, 1); while (-1 != (firstByte = chunckableInputStream.read())) { long partSize = 0; chunckableInputStream.unread(firstByte); File tempFile = File.createTempFile( UUID.randomUUID().toString().concat("-part").concat(String.valueOf(partNumber)), "tmp"); tempFile.deleteOnExit(); OutputStream os = null; try { os = new BufferedOutputStream(new FileOutputStream(tempFile.getAbsolutePath())); if (isFirstChunck == true) { partSize = IOUtils.copyLarge(firstChunck, os, 0, (BINARY_PART_SIZE_5MB)); isFirstChunck = false; } else { partSize = IOUtils.copyLarge(chunckableInputStream, os, 0, (BINARY_PART_SIZE_5MB)); } written += partSize; if (written > BINARY_PART_SIZE_5MB * 1024) { // 5GB overSizeLimit = true; logger.error("OVERSIZED FILE ({}). 
STARTING ABORT", written); break; } } finally { IOUtils.closeQuietly(os); } FileInputStream chunk = new FileInputStream(tempFile); Boolean isLastPart = -1 == (firstByte = chunckableInputStream.read()); if (!isLastPart) { chunckableInputStream.unread(firstByte); } String blockId = Base64.encodeBase64String( String.format("BlockId%07d", partNumber).getBytes(StandardCharsets.UTF_8)); BlockEntry block = new BlockEntry(blockId); blockList.add(block); blob.uploadBlock(blockId, chunk, partSize); partNumber++; chunk.close(); } blob.commitBlockList(blockList); blob.downloadAttributes(); BlobProperties properties = blob.getProperties(); properties.setContentType(binary.getContentType()); blob.uploadProperties(); logger.debug("commitBlockList."); } if (blob.exists()) { rtnBinary = createReturnBinaryInfo(blob); } else { rtnBinary = binary; } } catch (StorageException se) { throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.UPLOAD_FAIL, se, binary.getFileName()); } catch (URISyntaxException ue) { throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.UPLOAD_FAIL, ue, binary.getFileName()); } catch (FileNotFoundException fe) { throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, fe, binary.getFileName()); } catch (IOException ioe) { throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, ioe, binary.getFileName()); } finally { if (inStream != null) { try { inStream.close(); } catch (Exception e) { } } } long endSingle = System.currentTimeMillis(); logger.info("{} Geted : {} ms\n", binary.getFileName(), (endSingle - startSingle)); logger.info("ABS update method: end."); return rtnBinary; }
From source file:com.temenos.interaction.media.odata.xml.atom.AtomXMLProvider.java
/**
 * Method to verify whether the received stream has content or is empty.
 * @param stream Stream to check
 * @return verified stream
 * @throws IOException
 */
private InputStream verifyContentReceieved(InputStream stream) throws IOException {
    if (stream == null) { // Check if it is null
        LOGGER.debug("Request stream received as null");
        return null;
    } else if (stream.markSupported()) { // Check if the stream supports mark/reset
        // mark() and read the first byte just to check
        stream.mark(1);
        final int bytesRead = stream.read(new byte[1]);
        if (bytesRead != -1) { // stream not empty
            stream.reset(); // reset the stream as if untouched
            return stream;
        } else { // stream empty
            LOGGER.debug("Request received with empty body");
            return null;
        }
    } else {
        // This stream does not support mark/reset; try a PushbackInputStream as a last resort
        int bytesRead;
        PushbackInputStream pbs = new PushbackInputStream(stream);
        if ((bytesRead = pbs.read()) != -1) {
            // Contents detected, unread and return
            pbs.unread(bytesRead);
            return pbs;
        } else {
            // Empty stream detected
            LOGGER.debug("Request received with empty body!");
            return null;
        }
    }
}
From source file:com.handywedge.binarystore.store.aws.BinaryStoreManagerImpl.java
@Override
public BinaryInfo upload(StorageInfo storage, BinaryInfo binary, InputStream inStream)
        throws StoreException {
    logger.debug("storage={}", storage);
    logger.debug("binary={}", binary);

    AmazonS3 s3client = getS3Client(binary.getBucketName());
    ObjectMetadata oMetadata = new ObjectMetadata();
    oMetadata.setContentType(binary.getContentType());

    // initiate the multipart upload
    InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(binary.getBucketName(),
            binary.getFileName(), oMetadata);
    InitiateMultipartUploadResult initResponse = s3client.initiateMultipartUpload(initRequest);

    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        long written = IOUtils.copyLarge(inStream, baos, 0, BINARY_PART_SIZE_5MB);
        byte[] data = baos.toByteArray();
        InputStream awsInputStream = new ByteArrayInputStream(data);

        if (written < BINARY_PART_SIZE_5MB) {
            // Small upload: a single putObject call is enough.
            oMetadata.setContentLength(written);
            s3client.putObject(binary.getBucketName(), binary.getFileName(), awsInputStream, oMetadata);
        } else {
            int firstByte = 0;
            int partNumber = 1;
            boolean isFirstChunk = true;
            boolean overSizeLimit = false;
            List<PartETag> partETags = new ArrayList<PartETag>();
            InputStream firstChunk = new ByteArrayInputStream(data);
            PushbackInputStream chunkableInputStream = new PushbackInputStream(inStream, 1);

            long maxSize = BINARY_PART_SIZE_5MB * 1024;
            String maxSizeStr = "5GB";
            String prefix = MDC.get("requestId");

            while (-1 != (firstByte = chunkableInputStream.read())) {
                long partSize = 0;
                chunkableInputStream.unread(firstByte);
                File tempFile = File.createTempFile(
                        prefix.concat("-part").concat(String.valueOf(partNumber)), null);
                tempFile.deleteOnExit();
                try (OutputStream os = new BufferedOutputStream(
                        new FileOutputStream(tempFile.getAbsolutePath()))) {
                    if (isFirstChunk) {
                        partSize = IOUtils.copyLarge(firstChunk, os, 0, BINARY_PART_SIZE_5MB);
                        isFirstChunk = false;
                    } else {
                        partSize = IOUtils.copyLarge(chunkableInputStream, os, 0, BINARY_PART_SIZE_5MB);
                    }
                    written += partSize;
                    if (written > maxSize) { // 5GB
                        overSizeLimit = true;
                        logger.warn("OVERSIZED FILE ({}). STARTING ABORT", written);
                        break;
                    }
                }

                FileInputStream chunk = new FileInputStream(tempFile);
                boolean isLastPart = -1 == (firstByte = chunkableInputStream.read());
                if (!isLastPart) {
                    chunkableInputStream.unread(firstByte);
                }

                oMetadata.setContentLength(partSize);
                UploadPartRequest uploadRequest = new UploadPartRequest().withBucketName(binary.getBucketName())
                        .withKey(binary.getFileName()).withUploadId(initResponse.getUploadId())
                        .withObjectMetadata(oMetadata).withInputStream(chunk).withPartSize(partSize)
                        .withPartNumber(partNumber).withLastPart(isLastPart);
                UploadPartResult result = s3client.uploadPart(uploadRequest);
                partETags.add(result.getPartETag());
                partNumber++;
            }

            if (overSizeLimit) {
                ListMultipartUploadsRequest listRequest = new ListMultipartUploadsRequest(
                        binary.getBucketName());
                MultipartUploadListing listResult = s3client.listMultipartUploads(listRequest);

                int timesIterated = 20;
                // loop and abort all the multipart uploads
                while (listResult.getMultipartUploads().size() != 0 && timesIterated > 0) {
                    s3client.abortMultipartUpload(new AbortMultipartUploadRequest(binary.getBucketName(),
                            binary.getFileName(), initResponse.getUploadId()));
                    Thread.sleep(1000);
                    timesIterated--;
                    listResult = s3client.listMultipartUploads(listRequest);
                    logger.debug("Files that haven't been aborted are: {}",
                            listResult.getMultipartUploads().listIterator().toString());
                }
                if (timesIterated == 0) {
                    logger.warn("File parts that couldn't be aborted in 20 seconds are:");
                    Iterator<MultipartUpload> multipartUploadIterator = listResult.getMultipartUploads()
                            .iterator();
                    while (multipartUploadIterator.hasNext()) {
                        logger.warn(multipartUploadIterator.next().getKey());
                    }
                }
                throw new StoreException(HttpStatus.SC_REQUEST_TOO_LONG, ErrorClassification.UPLOAD_TOO_LARGE,
                        maxSizeStr);
            } else {
                CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest(
                        binary.getBucketName(), binary.getFileName(), initResponse.getUploadId(), partETags);
                CompleteMultipartUploadResult comMPUResult = s3client.completeMultipartUpload(compRequest);
                logger.debug("CompleteMultipartUploadResult={}", comMPUResult);
            }
        }
    } catch (AmazonServiceException ase) {
        s3client.abortMultipartUpload(new AbortMultipartUploadRequest(binary.getBucketName(),
                binary.getFileName(), initResponse.getUploadId()));
        throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.UPLOAD_FAIL, ase,
                binary.toString());
    } catch (AmazonClientException ace) {
        s3client.abortMultipartUpload(new AbortMultipartUploadRequest(binary.getBucketName(),
                binary.getFileName(), initResponse.getUploadId()));
        throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.UPLOAD_FAIL, ace,
                binary.toString());
    } catch (IOException ioe) {
        throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, ioe,
                binary.toString());
    } catch (InterruptedException itre) {
        throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, itre,
                binary.toString());
    } finally {
        if (inStream != null) {
            try {
                inStream.close();
            } catch (Exception e) {
                // ignore
            }
        }
    }
    return getBinaryInfo(s3client, binary.getBucketName(), binary.getFileName());
}
From source file:de.unisb.cs.st.javaslicer.traceResult.TraceResult.java
public TraceResult(File filename) throws IOException {
    final MultiplexedFileReader file = new MultiplexedFileReader(filename);
    if (file.getStreamIds().size() < 2)
        throw new IOException("corrupted data");

    final MultiplexInputStream readClassesStream = file.getInputStream(0);
    if (readClassesStream == null)
        throw new IOException("corrupted data");
    PushbackInputStream pushBackInput = new PushbackInputStream(
            new BufferedInputStream(new GZIPInputStream(readClassesStream, 512), 512), 1);
    final DataInputStream readClassesInputStream = new DataInputStream(pushBackInput);
    final ArrayList<ReadClass> readClasses0 = new ArrayList<ReadClass>();
    final StringCacheInput stringCache = new StringCacheInput();
    int testRead;
    while ((testRead = pushBackInput.read()) != -1) {
        pushBackInput.unread(testRead);
        readClasses0.add(ReadClass.readFrom(readClassesInputStream, stringCache));
    }
    readClasses0.trimToSize();
    Collections.sort(readClasses0);
    this.readClasses = readClasses0;
    this.instructions = getInstructionArray(readClasses0);

    final MultiplexInputStream threadTracersStream = file.getInputStream(1);
    if (threadTracersStream == null)
        throw new IOException("corrupted data");
    pushBackInput = new PushbackInputStream(
            new BufferedInputStream(new GZIPInputStream(threadTracersStream, 512), 512), 1);
    final DataInputStream threadTracersInputStream = new DataInputStream(pushBackInput);
    final ArrayList<ThreadTraceResult> threadTraces0 = new ArrayList<ThreadTraceResult>();
    while ((testRead = pushBackInput.read()) != -1) {
        pushBackInput.unread(testRead);
        threadTraces0.add(ThreadTraceResult.readFrom(threadTracersInputStream, this, file));
    }
    threadTraces0.trimToSize();
    Collections.sort(threadTraces0);
    this.threadTraces = threadTraces0;
}
From source file:com.zimbra.cs.pop3.Pop3Handler.java
private void sendMessage(InputStream is, int maxNumBodyLines) throws IOException {
    boolean inBody = false;
    int numBodyLines = 0;

    PushbackInputStream stream = new PushbackInputStream(is);
    int c;
    boolean startOfLine = true;
    int lineLength = 0;

    while ((c = stream.read()) != -1) {
        if (c == '\r' || c == '\n') {
            if (c == '\r') {
                int peek = stream.read();
                if (peek != '\n' && peek != -1)
                    stream.unread(peek);
            }
            if (!inBody) {
                if (lineLength == 0)
                    inBody = true;
            } else {
                numBodyLines++;
            }
            startOfLine = true;
            lineLength = 0;
            output.write(LINE_SEPARATOR);
            if (inBody && numBodyLines >= maxNumBodyLines) {
                break;
            }
            continue;
        } else if (c == TERMINATOR_C && startOfLine) {
            output.write(c); // we'll end up writing it twice
        }
        if (startOfLine)
            startOfLine = false;
        lineLength++;
        output.write(c);
    }

    if (lineLength != 0) {
        output.write(LINE_SEPARATOR);
    }
    output.write(TERMINATOR_BYTE);
    output.write(LINE_SEPARATOR);
    output.flush();
}
From source file:com.temenos.interaction.media.hal.HALProvider.java
/**
 * Reads a Hypertext Application Language (HAL) representation of
 * {@link EntityResource} from the input stream.
 *
 * @precondition {@link InputStream} contains a valid HAL <resource/> document
 * @postcondition {@link EntityResource} will be constructed and returned.
 * @invariant valid InputStream
 */
@Override
public RESTResource readFrom(Class<RESTResource> type, Type genericType, Annotation[] annotations,
        MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream entityStream)
        throws IOException, WebApplicationException {
    /* To detect if the stream is empty (a valid case since an input entity is
     * sometimes optional), wrap in a PushbackInputStream before passing on */
    PushbackInputStream wrappedStream = new PushbackInputStream(entityStream);
    int firstByte = wrappedStream.read();
    uriInfo = new UriInfoImpl(uriInfo);
    if (firstByte == -1) {
        // No data provided
        return null;
    } else {
        // There is something in the body, so we will parse it. It is required
        // to be a valid JSON object. First replace the byte we borrowed.
        wrappedStream.unread(firstByte);

        // Parse hal+json into an Entity object
        Entity entity;
        try {
            entity = buildEntityFromHal(wrappedStream, mediaType);
        } catch (MethodNotAllowedException e) {
            if (logger.isDebugEnabled()) {
                logger.debug("Error building the entity.", e);
            }
            StringBuilder allowHeader = new StringBuilder();
            Set<String> allowedMethods = new HashSet<String>(e.getAllowedMethods());
            allowedMethods.add("HEAD");
            allowedMethods.add("OPTIONS");
            for (String method : allowedMethods) {
                allowHeader.append(method);
                allowHeader.append(", ");
            }
            Response response = Response.status(405)
                    .header("Allow", allowHeader.toString().substring(0, allowHeader.length() - 2))
                    .build();
            throw new WebApplicationException(response);
        }
        return new EntityResource<Entity>(entity);
    }
}