Usage examples for java.io.ByteArrayOutputStream.size()
public synchronized int size()
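size() returns the count of valid bytes currently held in the stream's internal buffer; it does not allocate or copy anything. Before the real-world examples below, a minimal, self-contained sketch of the behavior (the class name SizeDemo is just for illustration):

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class SizeDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        bos.write(new byte[] { 1, 2, 3 }); // three bytes buffered
        System.out.println(bos.size());    // prints 3
        bos.write(4);                      // one more byte
        System.out.println(bos.size());    // prints 4
        bos.reset();                       // discards the buffered bytes
        System.out.println(bos.size());    // prints 0
    }
}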
From source file:gov.utah.dts.det.ccl.actions.reports.ReportsPrintAction.java
@Action(value = "print-license-renewal-list") public void doPrintLicenseRenewalList() { Person person = null;//from w w w . j a v a 2 s . com if (specialistId != null) { person = personService.getPerson(specialistId); } if (person == null || person.getId() == null) { return; } try { // Default endDate to the last day of the current month if (endDate == null) { Calendar cal = Calendar.getInstance(); int maxDay = cal.getActualMaximum(Calendar.DAY_OF_MONTH); cal.set(Calendar.DAY_OF_MONTH, maxDay); endDate = cal.getTime(); } List<FacilityLicenseView> licenses = facilityService.getRenewalLicensesBySpecialist(endDate, specialistId); ByteArrayOutputStream ba = LicenseRenewalsListReport.generate(person, endDate, licenses); if (ba != null && ba.size() > 0) { // This is where the response is set String filename = ""; if (person != null) { if (StringUtils.isNotBlank(person.getFirstName())) { filename += person.getFirstName(); } if (StringUtils.isNotBlank(person.getLastName())) { if (filename.length() > 0) { filename += "_"; } filename += person.getLastName(); } } if (filename.length() > 0) { filename += "_"; } filename += "license_renewal_list.pdf"; sendToResponse(ba, filename); } } catch (Exception ex) { generateErrorPdf(); } }
From source file:gov.utah.dts.det.ccl.actions.reports.ReportsPrintAction.java
@Action(value = "print-livescans-issued") public void doPrintLivescansIssued() { Person person = null;/* w ww .j av a2s . c o m*/ if (technicianId != null) { person = personService.getPerson(technicianId); } if (person == null || person.getId() == null) { return; } try { List<TrackingRecordScreening> screenings = screeningService.getLivescansIssued(technicianId, startDate, endDate); ByteArrayOutputStream ba = LivescansIssuedReport.generate(person, startDate, endDate, screenings); if (ba != null && ba.size() > 0) { // This is where the response is set String filename = ""; if (person != null) { if (StringUtils.isNotBlank(person.getFirstName())) { filename += person.getFirstName(); } if (StringUtils.isNotBlank(person.getLastName())) { if (filename.length() > 0) { filename += "_"; } filename += person.getLastName(); } } if (filename.length() > 0) { filename += "_"; } filename += "livescans_issued.pdf"; sendToResponse(ba, filename); } } catch (Exception ex) { generateErrorPdf(); } }
From source file:org.apache.hadoop.hbase.io.hfile.TestIncrementalEncoding.java
public void testEncoding(DataBlockEncoding dataEncoding, boolean includeMemstoreTS, int kvType) throws IOException {
    LOG.info("encoding=" + dataEncoding + ", includeMemstoreTS=" + includeMemstoreTS + ", " + "kvType=" + kvType);
    HFileDataBlockEncoder blockEncoder = new HFileDataBlockEncoderImpl(dataEncoding);
    HFileBlock.Writer writerEncoded = new HFileBlock.Writer(null, blockEncoder, includeMemstoreTS);
    HFileBlock.Writer writerUnencoded = new HFileBlock.Writer(null, NoOpDataBlockEncoder.INSTANCE, includeMemstoreTS);
    writerEncoded.startWriting(BlockType.DATA);
    writerUnencoded.startWriting(BlockType.DATA);
    // Fill block with data
    long time = 1 << 10;
    while (writerEncoded.blockSizeWritten() < BLOCK_SIZE) {
        KeyValue kv;
        switch (kvType) {
        case 3:
            kv = new KeyValue(Bytes.toBytes(time), FAMILY, Bytes.toBytes(time), time, Bytes.toBytes(time));
            break;
        case 2:
            kv = new KeyValue(Bytes.toBytes("row"), FAMILY, Bytes.toBytes("qf" + time), 0, Bytes.toBytes("V"));
            break;
        case 1:
            kv = new KeyValue(Bytes.toBytes("row"), FAMILY, Bytes.toBytes("qf" + time), time, Bytes.toBytes("V" + time));
            break;
        default:
            kv = new KeyValue(Bytes.toBytes("row" + time), FAMILY, Bytes.toBytes("qf"), 0, Bytes.toBytes("Value"));
        }
        time++;
        appendEncoded(kv, writerEncoded);
        appendEncoded(kv, writerUnencoded);
    }
    ByteArrayOutputStream encoded = new ByteArrayOutputStream();
    writerEncoded.writeHeaderAndData(new DataOutputStream(encoded));
    ByteArrayOutputStream unencoded = new ByteArrayOutputStream();
    writerUnencoded.writeHeaderAndData(new DataOutputStream(unencoded));
    ByteArrayOutputStream encodedAgain = new ByteArrayOutputStream();
    DataOutputStream dataOut = new DataOutputStream(encodedAgain);
    int bytesToSkip = HFileBlock.HEADER_SIZE;
    ByteBuffer unencodedWithoutHeader = ByteBuffer
            .wrap(unencoded.toByteArray(), bytesToSkip, unencoded.size() - bytesToSkip).slice();
    dataEncoding.getEncoder().encodeKeyValues(dataOut, unencodedWithoutHeader, includeMemstoreTS);
    assertEquals(encodedAgain.size() + HFileBlock.HEADER_SIZE + dataEncoding.encodingIdSize(), encoded.size());
    byte[] en = encoded.toByteArray();
    byte[] en2 = encodedAgain.toByteArray();
    int shift = HFileBlock.HEADER_SIZE + dataEncoding.encodingIdSize();
    for (int i = 0; i < encodedAgain.size(); i++) {
        assertEquals("Byte" + i, en2[i], en[i + shift]);
    }
}
From source file:org.openrepose.nodeservice.httpcomponent.HttpComponentRequestProcessor.java
private int getEntityLength() throws IOException {
    if (StringUtilities.nullSafeEqualsIgnoreCase(sourceRequest.getHeader("transfer-encoding"), "chunked")
            || isConfiguredChunked) {
        return -1;
    } else {
        // todo: optimize so subsequent calls to this method do not need to read/copy the entity
        final ByteArrayOutputStream sourceEntity = new ByteArrayOutputStream();
        RawInputStreamReader.instance().copyTo(sourceRequest.getInputStream(), sourceEntity);
        final ServletInputStream readableEntity = new BufferedServletInputStream(
                new ByteArrayInputStream(sourceEntity.toByteArray()));
        sourceRequest = new HttpServletRequestWrapper(sourceRequest, readableEntity);
        return sourceEntity.size();
    }
}
From source file:org.gradle.caching.internal.packaging.impl.TarBuildCacheEntryPacker.java
private void packMetadata(OriginWriter writeMetadata, TarArchiveOutputStream tarOutput) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    writeMetadata.execute(baos);
    createTarEntry(METADATA_PATH, baos.size(), UnixPermissions.FILE_FLAG | UnixPermissions.DEFAULT_FILE_PERM, tarOutput);
    tarOutput.write(baos.toByteArray());
    tarOutput.closeArchiveEntry();
}
From source file:com.datatorrent.lib.io.HttpInputOperator.java
@Override
public void run() {
    while (super.isActive()) {
        try {
            ClientResponse response = resource.header("x-stream", "rockon").accept(MediaType.APPLICATION_JSON)
                    .get(ClientResponse.class);
            LOG.debug("Opening stream: " + resource);
            // media type check, if any, should be configuration based
            //if (!MediaType.APPLICATION_JSON_TYPE.equals(response.getType())) {
            //    LOG.error("Unexpected response type " + response.getType());
            //    response.close();
            //} else {
            InputStream is = response.getEntity(java.io.InputStream.class);
            while (true) {
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                byte[] bytes = new byte[255];
                int bytesRead;
                while ((bytesRead = is.read(bytes)) != -1) {
                    LOG.debug("read {} bytes", bytesRead);
                    bos.write(bytes, 0, bytesRead);
                    if (is.available() == 0 && bos.size() > 0) {
                        // give chance to process what we have before blocking on read
                        break;
                    }
                }
                if (processBytes(bos.toByteArray())) {
                    LOG.debug("End of chunked input stream.");
                    response.close();
                    break;
                }
                if (bytesRead == -1) {
                    LOG.error("Unexpected end of chunked input stream");
                    response.close();
                    break;
                }
                bos.reset();
            }
            //}
        } catch (Exception e) {
            LOG.error("Error reading from " + resource.getURI(), e);
        }
        try {
            Thread.sleep(500);
        } catch (InterruptedException e) {
            LOG.info("Exiting IO loop {}.", e.toString());
            break;
        }
    }
}
From source file:org.apache.sshd.PortForwardingLoadTest.java
@Test
public void testLocalForwardingPayload() throws Exception {
    final int NUM_ITERATIONS = 100;
    final String PAYLOAD_TMP = "This is significantly longer Test Data. This is significantly "
            + "longer Test Data. This is significantly longer Test Data. This is significantly "
            + "longer Test Data. This is significantly longer Test Data. This is significantly "
            + "longer Test Data. This is significantly longer Test Data. This is significantly "
            + "longer Test Data. This is significantly longer Test Data. This is significantly "
            + "longer Test Data. ";
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < 1000; i++) {
        sb.append(PAYLOAD_TMP);
    }
    final String PAYLOAD = sb.toString();
    Session session = createSession();
    final ServerSocket ss = new ServerSocket(0);
    int forwardedPort = ss.getLocalPort();
    int sinkPort = getFreePort();
    session.setPortForwardingL(sinkPort, "localhost", forwardedPort);
    final AtomicInteger conCount = new AtomicInteger(0);
    new Thread() {
        public void run() {
            try {
                for (int i = 0; i < NUM_ITERATIONS; ++i) {
                    Socket s = ss.accept();
                    conCount.incrementAndGet();
                    InputStream is = s.getInputStream();
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    byte[] buf = new byte[8192];
                    int l;
                    while (baos.size() < PAYLOAD.length() && (l = is.read(buf)) > 0) {
                        baos.write(buf, 0, l);
                    }
                    if (!PAYLOAD.equals(baos.toString())) {
                        assertEquals(PAYLOAD, baos.toString());
                    }
                    is = new ByteArrayInputStream(baos.toByteArray());
                    OutputStream os = s.getOutputStream();
                    while ((l = is.read(buf)) > 0) {
                        os.write(buf, 0, l);
                    }
                    s.close();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }.start();
    Thread.sleep(50);
    for (int i = 0; i < NUM_ITERATIONS; i++) {
        Socket s = null;
        try {
            LoggerFactory.getLogger(getClass()).info("Iteration {}", i);
            s = new Socket("localhost", sinkPort);
            s.getOutputStream().write(PAYLOAD.getBytes());
            s.getOutputStream().flush();
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            byte[] buf = new byte[8192];
            int l;
            while (baos.size() < PAYLOAD.length() && (l = s.getInputStream().read(buf)) > 0) {
                baos.write(buf, 0, l);
            }
            assertEquals(PAYLOAD, baos.toString());
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (s != null) {
                s.close();
            }
        }
    }
    session.delPortForwardingL(sinkPort);
}
From source file:codes.thischwa.c5c.impl.LocalConnector.java
@Override
public StreamContent resize(InputStream imageIn, String imageExt, Dimension dim) throws IOException {
    BufferedImage img = null;
    BufferedImage newImg = null;
    try {
        img = ImageIO.read(imageIn);
        newImg = Scalr.resize(img, Scalr.Method.BALANCED, Scalr.Mode.AUTOMATIC, dim.width, dim.height);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ImageIO.write(newImg, imageExt, baos);
        baos.flush();
        return buildStreamContent(new ByteArrayInputStream(baos.toByteArray()), baos.size());
    } catch (IllegalArgumentException | ImagingOpException e) {
        throw new IOException(e);
    } finally {
        if (img != null)
            img.flush();
        if (newImg != null)
            newImg.flush();
    }
}
From source file:com.ge.predix.solsvc.blobstore.bootstrap.BlobstoreClientImpl.java
/**
 * Adds a new Blob to the bound bucket in the Object Store.
 *
 * @param obj S3Object to be added
 */
@Override
public String saveBlob(S3Object obj) {
    if (obj == null) {
        this.log.error("put(): Empty file provided"); //$NON-NLS-1$
        throw new RuntimeException("File is null"); //$NON-NLS-1$
    }
    List<PartETag> partETags = new ArrayList<>();
    String bucket = this.blobstoreConfig.getBucketName();
    InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucket, obj.getKey());
    InitiateMultipartUploadResult initResponse = this.s3Client.initiateMultipartUpload(initRequest);
    try (InputStream is = obj.getObjectContent()) {
        int i = 1;
        int currentPartSize = 0;
        ByteArrayOutputStream tempBuffer = new ByteArrayOutputStream();
        int byteValue;
        while ((byteValue = is.read()) != -1) {
            tempBuffer.write(byteValue);
            currentPartSize = tempBuffer.size();
            if (currentPartSize == (50 * 1024 * 1024)) { // make this a const
                byte[] b = tempBuffer.toByteArray();
                ByteArrayInputStream byteStream = new ByteArrayInputStream(b);
                UploadPartRequest uploadPartRequest = new UploadPartRequest().withBucketName(bucket)
                        .withKey(obj.getKey()).withUploadId(initResponse.getUploadId()).withPartNumber(i++)
                        .withInputStream(byteStream).withPartSize(currentPartSize);
                partETags.add(this.s3Client.uploadPart(uploadPartRequest).getPartETag());
                tempBuffer.reset();
            }
        }
        this.log.info("currentPartSize: " + currentPartSize); //$NON-NLS-1$
        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentLength(currentPartSize);
        if (this.enableSSE) {
            objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
        }
        obj.setObjectMetadata(objectMetadata);
        if (i == 1 && currentPartSize < (5 * 1024 * 1024)) { // make this a const
            this.s3Client.abortMultipartUpload(
                    new AbortMultipartUploadRequest(bucket, obj.getKey(), initResponse.getUploadId()));
            byte[] b = tempBuffer.toByteArray();
            ByteArrayInputStream byteStream = new ByteArrayInputStream(b);
            objectMetadata.setContentType(getContentType(b));
            if (this.enableSSE) {
                objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
            }
            obj.setObjectMetadata(objectMetadata);
            PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, obj.getKey(), byteStream,
                    obj.getObjectMetadata());
            this.s3Client.putObject(putObjectRequest);
            ObjectMetadata meta = this.s3Client.getObjectMetadata(bucket, obj.getKey());
            Map<String, Object> headers = meta.getRawMetadata();
            for (Map.Entry<String, Object> entry : headers.entrySet()) {
                this.log.info("Object Metadata -- " + entry.getKey() + ": " + entry.getValue().toString()); //$NON-NLS-1$ //$NON-NLS-2$
            }
            return initResponse.getUploadId();
        }
        if (currentPartSize > 0 && currentPartSize <= (50 * 1024 * 1024)) { // make this a const
            byte[] b = tempBuffer.toByteArray();
            ByteArrayInputStream byteStream = new ByteArrayInputStream(b);
            this.log.info("currentPartSize: " + currentPartSize); //$NON-NLS-1$
            this.log.info("byteArray: " + b); //$NON-NLS-1$
            UploadPartRequest uploadPartRequest = new UploadPartRequest().withBucketName(bucket)
                    .withKey(obj.getKey()).withUploadId(initResponse.getUploadId()).withPartNumber(i)
                    .withInputStream(byteStream).withPartSize(currentPartSize);
            partETags.add(this.s3Client.uploadPart(uploadPartRequest).getPartETag());
        }
        CompleteMultipartUploadRequest completeMultipartUploadRequest = new CompleteMultipartUploadRequest()
                .withBucketName(bucket).withPartETags(partETags).withUploadId(initResponse.getUploadId())
                .withKey(obj.getKey());
        this.s3Client.completeMultipartUpload(completeMultipartUploadRequest);
        return initResponse.getUploadId();
    } catch (Exception e) {
        this.log.error("put(): Exception occurred in put(): " + e.getMessage()); //$NON-NLS-1$
        this.s3Client.abortMultipartUpload(
                new AbortMultipartUploadRequest(bucket, obj.getKey(), initResponse.getUploadId()));
        throw new RuntimeException("put(): Exception occurred in put(): ", e); //$NON-NLS-1$
    }
}
From source file:org.apache.jmeter.protocol.tcp.sampler.TCPClientImpl.java
/**
 * Reads data until the defined EOL byte is reached.
 * If there is no EOL byte defined, then reads until
 * the end of the stream is reached.
 */
@Override
public String read(InputStream is) throws ReadException {
    ByteArrayOutputStream w = new ByteArrayOutputStream();
    try {
        byte[] buffer = new byte[4096];
        int x = 0;
        while ((x = is.read(buffer)) > -1) {
            w.write(buffer, 0, x);
            if (useEolByte && (buffer[x - 1] == eolByte)) {
                break;
            }
        }
        // do we need to close byte array (or flush it?)
        if (log.isDebugEnabled()) {
            log.debug("Read: " + w.size() + "\n" + w.toString());
        }
        return w.toString(charset);
    } catch (IOException e) {
        throw new ReadException("Error reading from server, bytes read: " + w.size(), e, w.toString());
    }
}