List of usage examples for the java.io.SequenceInputStream constructor
public SequenceInputStream(InputStream s1, InputStream s2)
Initializes a newly created SequenceInputStream by remembering the two arguments, which will be read in order, first s1 and then s2, to provide the bytes to be read from this SequenceInputStream.
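Before the real-world examples, here is a minimal, self-contained sketch of the constructor in isolation (the file names part1.txt and part2.txt are placeholders, not taken from the examples below): reading the SequenceInputStream yields every byte of the first stream, then every byte of the second.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;

public class ConcatExample {
    public static void main(String[] args) throws IOException {
        // part1.txt and part2.txt are placeholder file names for this sketch
        try (InputStream s1 = new FileInputStream("part1.txt");
             InputStream s2 = new FileInputStream("part2.txt");
             InputStream in = new SequenceInputStream(s1, s2)) {
            int b;
            while ((b = in.read()) != -1) {
                System.out.write(b); // s1's bytes are exhausted before s2 is touched
            }
            System.out.flush();
        }
    }
}

Closing the SequenceInputStream also closes whichever underlying streams have not yet been exhausted, so the explicit try-with-resources on s1 and s2 is defensive rather than strictly required.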
From source file:com.joyent.manta.client.MantaClient.java
/**
 * Puts an object into Manta.
 *
 * @param rawPath The path to the Manta object.
 * @param source {@link InputStream} to copy object data from
 * @param contentLength the total length of the stream (-1 if unknown)
 * @param headers optional HTTP headers to include when copying the object
 * @param metadata optional user-supplied metadata for object
 * @return Manta response object
 * @throws IOException If an IO exception has occurred.
 * @throws MantaClientHttpResponseException If a http status code {@literal > 300} is returned.
 */
public MantaObjectResponse put(final String rawPath, final InputStream source, final long contentLength,
        final MantaHttpHeaders headers, final MantaMetadata metadata) throws IOException {
    Validate.notBlank(rawPath, "rawPath must not be blank");
    Validate.notNull(source, "Input stream must not be null");

    final String path = formatPath(rawPath);
    final ContentType contentType = ContentTypeLookup.findOrDefaultContentType(headers,
            ContentType.APPLICATION_OCTET_STREAM);
    final int preLoadSize = config.getUploadBufferSize();
    final HttpEntity entity;

    /* We don't know how big the stream is, so we read N bytes from it and
     * see if it ends. If it ended, then we just convert that buffer into
     * an entity and pass it. If it didn't end, then we create new stream
     * that concatenates the bytes read with the source stream.
     * Unfortunately, this will put us in a chunked transfer encoding and
     * it will affect performance. */
    if (contentLength < 0) {
        // If our stream is a FileInputStream, then we can pull the size off of it
        if (source.getClass().equals(FileInputStream.class)) {
            FileInputStream fsin = (FileInputStream) source;
            entity = new InputStreamEntity(fsin, fsin.getChannel().size(), contentType);
        } else {
            byte[] preLoad = new byte[preLoadSize];
            int read = IOUtils.read(source, preLoad);

            // The total amount of bytes read was less than the preload size,
            // so we can just return a in-memory non-streaming entity
            if (read < preLoadSize) {
                entity = new ExposedByteArrayEntity(preLoad, 0, read, contentType);
            } else {
                ByteArrayInputStream bin = new ByteArrayInputStream(preLoad);
                SequenceInputStream sin = new SequenceInputStream(bin, source);

                entity = new InputStreamEntity(sin, contentType);
            }
        }
    /* We know how big the stream is, so we can decide if it is within our
     * preload threshold and load it into memory or if it isn't within the
     * threshold, we can pass it on as a streamed entity in non-chunked mode. */
    } else {
        if (contentLength <= preLoadSize && contentLength <= Integer.MAX_VALUE) {
            byte[] preLoad = new byte[(int) contentLength];
            IOUtils.read(source, preLoad);

            entity = new ExposedByteArrayEntity(preLoad, contentType);
        } else {
            entity = new InputStreamEntity(source, contentLength, contentType);
        }
    }

    return httpHelper.httpPut(path, headers, entity, metadata);
}
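The branch that wraps the preloaded bytes and the remaining source in a SequenceInputStream is the part relevant to this page. A stripped-down sketch of that preload-or-concatenate decision, using hypothetical names (preloadOrConcatenate, preLoadSize) and omitting the Manta/HttpClient types, might look like this:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;

public final class PreloadSketch {

    /**
     * Reads up to preLoadSize bytes from source. If the stream ends inside the
     * buffer, the caller gets a purely in-memory stream; otherwise the buffered
     * bytes are replayed ahead of the unread remainder via SequenceInputStream.
     */
    static InputStream preloadOrConcatenate(InputStream source, int preLoadSize) throws IOException {
        byte[] preLoad = new byte[preLoadSize];
        int read = 0;
        int n;
        // Keep reading until the buffer is full or the stream ends;
        // a single read() call may return fewer bytes than requested.
        while (read < preLoadSize && (n = source.read(preLoad, read, preLoadSize - read)) != -1) {
            read += n;
        }
        if (read < preLoadSize) {
            // Everything fit in memory.
            return new ByteArrayInputStream(preLoad, 0, read);
        }
        // The source may have more data: buffered bytes first, then the rest of the source.
        return new SequenceInputStream(new ByteArrayInputStream(preLoad), source);
    }
}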
From source file:jp.aegif.nemaki.businesslogic.impl.ContentServiceImpl.java
@Override
public void appendAttachment(CallContext callContext, String repositoryId, Holder<String> objectId,
        Holder<String> changeToken, ContentStream contentStream, boolean isLastChunk, ExtensionsData extension) {
    Document document = contentDaoService.getDocument(repositoryId, objectId.getValue());
    AttachmentNode attachment = getAttachment(repositoryId, document.getAttachmentNodeId());
    InputStream is = attachment.getInputStream();

    // Append
    SequenceInputStream sis = new SequenceInputStream(is, contentStream.getStream());

    // appendStream will be used for a huge file, so avoid reading stream
    long length = attachment.getLength() + contentStream.getLength();

    ContentStream cs = new ContentStreamImpl("content", BigInteger.valueOf(length), attachment.getMimeType(), sis);

    contentDaoService.updateAttachment(repositoryId, attachment, cs);

    writeChangeEvent(callContext, repositoryId, document, ChangeType.UPDATED);
}
From source file:org.alfresco.opencmis.CMISConnector.java
public void appendContent(CMISNodeInfo nodeInfo, ContentStream contentStream, boolean isLastChunk)
        throws IOException {
    NodeRef nodeRef = nodeInfo.getNodeRef();

    this.disableBehaviour(ContentModel.ASPECT_VERSIONABLE, nodeRef);

    if (!nodeService.hasAspect(nodeRef, ContentModel.ASPECT_CMIS_UPDATE_CONTEXT)) {
        Map<QName, Serializable> props = new HashMap<QName, Serializable>();
        props.put(ContentModel.PROP_GOT_FIRST_CHUNK, true);
        nodeService.addAspect(nodeRef, ContentModel.ASPECT_CMIS_UPDATE_CONTEXT, props);
    }

    ContentReader reader = contentService.getReader(nodeRef, ContentModel.PROP_CONTENT);
    InputStream existingContentInput = (reader != null ? reader.getContentInputStream() : null);
    InputStream input = contentStream.getStream();

    ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
    OutputStream out = new BufferedOutputStream(writer.getContentOutputStream());

    InputStream in = null;
    if (existingContentInput != null) {
        in = new SequenceInputStream(existingContentInput, input);
    } else {
        in = input;
    }

    try {
        IOUtils.copy(in, out);

        if (isLastChunk) {
            nodeService.removeAspect(nodeRef, ContentModel.ASPECT_CMIS_UPDATE_CONTEXT);
            getActivityPoster().postFileFolderUpdated(nodeInfo.isFolder(), nodeRef);
            createVersion(nodeRef, VersionType.MINOR, "Appended content stream");
        }
    } finally {
        if (in != null) {
            in.close();
        }
        if (out != null) {
            out.close();
        }
        this.enableBehaviour(ContentModel.ASPECT_VERSIONABLE, nodeRef);
    }
}