List of usage examples for `java.io.InputStream.mark(int)`
public synchronized void mark(int readlimit)
From source file: org.rssowl.core.internal.connection.DefaultProtocolHandler.java
private InputStream pipeStream(InputStream inputStream, HttpMethodBase method) throws IOException { Assert.isNotNull(inputStream);/* w w w .ja v a 2s . co m*/ /* Retrieve the Content Encoding */ String contentEncoding = method.getResponseHeader(HEADER_RESPONSE_CONTENT_ENCODING) != null ? method.getResponseHeader(HEADER_RESPONSE_CONTENT_ENCODING).getValue() : null; boolean isGzipStream = false; /* * Return in case the Content Encoding is not given and the InputStream does * not support mark() and reset() */ if ((contentEncoding == null || !contentEncoding.equals("gzip")) && !inputStream.markSupported()) //$NON-NLS-1$ return inputStream; /* Content Encoding is set to gzip, so use the GZipInputStream */ if (contentEncoding != null && contentEncoding.equals("gzip")) { //$NON-NLS-1$ isGzipStream = true; } /* Detect if the Stream is gzip encoded */ else if (inputStream.markSupported()) { inputStream.mark(2); int id1 = inputStream.read(); int id2 = inputStream.read(); inputStream.reset(); /* Check for GZip Magic Numbers (See RFC 1952) */ if (id1 == 0x1F && id2 == 0x8B) isGzipStream = true; } /* Create the GZipInputStream then */ if (isGzipStream) { try { return new GZIPInputStream(inputStream); } catch (IOException e) { return inputStream; } } return inputStream; }
From source file: org.apache.openjpa.meta.AbstractCFMetaDataFactory.java
/** * Parse persistent type names.//from w ww . jav a 2s . com */ protected Set<String> parsePersistentTypeNames(ClassLoader loader) throws IOException { ClassArgParser cparser = newClassArgParser(); String[] clss; Set<String> names = new HashSet<String>(); if (files != null) { File file; for (Iterator itr = files.iterator(); itr.hasNext();) { file = (File) itr.next(); if ((AccessController.doPrivileged(J2DoPrivHelper.isDirectoryAction(file))).booleanValue()) { if (log.isTraceEnabled()) log.trace(_loc.get("scanning-directory", file)); scan(new FileMetaDataIterator(file, newMetaDataFilter()), cparser, names, true, file); } else if (file.getName().endsWith(".jar")) { if (log.isTraceEnabled()) log.trace(_loc.get("scanning-jar", file)); try { ZipFile zFile = AccessController.doPrivileged(J2DoPrivHelper.newZipFileAction(file)); scan(new ZipFileMetaDataIterator(zFile, newMetaDataFilter()), cparser, names, true, file); } catch (PrivilegedActionException pae) { throw (IOException) pae.getException(); } } else { if (log.isTraceEnabled()) log.trace(_loc.get("scanning-file", file)); clss = cparser.parseTypeNames(new FileMetaDataIterator(file)); List<String> newNames = Arrays.asList(clss); if (log.isTraceEnabled()) log.trace(_loc.get("scan-found-names", newNames, file)); names.addAll(newNames); File f = AccessController.doPrivileged(J2DoPrivHelper.getAbsoluteFileAction(file)); try { mapPersistentTypeNames(AccessController.doPrivileged(J2DoPrivHelper.toURLAction(f)), clss); } catch (PrivilegedActionException pae) { throw (FileNotFoundException) pae.getException(); } } } } URL url; if (urls != null) { for (Iterator itr = urls.iterator(); itr.hasNext();) { url = (URL) itr.next(); if ("file".equals(url.getProtocol())) { File file = AccessController .doPrivileged(J2DoPrivHelper.getAbsoluteFileAction(new File(url.getFile()))); if (files != null && files.contains(file)) { continue; } else if ((AccessController.doPrivileged(J2DoPrivHelper.isDirectoryAction(file))) .booleanValue()) { 
if (log.isTraceEnabled()) log.trace(_loc.get("scanning-directory", file)); scan(new FileMetaDataIterator(file, newMetaDataFilter()), cparser, names, true, file); continue; } } if ("vfs".equals(url.getProtocol())) { if (log.isTraceEnabled()) { log.trace(_loc.get("scanning-vfs-url", url)); } final URLConnection conn = url.openConnection(); final Object vfsContent = conn.getContent(); final URL finalUrl = url; File file = AccessController.doPrivileged(new PrivilegedAction<File>() { @SuppressWarnings({ "rawtypes", "unchecked" }) public File run() { try { Class virtualFileClass = Class.forName("org.jboss.vfs.VirtualFile"); Method getPhysicalFile = virtualFileClass.getDeclaredMethod("getPhysicalFile"); return (File) getPhysicalFile.invoke(vfsContent); } catch (Exception e) { log.error(_loc.get("while-scanning-vfs-url", finalUrl), e); } return null; } }); if (file != null) scan(new FileMetaDataIterator(file, newMetaDataFilter()), cparser, names, true, file); continue; } if ("jar".equals(url.getProtocol())) { if (url.getPath().endsWith("!/")) { if (log.isTraceEnabled()) log.trace(_loc.get("scanning-jar-url", url)); scan(new ZipFileMetaDataIterator(url, newMetaDataFilter()), cparser, names, true, url); } else { if (log.isTraceEnabled()) log.trace(_loc.get("scanning-jar-url", url)); scan(new JarFileURLMetaDataIterator(url, newMetaDataFilter()), cparser, names, true, url); } } else if (url.getPath().endsWith(".jar")) { if (log.isTraceEnabled()) log.trace(_loc.get("scanning-jar-at-url", url)); try { InputStream is = (InputStream) AccessController .doPrivileged(J2DoPrivHelper.openStreamAction(url)); scan(new ZipStreamMetaDataIterator(new ZipInputStream(is), newMetaDataFilter()), cparser, names, true, url); } catch (PrivilegedActionException pae) { throw (IOException) pae.getException(); } } else { // Open an InputStream from the URL and sniff for a zip header. If it is, then this is // a URL with a jar-formated InputStream, as per the JPA specification. 
Otherwise, fall back // to URLMetaDataIterator. BufferedInputStream is = null; try { is = new BufferedInputStream( (InputStream) AccessController.doPrivileged(J2DoPrivHelper.openStreamAction(url))); } catch (PrivilegedActionException pae) { throw (IOException) pae.getException(); } // Check for zip header magic 0x50 0x4b 0x03 0x04 is.mark(0); boolean zipHeaderMatch = is.read() == 0x50 && is.read() == 0x4b && is.read() == 0x03 && is.read() == 0x04; is.reset(); if (zipHeaderMatch) { // The URL provides a Jar-formatted InputStream, consume it with ZipStreamMetaDataIterator if (log.isTraceEnabled()) log.trace(_loc.get("scanning-jar-at-url", url)); scan(new ZipStreamMetaDataIterator(new ZipInputStream(is), newMetaDataFilter()), cparser, names, true, url); } else { // Fall back to URLMetaDataIterator if (log.isTraceEnabled()) log.trace(_loc.get("scanning-url", url)); clss = cparser.parseTypeNames(new URLMetaDataIterator(url)); List<String> newNames = Arrays.asList(clss); if (log.isTraceEnabled()) log.trace(_loc.get("scan-found-names", newNames, url)); names.addAll(newNames); mapPersistentTypeNames(url, clss); } } } } if (rsrcs != null) { String rsrc; MetaDataIterator mitr; for (Iterator itr = rsrcs.iterator(); itr.hasNext();) { rsrc = (String) itr.next(); if (rsrc.endsWith(".jar")) { url = AccessController.doPrivileged(J2DoPrivHelper.getResourceAction(loader, rsrc)); if (url != null) { if (log.isTraceEnabled()) log.trace(_loc.get("scanning-jar-stream-url", url)); try { InputStream is = (InputStream) AccessController .doPrivileged(J2DoPrivHelper.openStreamAction(url)); scan(new ZipStreamMetaDataIterator(new ZipInputStream(is), newMetaDataFilter()), cparser, names, true, url); } catch (PrivilegedActionException pae) { throw (IOException) pae.getException(); } } } else { if (log.isTraceEnabled()) log.trace(_loc.get("scanning-resource", rsrc)); mitr = new ResourceMetaDataIterator(rsrc, loader); OpenJPAConfiguration conf = repos.getConfiguration(); Map peMap = null; if (conf 
instanceof OpenJPAConfigurationImpl) peMap = ((OpenJPAConfigurationImpl) conf).getPersistenceEnvironment(); URL puUrl = peMap == null ? null : (URL) peMap.get(PERSISTENCE_UNIT_ROOT_URL); List<String> mappingFileNames = peMap == null ? null : (List<String>) peMap.get(MAPPING_FILE_NAMES); List<URL> jars = peMap == null ? null : (List<URL>) peMap.get(JAR_FILE_URLS); String puUrlString = puUrl == null ? null : puUrl.toString(); if (log.isTraceEnabled()) log.trace(_loc.get("pu-root-url", puUrlString)); URL puORMUrl = null; try { if (puUrlString != null) { String puORMUrlStr = puUrlString + (puUrlString.endsWith("/") ? "" : "/") + rsrc; puORMUrl = AccessController.doPrivileged(J2DoPrivHelper.createURL(puORMUrlStr)); } } catch (PrivilegedActionException e) { throw new IOException("Error generating puORMUrlStr.", e.getCause()); } List<URL> urls = new ArrayList<URL>(3); while (mitr.hasNext()) { url = (URL) mitr.next(); String urlString = url.toString(); if (log.isTraceEnabled()) log.trace(_loc.get("resource-url", urlString)); if (peMap != null) { //OPENJPA-2102: decode the URL to remove such things a spaces (' ') encoded as '%20' if (puUrlString != null && decode(urlString).indexOf(decode(puUrlString)) != -1) { urls.add(url); } else if (puORMUrl != null && puORMUrl.equals(url)) { // Check URL equality to support encapsulating URL protocols urls.add(url); } if (mappingFileNames != null && mappingFileNames.size() != 0) { for (String mappingFileName : mappingFileNames) { if (log.isTraceEnabled()) log.trace(_loc.get("mapping-file-name", mappingFileName)); if (urlString.indexOf(mappingFileName) != -1) urls.add(url); } } if (jars != null && jars.size() != 0) { for (URL jarUrl : jars) { if (log.isTraceEnabled()) log.trace(_loc.get("jar-file-url", jarUrl)); if (urlString.indexOf(jarUrl.toString()) != -1) urls.add(url); } } } else { urls.add(url); } } mitr.close(); for (Object obj : urls) { url = (URL) obj; clss = cparser.parseTypeNames(new URLMetaDataIterator(url)); List<String> 
newNames = Arrays.asList(clss); if (log.isTraceEnabled()) log.trace(_loc.get("scan-found-names", newNames, rsrc)); names.addAll(newNames); mapPersistentTypeNames(url, clss); } } } } if (cpath != null) { String[] dirs = (String[]) cpath.toArray(new String[cpath.size()]); scan(new ClasspathMetaDataIterator(dirs, newMetaDataFilter()), cparser, names, true, dirs); } if (types != null) names.addAll(types); if (log.isTraceEnabled()) log.trace(_loc.get("parse-found-names", names)); return names; }
From source file: org.alfresco.rest.api.impl.NodesImpl.java
private void writeContent(NodeRef nodeRef, String fileName, InputStream stream, boolean guessEncoding) { try {// w w w . j a v a 2 s . c o m ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true); String mimeType = mimetypeService.guessMimetype(fileName); if ((mimeType != null) && (!mimeType.equals(MimetypeMap.MIMETYPE_BINARY))) { // quick/weak guess based on file extension writer.setMimetype(mimeType); } else { // stronger guess based on file stream writer.guessMimetype(fileName); } InputStream is = null; if (guessEncoding) { is = new BufferedInputStream(stream); is.mark(1024); writer.setEncoding(guessEncoding(is, mimeType, false)); try { is.reset(); } catch (IOException ioe) { if (logger.isWarnEnabled()) { logger.warn("Failed to reset stream after trying to guess encoding: " + ioe.getMessage()); } } } else { is = stream; } writer.putContent(is); } catch (ContentQuotaException cqe) { throw new InsufficientStorageException(); } catch (ContentLimitViolationException clv) { throw new RequestEntityTooLargeException(clv.getMessage()); } catch (ContentIOException cioe) { if (cioe.getCause() instanceof NodeLockedException) { throw (NodeLockedException) cioe.getCause(); } throw cioe; } }
From source file: org.apache.nifi.processors.standard.IdentifyMimeType.java
@Override public void onTrigger(final ProcessContext context, final ProcessSession session) { FlowFile flowFile = session.get();// ww w . j a va 2s . com if (flowFile == null) { return; } final ProcessorLog logger = getLogger(); final boolean identifyZip = context.getProperty(IDENTIFY_ZIP).asBoolean(); final boolean identifyTar = context.getProperty(IDENTIFY_TAR).asBoolean(); final ObjectHolder<String> mimeTypeRef = new ObjectHolder<>(null); session.read(flowFile, new InputStreamCallback() { @Override public void process(final InputStream stream) throws IOException { try (final InputStream in = new BufferedInputStream(stream)) { // read in up to magicHeaderMaxLength bytes in.mark(magicHeaderMaxLength); byte[] header = new byte[magicHeaderMaxLength]; for (int i = 0; i < header.length; i++) { final int next = in.read(); if (next >= 0) { header[i] = (byte) next; } else if (i == 0) { header = new byte[0]; } else { final byte[] newBuffer = new byte[i - 1]; System.arraycopy(header, 0, newBuffer, 0, i - 1); header = newBuffer; break; } } in.reset(); for (final MagicHeader magicHeader : magicHeaders) { if (magicHeader.matches(header)) { mimeTypeRef.set(magicHeader.getMimeType()); return; } } if (!identifyZip) { for (final MagicHeader magicHeader : zipMagicHeaders) { if (magicHeader.matches(header)) { mimeTypeRef.set(magicHeader.getMimeType()); return; } } } if (!identifyTar) { for (final MagicHeader magicHeader : tarMagicHeaders) { if (magicHeader.matches(header)) { mimeTypeRef.set(magicHeader.getMimeType()); return; } } } } } }); String mimeType = mimeTypeRef.get(); if (mimeType == null) { for (final ContentScanningMimeTypeIdentifier scanningIdentifier : this.contentScanners) { if (scanningIdentifier.isEnabled(context)) { session.read(flowFile, new InputStreamCallback() { @Override public void process(final InputStream in) throws IOException { String mimeType = scanningIdentifier.getMimeType(in); if (mimeType != null) { mimeTypeRef.set(mimeType); } } }); if 
(mimeTypeRef.get() != null) { break; } } } } mimeType = mimeTypeRef.get(); if (mimeType == null) { flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/octet-stream"); logger.info("Unable to identify MIME Type for {}; setting to application/octet-stream", new Object[] { flowFile }); } else { flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), mimeType); logger.info("Identified {} as having MIME Type {}", new Object[] { flowFile, mimeType }); } session.getProvenanceReporter().modifyAttributes(flowFile); session.transfer(flowFile, REL_SUCCESS); }
From source file: com.adobe.phonegap.contentsync.Sync.java
private boolean unzipSync(File targetFile, String outputDirectory, ProgressEvent progress, CallbackContext callbackContext) { Log.d(LOG_TAG, "unzipSync called"); Log.d(LOG_TAG, "zip = " + targetFile.getAbsolutePath()); InputStream inputStream = null; ZipFile zip = null;//from www . j av a 2 s . c o m boolean anyEntries = false; try { synchronized (progress) { if (progress.isAborted()) { return false; } } zip = new ZipFile(targetFile); // Since Cordova 3.3.0 and release of File plugins, files are accessed via cdvfile:// // Accept a path or a URI for the source zip. Uri zipUri = getUriForArg(targetFile.getAbsolutePath()); Uri outputUri = getUriForArg(outputDirectory); CordovaResourceApi resourceApi = webView.getResourceApi(); File tempFile = resourceApi.mapUriToFile(zipUri); if (tempFile == null || !tempFile.exists()) { sendErrorMessage("Zip file does not exist", UNZIP_ERROR, callbackContext); } File outputDir = resourceApi.mapUriToFile(outputUri); outputDirectory = outputDir.getAbsolutePath(); outputDirectory += outputDirectory.endsWith(File.separator) ? "" : File.separator; if (outputDir == null || (!outputDir.exists() && !outputDir.mkdirs())) { sendErrorMessage("Could not create output directory", UNZIP_ERROR, callbackContext); } OpenForReadResult zipFile = resourceApi.openForRead(zipUri); progress.setStatus(STATUS_EXTRACTING); progress.setLoaded(0); progress.setTotal(zip.size()); Log.d(LOG_TAG, "zip file len = " + zip.size()); inputStream = new BufferedInputStream(zipFile.inputStream); inputStream.mark(10); int magic = readInt(inputStream); if (magic != 875721283) { // CRX identifier inputStream.reset(); } else { // CRX files contain a header. This header consists of: // * 4 bytes of magic number // * 4 bytes of CRX format version, // * 4 bytes of public key length // * 4 bytes of signature length // * the public key // * the signature // and then the ordinary zip data follows. We skip over the header before creating the ZipInputStream. 
readInt(inputStream); // version == 2. int pubkeyLength = readInt(inputStream); int signatureLength = readInt(inputStream); inputStream.skip(pubkeyLength + signatureLength); } // The inputstream is now pointing at the start of the actual zip file content. ZipInputStream zis = new ZipInputStream(inputStream); inputStream = zis; ZipEntry ze; byte[] buffer = new byte[32 * 1024]; while ((ze = zis.getNextEntry()) != null) { synchronized (progress) { if (progress.isAborted()) { return false; } } anyEntries = true; String compressedName = ze.getName(); if (ze.getSize() > getFreeSpace()) { return false; } if (ze.isDirectory()) { File dir = new File(outputDirectory + compressedName); dir.mkdirs(); } else { File file = new File(outputDirectory + compressedName); file.getParentFile().mkdirs(); if (file.exists() || file.createNewFile()) { Log.w(LOG_TAG, "extracting: " + file.getPath()); FileOutputStream fout = new FileOutputStream(file); int count; while ((count = zis.read(buffer)) != -1) { fout.write(buffer, 0, count); } fout.close(); } } progress.addLoaded(1); updateProgress(callbackContext, progress); zis.closeEntry(); } } catch (Exception e) { String errorMessage = "An error occurred while unzipping."; sendErrorMessage(errorMessage, UNZIP_ERROR, callbackContext); Log.e(LOG_TAG, errorMessage, e); } finally { if (inputStream != null) { try { inputStream.close(); } catch (IOException e) { } } if (zip != null) { try { zip.close(); } catch (IOException e) { } } } if (anyEntries) return true; else return false; }
From source file: ee.sk.digidoc.factory.SAXDigiDocFactory.java
/** * Checks if this stream could be a bdoc input stream * @param is input stream, must support mark() and reset() operations! * @return true if bdoc/*ww w . j a va 2 s. c o m*/ */ private boolean isBdocFile(InputStream is) throws DigiDocException { try { if (is.markSupported()) is.mark(10); byte[] tdata = new byte[10]; int n = is.read(tdata); if (is.markSupported()) is.reset(); if (n >= 2 && tdata[0] == (byte) 'P' && tdata[1] == (byte) 'K') return true; // probably a zip file if (n >= 5 && tdata[0] == (byte) '<' && tdata[1] == (byte) '?' && tdata[2] == (byte) 'x' && tdata[3] == (byte) 'm' && tdata[4] == (byte) 'l') return false; // an xml file - probably ddoc format? } catch (Exception ex) { m_logger.error("Error determining file type: " + ex); } return false; }
From source file: com.smartsheet.api.internal.http.DefaultHttpClient.java
/**
 * Make an HTTP request and return the response.
 *
 * Retries non-200 responses (per shouldRetry) until the retry budget is spent.
 * Because a retry re-sends the request body and re-reads the response body, both
 * streams are made mark/reset-capable (buffered into memory if necessary) and
 * reset between attempts.
 *
 * @param smartsheetRequest the smartsheet request
 * @return the HTTP response
 * @throws HttpClientException the HTTP client exception
 */
public HttpResponse request(HttpRequest smartsheetRequest) throws HttpClientException {
    Util.throwIfNull(smartsheetRequest);
    if (smartsheetRequest.getUri() == null) {
        throw new IllegalArgumentException("A Request URI is required.");
    }

    int attempt = 0;
    long start = System.currentTimeMillis();

    HttpRequestBase apacheHttpRequest;
    HttpResponse smartsheetResponse;

    InputStream bodyStream = null;
    if (smartsheetRequest.getEntity() != null && smartsheetRequest.getEntity().getContent() != null) {
        bodyStream = smartsheetRequest.getEntity().getContent();
    }
    // the retry logic will consume the body stream so we make sure it supports mark/reset and mark it
    boolean canRetryRequest = bodyStream == null || bodyStream.markSupported();
    if (!canRetryRequest) {
        try {
            // attempt to wrap the body stream in a input-stream that does support mark/reset
            bodyStream = new ByteArrayInputStream(StreamUtil.readBytesFromStream(bodyStream));
            // close the old stream (just to be tidy) and then replace it with a reset-able stream
            smartsheetRequest.getEntity().getContent().close();
            smartsheetRequest.getEntity().setContent(bodyStream);
            canRetryRequest = true;
        } catch (IOException ignore) {
            // best effort: if buffering fails, the request simply won't be retryable
        }
    }

    // the retry loop
    while (true) {
        apacheHttpRequest = createApacheRequest(smartsheetRequest);

        // Set HTTP headers
        if (smartsheetRequest.getHeaders() != null) {
            for (Map.Entry<String, String> header : smartsheetRequest.getHeaders().entrySet()) {
                apacheHttpRequest.addHeader(header.getKey(), header.getValue());
            }
        }

        HttpEntitySnapshot requestEntityCopy = null;
        HttpEntitySnapshot responseEntityCopy = null;
        // Set HTTP entity
        final HttpEntity entity = smartsheetRequest.getEntity();
        if (apacheHttpRequest instanceof HttpEntityEnclosingRequestBase && entity != null
                && entity.getContent() != null) {
            try {
                // we need access to the original request stream so we can log it (in the event of errors and/or tracing)
                requestEntityCopy = new HttpEntitySnapshot(entity);
            } catch (IOException iox) {
                logger.error("failed to make copy of original request entity - {}", iox);
            }

            InputStreamEntity streamEntity = new InputStreamEntity(entity.getContent(), entity.getContentLength());
            streamEntity.setChunked(false); // why? not supported by library?
            ((HttpEntityEnclosingRequestBase) apacheHttpRequest).setEntity(streamEntity);
        }

        // mark the body so we can reset on retry
        // NOTE(review): the cast truncates a long content length to int; a length of
        // -1 (unknown) yields a negative readlimit — confirm callers always provide
        // a known, int-range content length here.
        if (canRetryRequest && bodyStream != null) {
            bodyStream.mark((int) smartsheetRequest.getEntity().getContentLength());
        }

        // Make the HTTP request
        smartsheetResponse = new HttpResponse();
        HttpContext context = new BasicHttpContext();
        try {
            long startTime = System.currentTimeMillis();
            apacheHttpResponse = this.httpClient.execute(apacheHttpRequest, context);
            long endTime = System.currentTimeMillis();

            // Set request headers to values ACTUALLY SENT (not just created by us), this would include:
            // 'Connection', 'Accept-Encoding', etc. However, if a proxy is used, this may be the proxy's CONNECT
            // request, hence the test for HTTP method first
            Object httpRequest = context.getAttribute("http.request");
            if (httpRequest != null && HttpRequestWrapper.class.isAssignableFrom(httpRequest.getClass())) {
                HttpRequestWrapper actualRequest = (HttpRequestWrapper) httpRequest;
                switch (HttpMethod.valueOf(actualRequest.getMethod())) {
                case GET:
                case POST:
                case PUT:
                case DELETE:
                    apacheHttpRequest.setHeaders(((HttpRequestWrapper) httpRequest).getAllHeaders());
                    break;
                }
            }

            // Set returned headers
            smartsheetResponse.setHeaders(new HashMap<String, String>());
            for (Header header : apacheHttpResponse.getAllHeaders()) {
                smartsheetResponse.getHeaders().put(header.getName(), header.getValue());
            }
            smartsheetResponse.setStatus(apacheHttpResponse.getStatusLine().getStatusCode(),
                    apacheHttpResponse.getStatusLine().toString());

            // Set returned entities
            if (apacheHttpResponse.getEntity() != null) {
                HttpEntity httpEntity = new HttpEntity();
                httpEntity.setContentType(apacheHttpResponse.getEntity().getContentType().getValue());
                httpEntity.setContentLength(apacheHttpResponse.getEntity().getContentLength());
                httpEntity.setContent(apacheHttpResponse.getEntity().getContent());
                smartsheetResponse.setEntity(httpEntity);
                responseEntityCopy = new HttpEntitySnapshot(httpEntity);
            }

            long responseTime = endTime - startTime;
            logRequest(apacheHttpRequest, requestEntityCopy, smartsheetResponse, responseEntityCopy,
                    responseTime);

            if (traces.size() > 0) { // trace-logging of request and response (if so configured)
                RequestAndResponseData requestAndResponseData = RequestAndResponseData.of(apacheHttpRequest,
                        requestEntityCopy, smartsheetResponse, responseEntityCopy, traces);
                TRACE_WRITER.println(requestAndResponseData.toString(tracePrettyPrint));
            }

            if (smartsheetResponse.getStatusCode() == 200) {
                // call successful, exit the retry loop
                break;
            }

            // the retry logic might consume the content stream so we make sure it supports mark/reset and mark it
            // NOTE(review): if a non-200 response arrives without a body,
            // getEntity() is null here and this line NPEs — confirm the Smartsheet
            // API always returns an entity on error responses.
            InputStream contentStream = smartsheetResponse.getEntity().getContent();
            if (!contentStream.markSupported()) {
                // wrap the response stream in a input-stream that does support mark/reset
                contentStream = new ByteArrayInputStream(StreamUtil.readBytesFromStream(contentStream));
                // close the old stream (just to be tidy) and then replace it with a reset-able stream
                smartsheetResponse.getEntity().getContent().close();
                smartsheetResponse.getEntity().setContent(contentStream);
            }

            try {
                contentStream.mark((int) smartsheetResponse.getEntity().getContentLength());
                long timeSpent = System.currentTimeMillis() - start;
                if (!shouldRetry(++attempt, timeSpent, smartsheetResponse)) {
                    // should not retry, or retry time exceeded, exit the retry loop
                    break;
                }
            } finally {
                // always rewind both streams so the next attempt (or the caller) sees
                // them from the beginning
                if (bodyStream != null) {
                    bodyStream.reset();
                }
                contentStream.reset();
            }
            // moving this to finally causes issues because socket is closed (which means response stream is closed)
            this.releaseConnection();

        } catch (ClientProtocolException e) {
            try {
                logger.warn("ClientProtocolException " + e.getMessage());
                logger.warn("{}", RequestAndResponseData.of(apacheHttpRequest, requestEntityCopy,
                        smartsheetResponse, responseEntityCopy, REQUEST_RESPONSE_SUMMARY));
                // if this is a PUT and was retried by the http client, the body content stream is at the
                // end and is a NonRepeatableRequest. If we marked the body content stream prior to execute,
                // reset and retry
                if (canRetryRequest && e.getCause() instanceof NonRepeatableRequestException) {
                    if (smartsheetRequest.getEntity() != null) {
                        smartsheetRequest.getEntity().getContent().reset();
                    }
                    continue;
                }
            } catch (IOException ignore) {
            }
            throw new HttpClientException("Error occurred.", e);
        } catch (NoHttpResponseException e) {
            try {
                logger.warn("NoHttpResponseException " + e.getMessage());
                logger.warn("{}", RequestAndResponseData.of(apacheHttpRequest, requestEntityCopy,
                        smartsheetResponse, responseEntityCopy, REQUEST_RESPONSE_SUMMARY));
                // check to see if the response was empty and this was a POST. All other HTTP methods
                // will be automatically retried by the http client.
                // (POST is non-idempotent and is not retried automatically, but is safe for us to retry)
                if (canRetryRequest && smartsheetRequest.getMethod() == HttpMethod.POST) {
                    if (smartsheetRequest.getEntity() != null) {
                        smartsheetRequest.getEntity().getContent().reset();
                    }
                    continue;
                }
            } catch (IOException ignore) {
            }
            throw new HttpClientException("Error occurred.", e);
        } catch (IOException e) {
            try {
                logger.warn("{}", RequestAndResponseData.of(apacheHttpRequest, requestEntityCopy,
                        smartsheetResponse, responseEntityCopy, REQUEST_RESPONSE_SUMMARY));
            } catch (IOException ignore) {
            }
            throw new HttpClientException("Error occurred.", e);
        }
    }
    return smartsheetResponse;
}
From source file: s3.com.qiniu.services.s3.AmazonS3Client.java
/** * Calculate the content length of a mark supported input stream. * * @param is input stream/*w w w . j a va 2 s. com*/ * @return length of the input stream */ private long calculateContentLength(InputStream is) { long len = 0; byte[] buf = new byte[8 * 1024]; int read; is.mark(-1); try { while ((read = is.read(buf)) != -1) { len += read; } is.reset(); } catch (IOException ioe) { throw new AmazonClientException("Could not calculate content length.", ioe); } return len; }