List of usage examples for java.io InputStream markSupported
public boolean markSupported()
Tests if this input stream supports the mark and reset methods.
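Before the real-world examples, here is a minimal, self-contained sketch of the basic contract: call markSupported() before relying on mark and reset, and wrap the stream (for example in a BufferedInputStream) when it does not support marking. The class name MarkSupportedDemo and the sample data are illustrative only.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class MarkSupportedDemo {
    public static void main(String[] args) throws IOException {
        InputStream in = new ByteArrayInputStream("hello world".getBytes(StandardCharsets.US_ASCII));
        // ByteArrayInputStream already supports mark/reset; the wrap is shown defensively.
        if (!in.markSupported()) {
            in = new BufferedInputStream(in);
        }
        in.mark(16);              // remember the current position; read-ahead limit of 16 bytes
        byte[] peek = new byte[5];
        int read = in.read(peek); // consume a few bytes ("hello")
        in.reset();               // rewind to the marked position
        System.out.println("peeked " + read + " bytes, next byte after reset: " + (char) in.read());
    }
}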
From source file:org.talend.dataprep.schema.xls.XlsSchemaParser.java
/**
 * Parse all xls sheets.
 *
 * @param request the schema parser request.
 * @return the list of parsed xls sheet.
 * @throws IOException if an error occurs.
 */
protected List<Schema.SheetContent> parseAllSheets(Request request) throws IOException {
    InputStream inputStream = request.getContent();
    if (!inputStream.markSupported()) {
        inputStream = new PushbackInputStream(inputStream, 8);
    }
    boolean newExcelFormat = XlsUtils.isNewExcelFormat(inputStream);
    // parse the xls input stream using the correct format
    if (newExcelFormat) {
        return parseAllSheetsStream(new Request(inputStream, request.getMetadata()));
    } else {
        return parseAllSheetsOldFormat(new Request(inputStream, request.getMetadata()));
    }
}
From source file:org.apache.xmlgraphics.ps.dsc.DSCParser.java
/**
 * Creates a new DSC parser.
 * @param in InputStream to read the PostScript file from
 *           (the stream is not closed by this class, the caller is responsible for that)
 * @throws IOException In case of an I/O error
 * @throws DSCException In case of a violation of the DSC spec
 */
public DSCParser(InputStream in) throws IOException, DSCException {
    if (in.markSupported()) {
        this.in = in;
    } else {
        // Decorate for better performance
        this.in = new java.io.BufferedInputStream(in);
    }
    String encoding = "US-ASCII";
    try {
        this.reader = new java.io.BufferedReader(new java.io.InputStreamReader(this.in, encoding));
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException("Incompatible VM! " + e.getMessage());
    }
    parseNext();
}
From source file:android.net.http.Request.java
/**
 * Supply an InputStream that provides the body of a request. It's
 * not great that the caller must also provide the length of the data
 * returned by that InputStream, but the client needs to know up
 * front, and I'm not sure how to get this out of the InputStream
 * itself without a costly readthrough. I'm not sure skip() would
 * do what we want. If you know a better way, please let me know.
 */
private void setBodyProvider(InputStream bodyProvider, int bodyLength) {
    if (!bodyProvider.markSupported()) {
        throw new IllegalArgumentException("bodyProvider must support mark()");
    }
    // Mark beginning of stream
    bodyProvider.mark(Integer.MAX_VALUE);
    ((BasicHttpEntityEnclosingRequest) mHttpRequest)
            .setEntity(new InputStreamEntity(bodyProvider, bodyLength));
}
From source file:org.talend.dataprep.schema.xls.XlsSchemaParser.java
/**
 * Parse all xls sheets for old excel document type
 *
 * @param request the xls request.
 * @return The parsed sheets request.
 */
private List<Schema.SheetContent> parseAllSheetsOldFormat(Request request) {
    final Marker marker = Markers.dataset(request.getMetadata().getId());
    try {
        InputStream inputStream = request.getContent();
        if (!inputStream.markSupported()) {
            inputStream = new PushbackInputStream(inputStream, 8);
        }
        Workbook hssfWorkbook = WorkbookFactory.create(inputStream);
        List<Schema.SheetContent> schemas;
        try {
            if (hssfWorkbook == null) {
                throw new IOException("could not open " + request.getMetadata().getId() + " as an excel file");
            }
            int sheetNumber = hssfWorkbook.getNumberOfSheets();
            if (sheetNumber < 1) {
                LOGGER.debug(marker, "has not sheet to read");
                return Collections.emptyList();
            }
            schemas = new ArrayList<>();
            for (int i = 0; i < sheetNumber; i++) {
                Sheet sheet = hssfWorkbook.getSheetAt(i);
                if (sheet.getLastRowNum() < 1) {
                    LOGGER.debug(marker, "sheet '{}' do not have rows skip ip", sheet.getSheetName());
                    continue;
                }
                List<ColumnMetadata> columnsMetadata = parsePerSheet(sheet, //
                        request.getMetadata().getId(), //
                        hssfWorkbook.getCreationHelper().createFormulaEvaluator());
                String sheetName = sheet.getSheetName();
                // update XlsSerializer if this default sheet naming change!!!
                schemas.add(new Schema.SheetContent(sheetName == null ? "sheet-" + i : sheetName, columnsMetadata));
            }
        } finally {
            hssfWorkbook.close();
        }
        return schemas;
    } catch (Exception e) {
        LOGGER.debug(marker, "Exception during parsing xls request :" + e.getMessage(), e);
        throw new TDPException(CommonErrorCodes.UNEXPECTED_EXCEPTION, e);
    }
}
From source file:org.apache.tika.parser.pkg.CompressorParser.java
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context)
        throws IOException, SAXException, TikaException {
    // At the end we want to close the compression stream to release
    // any associated resources, but the underlying document stream
    // should not be closed
    if (stream.markSupported()) {
        stream = new CloseShieldInputStream(stream);
    } else {
        // Ensure that the stream supports the mark feature
        stream = new BufferedInputStream(new CloseShieldInputStream(stream));
    }

    CompressorInputStream cis;
    try {
        CompressorParserOptions options = context.get(CompressorParserOptions.class,
                new CompressorParserOptions() {
                    public boolean decompressConcatenated(Metadata metadata) {
                        return false;
                    }
                });
        TikaCompressorStreamFactory factory = new TikaCompressorStreamFactory(
                options.decompressConcatenated(metadata), memoryLimitInKb);
        cis = factory.createCompressorInputStream(stream);
    } catch (CompressorException e) {
        if (e.getMessage() != null && e.getMessage().startsWith("MemoryLimitException:")) {
            throw new TikaMemoryLimitException(e.getMessage());
        }
        throw new TikaException("Unable to uncompress document stream", e);
    }

    MediaType type = getMediaType(cis);
    if (!type.equals(MediaType.OCTET_STREAM)) {
        metadata.set(CONTENT_TYPE, type.toString());
    }

    XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
    xhtml.startDocument();

    try {
        Metadata entrydata = new Metadata();
        String name = metadata.get(Metadata.RESOURCE_NAME_KEY);
        if (name != null) {
            if (name.endsWith(".tbz")) {
                name = name.substring(0, name.length() - 4) + ".tar";
            } else if (name.endsWith(".tbz2")) {
                name = name.substring(0, name.length() - 5) + ".tar";
            } else if (name.endsWith(".bz")) {
                name = name.substring(0, name.length() - 3);
            } else if (name.endsWith(".bz2")) {
                name = name.substring(0, name.length() - 4);
            } else if (name.endsWith(".xz")) {
                name = name.substring(0, name.length() - 3);
            } else if (name.endsWith(".zlib")) {
                name = name.substring(0, name.length() - 5);
            } else if (name.endsWith(".pack")) {
                name = name.substring(0, name.length() - 5);
            } else if (name.length() > 0) {
                name = GzipUtils.getUncompressedFilename(name);
            }
            entrydata.set(Metadata.RESOURCE_NAME_KEY, name);
        }

        // Use the delegate parser to parse the compressed document
        EmbeddedDocumentExtractor extractor = EmbeddedDocumentUtil.getEmbeddedDocumentExtractor(context);
        if (extractor.shouldParseEmbedded(entrydata)) {
            extractor.parseEmbedded(cis, xhtml, entrydata, true);
        }
    } finally {
        cis.close();
    }

    xhtml.endDocument();
}
From source file:org.obm.push.backend.obm22.mail.EmailManager.java
public void sendEmail(BackendSession bs, String from, Set<Address> setTo, Set<Address> setCc, Set<Address> setCci, InputStream mimeMail, Boolean saveInSent) { try {//ww w. j a v a 2s. c om logger.info("Send mail to " + setTo); if (!mimeMail.markSupported()) { ByteArrayOutputStream outPut = new ByteArrayOutputStream(); FileUtils.transfer(mimeMail, outPut, true); mimeMail = new ByteArrayInputStream(outPut.toByteArray()); } SMTPProtocol smtp = getSmtpClient(bs); smtp.openPort(); smtp.ehlo(InetAddress.getLocalHost()); Address addrFrom = new Address(from); smtp.mail(addrFrom); Address[] recipients = getAllRistrettoRecipients(setTo, setCc, setCci); for (Address to : recipients) { Address cleaned = new Address(to.getMailAddress()); smtp.rcpt(cleaned); } smtp.data(mimeMail); smtp.quit(); if (saveInSent) { mimeMail.reset(); storeInSent(bs, mimeMail); } } catch (Throwable e) { logger.error(e.getMessage(), e); } }
From source file:org.jets3t.service.impl.rest.httpclient.RepeatableRequestEntity.java
/**
 * Creates a repeatable request entity for the input stream provided.
 * <p>
 * If the input stream provided, or any underlying wrapped input streams, supports the
 * {@link InputStream#reset()} method then it will be capable of repeating data
 * transmission. If the input stream provided does not supports this method, it will
 * automatically be wrapped in a {@link RepeatableInputStream} -- in this case, the data
 * read from the wrapped input stream will be buffered up to the limit set by the JetS3t
 * property <tt>uploads.stream-retry-buffer-size</tt> (default: 131072 bytes).
 * <p>
 * This constructor also detects when an underlying {@link ProgressMonitoredInputStream} is
 * present, and will notify this monitor if a repeat occurs.
 * <p>
 * If the JetS3t properties option <code>httpclient.read-throttle</code> is set to a
 * non-zero value, all simultaneous uploads performed by this class will be throttled
 * to the specified speed.
 *
 * @param name
 * @param is
 *            the input stream that supplies the data to be made repeatable.
 * @param contentType
 * @param contentLength
 * @param enableLiveMD5Hashing
 *            if true, data that passes through the object will be hashed to an MD5 digest
 *            and this digest will be available from {@link #getMD5DigestOfData()}. If false,
 *            the digest will not be calculated.
 */
public RepeatableRequestEntity(String name, InputStream is, String contentType, long contentLength,
        Jets3tProperties jets3tProperties, boolean enableLiveMD5Hashing) {
    if (is == null) {
        throw new IllegalArgumentException("InputStream cannot be null");
    }
    this.is = is;
    this.name = name;
    this.contentLength = contentLength;
    this.contentType = contentType;
    this.isLiveMD5HashingEnabled = enableLiveMD5Hashing;

    InputStream inputStream = is;
    while (true) {
        if (inputStream instanceof ProgressMonitoredInputStream) {
            progressMonitoredIS = (ProgressMonitoredInputStream) inputStream;
        }
        if (inputStream.markSupported()) {
            repeatableInputStream = inputStream;
            int bufferSize = -1;
            // Mark the start of this input stream so we can reset it if necessary,
            // while being careful with streams that use in-memory backing storage.
            if (repeatableInputStream instanceof BufferedInputStream) {
                bufferSize = jets3tProperties.getIntProperty("uploads.stream-retry-buffer-size", 131072);
                log.debug("Setting conservative read-ahead mark limit for BufferedInputStream"
                        + " since it keeps read data in-memory and can cause memory starvation: "
                        + bufferSize + " (from property 'uploads.stream-retry-buffer-size')");
            } else {
                bufferSize = (int) Math.min(contentLength, Integer.MAX_VALUE);
                log.debug("Setting maximal read-ahead mark limit for markable input stream "
                        + repeatableInputStream.getClass().getName()
                        + " assuming it doesn't use in-memory storage: " + bufferSize);
            }
            repeatableInputStream.mark(bufferSize);
        }
        if (inputStream instanceof InputStreamWrapper) {
            inputStream = ((InputStreamWrapper) inputStream).getWrappedInputStream();
        } else {
            break;
        }
    }

    if (this.repeatableInputStream == null) {
        if (log.isDebugEnabled()) {
            log.debug("Wrapping non-repeatable input stream in a RepeatableInputStream");
        }
        int bufferSize = jets3tProperties.getIntProperty("uploads.stream-retry-buffer-size", 131072);
        this.is = new RepeatableInputStream(is, bufferSize);
        this.repeatableInputStream = this.is;
    }

    MAX_BYTES_PER_SECOND = 1024 * jets3tProperties.getLongProperty("httpclient.read-throttle", 0);
}
From source file:om.sstvencoder.CropView.java
public void setBitmapStream(InputStream stream) throws IOException {
    mImageOK = false;
    mOrientation = 0;
    recycle();
    // app6 + exif
    int bufferBytes = 1048576;
    if (!stream.markSupported())
        stream = new BufferedInputStream(stream, bufferBytes);
    stream.mark(bufferBytes);
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inJustDecodeBounds = true;
    BitmapFactory.decodeStream(new BufferedInputStream(stream), null, options);
    stream.reset();
    mImageWidth = options.outWidth;
    mImageHeight = options.outHeight;
    if (mImageWidth * mImageHeight < 1024 * 1024) {
        mCacheBitmap = BitmapFactory.decodeStream(stream);
        mSmallImage = true;
    } else {
        mRegionDecoder = BitmapRegionDecoder.newInstance(stream, true);
        mCacheRect.setEmpty();
        mSmallImage = false;
    }
    if (mCacheBitmap == null && mRegionDecoder == null) {
        String size = options.outWidth + "x" + options.outHeight;
        throw new IOException("Stream could not be decoded. Image size: " + size);
    }
    mImageOK = true;
    resetInputRect();
    invalidate();
}
From source file:com.okta.sdk.impl.http.httpclient.HttpClientRequestExecutor.java
@Override
public Response executeRequest(Request request) throws RestException {

    Assert.notNull(request, "Request argument cannot be null.");

    int retryCount = 0;
    URI redirectUri = null;
    HttpEntity entity = null;
    RestException exception = null;

    // Make a copy of the original request params and headers so that we can
    // permute them in the loop and start over with the original every time.
    QueryString originalQuery = new QueryString();
    originalQuery.putAll(request.getQueryString());

    HttpHeaders originalHeaders = new HttpHeaders();
    originalHeaders.putAll(request.getHeaders());

    while (true) {

        if (redirectUri != null) {
            request = new DefaultRequest(request.getMethod(), redirectUri.toString(), null, null,
                    request.getBody(), request.getHeaders().getContentLength());
        }

        if (retryCount > 0) {
            request.setQueryString(originalQuery);
            request.setHeaders(originalHeaders);
        }

        // Sign the request
        this.requestAuthenticator.authenticate(request);

        HttpRequestBase httpRequest = this.httpClientRequestFactory.createHttpClientRequest(request, entity);

        if (httpRequest instanceof HttpEntityEnclosingRequest) {
            entity = ((HttpEntityEnclosingRequest) httpRequest).getEntity();
        }

        HttpResponse httpResponse = null;

        try {
            // We don't want to treat a redirect like a retry,
            // so if redirectUri is not null, we won't pause
            // before executing the request below.
            if (retryCount > 0 && redirectUri == null) {
                pauseExponentially(retryCount, exception);
                if (entity != null) {
                    InputStream content = entity.getContent();
                    if (content.markSupported()) {
                        content.reset();
                    }
                }
            }

            // reset redirectUri so that if there is an exception, we will pause on retry
            redirectUri = null;
            exception = null;
            retryCount++;

            httpResponse = httpClient.execute(httpRequest);

            if (isRedirect(httpResponse)) {
                Header[] locationHeaders = httpResponse.getHeaders("Location");
                String location = locationHeaders[0].getValue();
                log.debug("Redirecting to: {}", location);
                redirectUri = URI.create(location);
                httpRequest.setURI(redirectUri);
            } else {

                Response response = toSdkResponse(httpResponse);

                int httpStatus = response.getHttpStatus();

                if (httpStatus == 429) {
                    throw new RestException(
                            "HTTP 429: Too Many Requests. Exceeded request rate limit in the allotted amount of time.");
                }
                if ((httpStatus == 503 || httpStatus == 504) && retryCount <= this.numRetries) {
                    // allow the loop to continue to execute a retry request
                    continue;
                }

                return response;
            }
        } catch (Throwable t) {
            log.warn("Unable to execute HTTP request: ", t.getMessage(), t);

            if (t instanceof RestException) {
                exception = (RestException) t;
            }

            if (!shouldRetry(httpRequest, t, retryCount)) {
                throw new RestException("Unable to execute HTTP request: " + t.getMessage(), t);
            }
        } finally {
            try {
                httpResponse.getEntity().getContent().close();
            } catch (Throwable ignored) { // NOPMD
            }
        }
    }
}
From source file:com.stormpath.sdk.impl.http.httpclient.HttpClientRequestExecutor.java
@Override
public Response executeRequest(Request request) throws RestException {

    Assert.notNull(request, "Request argument cannot be null.");

    int retryCount = 0;
    URI redirectUri = null;
    HttpEntity entity = null;
    RestException exception = null;

    // Make a copy of the original request params and headers so that we can
    // permute them in the loop and start over with the original every time.
    QueryString originalQuery = new QueryString();
    originalQuery.putAll(request.getQueryString());

    HttpHeaders originalHeaders = new HttpHeaders();
    originalHeaders.putAll(request.getHeaders());

    while (true) {

        if (redirectUri != null) {
            request = new DefaultRequest(request.getMethod(), redirectUri.toString(), null, null,
                    request.getBody(), request.getHeaders().getContentLength());
        }

        if (retryCount > 0) {
            request.setQueryString(originalQuery);
            request.setHeaders(originalHeaders);
        }

        // Sign the request
        this.requestAuthenticator.authenticate(request);

        HttpRequestBase httpRequest = this.httpClientRequestFactory.createHttpClientRequest(request, entity);

        if (httpRequest instanceof HttpEntityEnclosingRequest) {
            entity = ((HttpEntityEnclosingRequest) httpRequest).getEntity();
        }

        HttpResponse httpResponse = null;

        try {
            // We don't want to treat a redirect like a retry,
            // so if redirectUri is not null, we won't pause
            // before executing the request below.
            if (retryCount > 0 && redirectUri == null) {
                pauseExponentially(retryCount, exception);
                if (entity != null) {
                    InputStream content = entity.getContent();
                    if (content.markSupported()) {
                        content.reset();
                    }
                }
            }

            // reset redirectUri so that if there is an exception, we will pause on retry
            redirectUri = null;
            exception = null;
            retryCount++;

            httpResponse = httpClient.execute(httpRequest);

            if (isRedirect(httpResponse)) {
                Header[] locationHeaders = httpResponse.getHeaders("Location");
                String location = locationHeaders[0].getValue();
                log.debug("Redirecting to: {}", location);
                redirectUri = request.getResourceUrl().resolve(location);
                httpRequest.setURI(redirectUri);
            } else {

                Response response = toSdkResponse(httpResponse);

                int httpStatus = response.getHttpStatus();

                if (httpStatus == 429) {
                    throw new RestException(
                            "HTTP 429: Too Many Requests. Exceeded request rate limit in the allotted amount of time.");
                }
                if ((httpStatus == 503 || httpStatus == 504) && retryCount <= this.numRetries) {
                    // allow the loop to continue to execute a retry request
                    continue;
                }

                return response;
            }
        } catch (Throwable t) {
            log.warn("Unable to execute HTTP request: ", t.getMessage(), t);

            if (t instanceof RestException) {
                exception = (RestException) t;
            }

            if (!shouldRetry(httpRequest, t, retryCount)) {
                throw new RestException("Unable to execute HTTP request: " + t.getMessage(), t);
            }
        } finally {
            try {
                httpResponse.getEntity().getContent().close();
            } catch (Throwable ignored) {
            }
        }
    }
}
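All of the examples above share the same defensive idiom: test markSupported() first and, only when it returns false, decorate the stream with a markable wrapper before calling mark and reset. A minimal sketch of that idiom, assuming BufferedInputStream is an acceptable wrapper (the helper name ensureMarkable is hypothetical, not part of any of the libraries shown):

import java.io.BufferedInputStream;
import java.io.InputStream;

final class Streams {

    private Streams() {
    }

    // Returns a stream that is guaranteed to support mark/reset: the original
    // stream if it already does, otherwise a BufferedInputStream wrapping it.
    static InputStream ensureMarkable(InputStream in) {
        return in.markSupported() ? in : new BufferedInputStream(in);
    }
}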