Example usage for java.io BufferedInputStream mark

List of usage examples for java.io BufferedInputStream mark

Introduction

On this page you can find usage examples for java.io.BufferedInputStream.mark(int readlimit).

Prototype

public synchronized void mark(int readlimit) 

Document

See the general contract of the mark method of InputStream.
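
Before the project examples under Usage, here is a minimal, self-contained sketch (written for this page, not taken from any of the projects listed below) of the typical mark/reset pattern: mark the current position with a readlimit at least as large as the number of bytes you intend to peek, read ahead, then reset to rewind. Several of the examples below call mark(0); with BufferedInputStream that still works for short peeks because, in the current OpenJDK implementation, the mark stays valid as long as the marked bytes remain in the internal buffer, but passing a readlimit that covers the intended look-ahead is the safer, contract-conformant choice.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;

public class MarkResetSketch {
    public static void main(String[] args) throws IOException {
        // EF BB BF is the UTF-8 byte order mark, followed by the payload "Hi".
        byte[] data = { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF, 'H', 'i' };
        BufferedInputStream in = new BufferedInputStream(new ByteArrayInputStream(data));

        in.mark(3); // allow up to 3 bytes to be read before reset() may stop working

        byte[] header = new byte[3];
        int n = in.read(header, 0, header.length); // peek at the first bytes

        boolean hasUtf8Bom = n == 3 && (header[0] & 0xFF) == 0xEF && (header[1] & 0xFF) == 0xBB
                && (header[2] & 0xFF) == 0xBF;
        if (!hasUtf8Bom) {
            in.reset(); // rewind so later reads see the peeked bytes again
        }

        System.out.println("UTF-8 BOM present: " + hasUtf8Bom);
        in.close();
    }
}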

Usage

From source file:org.sparkcommerce.cms.file.service.StaticAssetStorageServiceImpl.java

@Transactional("blTransactionManagerAssetStorageInfo")
@Override
public Map<String, String> getCacheFileModel(String fullUrl, Map<String, String> parameterMap)
        throws Exception {
    StaticAsset staticAsset = findStaticAsset(fullUrl);
    if (staticAsset == null) {
        throw new AssetNotFoundException("Unable to find an asset for the url (" + fullUrl + ")");
    }
    String mimeType = staticAsset.getMimeType();

    //extract the values for any named parameters
    Map<String, String> convertedParameters = namedOperationManager.manageNamedParameters(parameterMap);
    String cachedFileName = constructCacheFileName(staticAsset, convertedParameters);

    // Look for a shared file (this represents a file that was originally based on a classpath resource).
    File cacheFile = getFileFromLocalRepository(cachedFileName);
    if (cacheFile.exists()) {
        return buildModel(cacheFile.getAbsolutePath(), mimeType);
    }

    // Obtain the base file (which we may need to convert based on the parameters).
    String baseCachedFileName = constructCacheFileName(staticAsset, null);
    File baseLocalFile = getFileFromLocalRepository(baseCachedFileName);

    if (!baseLocalFile.exists()) {
        if (sparkFileService.checkForResourceOnClassPath(staticAsset.getFullUrl())) {
            cacheFile = sparkFileService.getSharedLocalResource(cachedFileName);
            baseLocalFile = sparkFileService.getSharedLocalResource(baseCachedFileName);
            createLocalFileFromClassPathResource(staticAsset, baseLocalFile);
        } else {
            baseLocalFile = lookupAssetAndCreateLocalFile(staticAsset, baseLocalFile);
        }
    }

    if (convertedParameters.isEmpty()) {
        return buildModel(baseLocalFile.getAbsolutePath(), mimeType);
    } else {
        FileInputStream assetStream = new FileInputStream(baseLocalFile);
        BufferedInputStream original = new BufferedInputStream(assetStream);
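        // Mark the beginning of the stream so it can be rewound during artifact conversion if needed.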
        original.mark(0);

        Operation[] operations = artifactService.buildOperations(convertedParameters, original,
                staticAsset.getMimeType());
        InputStream converted = artifactService.convert(original, operations, staticAsset.getMimeType());

        createLocalFileFromInputStream(converted, cacheFile);
        if ("image/gif".equals(mimeType)) {
            mimeType = "image/png";
        }
        return buildModel(cacheFile.getAbsolutePath(), mimeType);
    }
}

From source file:org.broadleafcommerce.cms.file.service.StaticAssetStorageServiceImpl.java

@Transactional("blTransactionManagerAssetStorageInfo")
@Override
public Map<String, String> getCacheFileModel(String fullUrl, Map<String, String> parameterMap)
        throws Exception {
    StaticAsset staticAsset = findStaticAsset(fullUrl);
    if (staticAsset == null) {
        throw new AssetNotFoundException("Unable to find an asset for the url (" + fullUrl + ")");
    }
    String mimeType = staticAsset.getMimeType();

    //extract the values for any named parameters
    Map<String, String> convertedParameters = namedOperationManager.manageNamedParameters(parameterMap);
    String cachedFileName = constructCacheFileName(staticAsset, convertedParameters);

    // Look for a shared file (this represents a file that was originally based on a classpath resource).
    File cacheFile = getFileFromLocalRepository(cachedFileName);
    if (cacheFile.exists()) {
        return buildModel(cacheFile.getAbsolutePath(), mimeType);
    }

    // Obtain the base file (which we may need to convert based on the parameters).
    String baseCachedFileName = constructCacheFileName(staticAsset, null);
    File baseLocalFile = getFileFromLocalRepository(baseCachedFileName);

    if (!baseLocalFile.exists()) {
        if (broadleafFileService.checkForResourceOnClassPath(staticAsset.getFullUrl())) {
            cacheFile = broadleafFileService.getSharedLocalResource(cachedFileName);
            baseLocalFile = broadleafFileService.getSharedLocalResource(baseCachedFileName);
            createLocalFileFromClassPathResource(staticAsset, baseLocalFile);
        } else {
            baseLocalFile = lookupAssetAndCreateLocalFile(staticAsset, baseLocalFile);
        }
    }

    if (convertedParameters.isEmpty()) {
        return buildModel(baseLocalFile.getAbsolutePath(), mimeType);
    } else {
        FileInputStream assetStream = new FileInputStream(baseLocalFile);
        BufferedInputStream original = new BufferedInputStream(assetStream);
        original.mark(0);

        Operation[] operations = artifactService.buildOperations(convertedParameters, original,
                staticAsset.getMimeType());
        InputStream converted = artifactService.convert(original, operations, staticAsset.getMimeType());

        createLocalFileFromInputStream(converted, cacheFile);
        if ("image/gif".equals(mimeType)) {
            mimeType = "image/png";
        }
        return buildModel(cacheFile.getAbsolutePath(), mimeType);
    }
}

From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.dta.DTAFileReaderSpi.java

@Override
public boolean canDecodeInput(BufferedInputStream stream) throws IOException {
    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    dbgLog.fine("applying the dta test\n");

    byte[] b = new byte[DTA_HEADER_SIZE];

    if (stream.markSupported()) {
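        // Remember this position so the stream can be rewound after the header bytes are peeked below.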
        stream.mark(0);
    }
    int nbytes = stream.read(b, 0, DTA_HEADER_SIZE);

    if (nbytes == 0) {
        throw new IOException();
    }

    if (stream.markSupported()) {
        stream.reset();
    }

    dbgLog.info("hex dump: 1st 4bytes =>" + new String(Hex.encodeHex(b)) + "<-");

    if (b[2] != 1) {
        dbgLog.fine("3rd byte is not 1: given file is not stata-dta type");
        return false;
    } else if ((b[1] != 1) && (b[1] != 2)) {
        dbgLog.fine("2nd byte is neither 0 nor 1: this file is not stata-dta type");
        return false;
    } else if (!DTAFileReaderSpi.stataReleaseNumber.containsKey(b[0])) {
        dbgLog.fine("1st byte (" + b[0] + ") is not within the ingestable range [rel. 3-10]:"
                + "this file is NOT stata-dta type");
        return false;
    } else {
        dbgLog.fine("this file is stata-dta type: " + DTAFileReaderSpi.stataReleaseNumber.get(b[0])
                + "(No in HEX=" + b[0] + ")");
        return true;
    }

}

From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.sav.SAVFileReaderSpi.java

@Override
public boolean canDecodeInput(BufferedInputStream stream) throws IOException {
    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    dbgLog.fine("\napplying the sav test: inputstream case\n");

    byte[] b = new byte[SAV_HEADER_SIZE];

    if (stream.markSupported()) {
        stream.mark(0);
    }
    int nbytes = stream.read(b, 0, SAV_HEADER_SIZE);

    if (nbytes == 0) {
        throw new IOException();
    }
    //printHexDump(b, "hex dump of the byte-array");
    dbgLog.info(
            "hex dump of the 1st 4 bytes[$FL2 == 24 46 4C 32]=" + (new String(Hex.encodeHex(b))).toUpperCase());

    if (stream.markSupported()) {
        stream.reset();
    }

    boolean DEBUG = false;

    String hdr4sav = new String(b);
    dbgLog.fine("from string[$FL2 == 24 46 4C 32]=" + new String(Hex.encodeHex(b)).toUpperCase());

    if (hdr4sav.equals(SAV_FILE_SIGNATURE)) {
        dbgLog.fine("this file is spss-sav type");
        return true;
    } else {
        dbgLog.fine("this file is NOT spss-sav type");
        return false;
    }
}

From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReaderSpi.java

@Override
public boolean canDecodeInput(BufferedInputStream stream) throws IOException {
    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    dbgLog.fine("\napplying the sav test: inputstream case\n");

    byte[] b = new byte[SAV_HEADER_SIZE];

    if (stream.markSupported()) {
        stream.mark(0);
    }
    int nbytes = stream.read(b, 0, SAV_HEADER_SIZE);

    if (nbytes == 0) {
        throw new IOException();
    }
    //printHexDump(b, "hex dump of the byte-array");
    dbgLog.fine(
            "hex dump of the 1st 4 bytes[$FL2 == 24 46 4C 32]=" + (new String(Hex.encodeHex(b))).toUpperCase());

    if (stream.markSupported()) {
        stream.reset();
    }

    boolean DEBUG = false;

    String hdr4sav = new String(b);
    dbgLog.fine("from string[$FL2 == 24 46 4C 32]=" + new String(Hex.encodeHex(b)).toUpperCase());

    if (hdr4sav.equals(SAV_FILE_SIGNATURE)) {
        dbgLog.fine("this file is spss-sav type");
        return true;
    } else {
        dbgLog.fine("this file is NOT spss-sav type");
        return false;
    }
}

From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.dta.DTAFileReaderSpi.java

@Override
public boolean canDecodeInput(BufferedInputStream stream) throws IOException {
    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    dbgLog.fine("applying the dta test\n");

    byte[] b = new byte[DTA_HEADER_SIZE];

    if (stream.markSupported()) {
        stream.mark(0);
    }
    int nbytes = stream.read(b, 0, DTA_HEADER_SIZE);

    if (nbytes == 0) {
        throw new IOException();
    }
    //printHexDump(b, "hex dump of the byte-array");

    if (stream.markSupported()) {
        stream.reset();
    }

    boolean DEBUG = false;

    dbgLog.info("hex dump: 1st 4bytes =>" + new String(Hex.encodeHex(b)) + "<-");

    if (b[2] != 1) {
        dbgLog.fine("3rd byte is not 1: given file is not stata-dta type");
        return false;
    } else if ((b[1] != 1) && (b[1] != 2)) {
        dbgLog.fine("2nd byte is neither 0 nor 1: this file is not stata-dta type");
        return false;
    } else if (!DTAFileReaderSpi.stataReleaseNumber.containsKey(b[0])) {
        dbgLog.fine("1st byte (" + b[0] + ") is not within the ingestable range [rel. 3-10]:"
                + "this file is NOT stata-dta type");
        return false;
    } else {
        dbgLog.fine("this file is stata-dta type: " + DTAFileReaderSpi.stataReleaseNumber.get(b[0])
                + "(No in HEX=" + b[0] + ")");
        return true;
    }

}

From source file:flex.messaging.services.http.proxy.ResponseFilter.java

protected void writeResponseAsString(InputStream inStream, int length, ProxyContext context)
        throws IOException {
    char[] tmp = new char[RESPONSE_CHUNK];
    //int i = 0;
    StringBuffer sb = new StringBuffer(length < 0 ? 16 : length);
    BufferedInputStream bufferedIn = new BufferedInputStream(inStream);
    String charset = context.getHttpMethod().getResponseCharSet();

    bufferedIn.mark(4);

    // Check for BOM as InputStreamReader does not strip BOM in all cases.
    boolean hasBOM = false;
    int read = bufferedIn.read();
    if (read > 0) {
        // UTF-8 BOM is EF BB BF
        if (0xEF == (read & 0xFF)) {
            read = bufferedIn.read();
            if (0xBB == (read & 0xFF)) {
                read = bufferedIn.read();
                if (0xBF == (read & 0xFF)) {
                    hasBOM = true;
                    charset = "UTF-8";
                }
            }
        }
        // UTF-16 Little Endian BOM is FF FE
        // UTF-32 Little Endian BOM is FF FE 00 00
        else if (0xFF == (read & 0xFF)) {
            read = bufferedIn.read();
            if (0xFE == (read & 0xFF)) {
                hasBOM = true;
                charset = "UTF16-LE";

                // Check two more bytes in case we have UTF-32
                bufferedIn.mark(2);
                read = bufferedIn.read();
                if (0x00 == (read & 0xFF)) {
                    read = bufferedIn.read();
                    if (0x00 == (read & 0xFF)) {
                        charset = "UTF32-LE";
                    } else {
                        bufferedIn.reset();
                    }
                } else {
                    bufferedIn.reset();
                }
            }
        }
        // UTF-16 Big Endian BOM is FE FF
        else if (0xFE == (read & 0xFF)) {
            read = bufferedIn.read();
            if (0xFF == (read & 0xFF)) {
                hasBOM = true;
                charset = "UTF16-BE";
            }
        }
        // UTF-32 Big Endian BOM is 00 00 FE FF
        else if (0x00 == (read & 0xFF)) {
            read = bufferedIn.read();
            if (0x00 == (read & 0xFF)) {
                read = bufferedIn.read();
                if (0xFE == (read & 0xFF)) {
                    read = bufferedIn.read();
                    if (0xFF == (read & 0xFF)) {
                        hasBOM = true;
                        charset = "UTF32-BE";
                    }
                }
            }
        }

        // If we didn't find a BOM, all bytes should contribute to the content
        if (!hasBOM)
            bufferedIn.reset();
    }

    BufferedReader reader = new BufferedReader(new InputStreamReader(bufferedIn, charset));
    int charactersRead = -1;
    while ((charactersRead = reader.read(tmp, 0, tmp.length)) >= 0) {
        sb.append(new String(tmp, 0, charactersRead));
    }

    context.setResponse(sb.toString());
}

From source file:com.google.acre.script.NHttpAsyncUrlfetch.java

private Scriptable callback_result(long start_time, URL url, HttpResponse res, boolean system,
        boolean log_to_user, String response_encoding) {
    BrowserCompatSpecFactory bcsf = new BrowserCompatSpecFactory();
    CookieSpec cspec = bcsf.newInstance(null);
    String protocol = url.getProtocol();
    boolean issecure = ("https".equals(protocol));
    int port = url.getPort();
    if (port == -1)
        port = 80;
    CookieOrigin origin = new CookieOrigin(url.getHost(), port, url.getPath(), issecure);

    Object body = "";
    int status = res.getStatusLine().getStatusCode();

    Context ctx = Context.getCurrentContext();
    Scriptable out = ctx.newObject(_scope);
    Scriptable headers = ctx.newObject(_scope);
    Scriptable cookies = ctx.newObject(_scope);

    out.put("status", out, status);
    out.put("headers", out, headers);
    out.put("cookies", out, cookies);

    Header content_type_header = null;

    StringBuilder response_header_log = new StringBuilder();
    for (Header h : res.getAllHeaders()) {
        if (h.getName().equalsIgnoreCase("set-cookie")) {
            String set_cookie = h.getValue();
            Matcher m = Pattern.compile("\\s*(([^,]|(,\\s*\\d))+)").matcher(set_cookie);
            while (m.find()) {
                Header ch = new BasicHeader("Set-Cookie", set_cookie.substring(m.start(), m.end()));
                try {
                    List<Cookie> pcookies = cspec.parse(ch, origin);
                    for (Cookie c : pcookies) {
                        cookies.put(c.getName(), cookies, new AcreCookie(c).toJsObject(_scope));
                    }
                } catch (MalformedCookieException e) {
                    throw new RuntimeException(e);
                }
            }
        } else if (h.getName().equalsIgnoreCase("content-type")) {
            content_type_header = h;
        }

        response_header_log.append(h.getName() + ": " + h.getValue() + "\r\n");
        headers.put(h.getName(), headers, h.getValue());
    }

    String charset = null;
    if (content_type_header != null) {
        HeaderElement values[] = content_type_header.getElements();
        if (values.length == 1) {
            NameValuePair param = values[0].getParameterByName("charset");
            if (param != null) {
                charset = param.getValue();
            }
        }
    }

    if (charset == null)
        charset = response_encoding;

    // read body
    HttpEntity ent = res.getEntity();
    try {
        if (ent != null) {
            InputStream res_stream = ent.getContent();
            Header cenc = ent.getContentEncoding();
            if (cenc != null && res_stream != null) {
                HeaderElement[] codecs = cenc.getElements();
                for (HeaderElement codec : codecs) {
                    if (codec.getName().equalsIgnoreCase("gzip")) {
                        res_stream = new GZIPInputStream(res_stream);
                    }
                }
            }

            long first_byte_time = 0;
            long end_time = 0;
            if (content_type_header != null && (content_type_header.getValue().startsWith("image/")
                    || content_type_header.getValue().startsWith("application/octet-stream")
                    || content_type_header.getValue().startsWith("multipart/form-data"))) {
                // HttpClient's InputStream doesn't support mark/reset, so
                // wrap it with one that does.
                BufferedInputStream bufis = new BufferedInputStream(res_stream);
                bufis.mark(2);
                bufis.read();
                first_byte_time = System.currentTimeMillis();
                bufis.reset();
                byte[] data = IOUtils.toByteArray(bufis);

                end_time = System.currentTimeMillis();
                body = new JSBinary();
                ((JSBinary) body).set_data(data);

                try {
                    if (res_stream != null)
                        res_stream.close();
                } catch (IOException e) {
                    // ignore
                }
            } else if (res_stream == null || charset == null) {
                first_byte_time = end_time = System.currentTimeMillis();
                body = "";
            } else {
                StringWriter writer = new StringWriter();
                Reader reader = new InputStreamReader(res_stream, charset);
                int i = reader.read();
                first_byte_time = System.currentTimeMillis();
                writer.write(i);
                IOUtils.copy(reader, writer);
                end_time = System.currentTimeMillis();
                body = writer.toString();

                try {
                    reader.close();
                    writer.close();
                } catch (IOException e) {
                    // ignore
                }
            }

            long reading_time = end_time - first_byte_time;
            long waiting_time = first_byte_time - start_time;

            String httprephdr = response_header_log.toString();
            // XXX need to log start-time of request
            _logger.syslog4j("DEBUG", "urlfetch.response.async", "URL", url.toString(), "Status",
                    Integer.toString(status), "Headers", httprephdr, "Reading time", reading_time,
                    "Waiting time", waiting_time);

            if (system && log_to_user) {
                _response.userlog4j("DEBUG", "urlfetch.response.async", "URL", url.toString(), "Status",
                        Integer.toString(status), "Headers", httprephdr);

            }

            // XXX seems like AcreResponse should be able to use
            // the statistics object to generate x-metaweb-cost
            // given a bit of extra information

            Statistics.instance().collectUrlfetchTime(start_time, first_byte_time, end_time);

            _costCollector.collect((system) ? "asuc" : "auuc").collect((system) ? "asuw" : "auuw",
                    waiting_time);

        }

    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    out.put("body", out, body);

    return out;
}

From source file:slash.navigation.photo.PhotoFormat.java

public void read(InputStream source, ParserContext<Wgs84Route> context) throws Exception {
    BufferedInputStream bufferedSource = new BufferedInputStream(source, READ_BUFFER_SIZE);
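    // Mark the start of the stream so it can be rewound for each metadata pass below.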
    bufferedSource.mark(READ_BUFFER_SIZE);

    Dimension size = Imaging.getImageSize(bufferedSource, null);
    if (size == null)
        return;

    PhotoPosition position = new PhotoPosition(NotTaggable, context.getStartDate(), "No EXIF data", null);

    bufferedSource.reset();
    ImageMetadata metadata = Imaging.getMetadata(bufferedSource, null);
    TiffImageMetadata tiffImageMetadata = extractTiffImageMetadata(metadata);
    if (tiffImageMetadata != null) {
        @SuppressWarnings("unchecked")
        List<Directory> directories = (List<Directory>) tiffImageMetadata.getDirectories();
        for (Directory directory : directories)
            log.info("Reading EXIF directory " + directory);

        extendPosition(position, tiffImageMetadata, context.getStartDate());
    }

    bufferedSource.reset();
    File image = context.getFile();
    if (image == null)
        image = extractToTempFile(bufferedSource);
    position.setOrigin(image);
    position.setWaypointType(Photo);
    context.appendRoute(new Wgs84Route(this, Waypoints, new ArrayList<Wgs84Position>(singletonList(position))));
}

From source file:org.apache.hadoop.gateway.filter.rewrite.impl.UrlRewriteResponse.java

@Override
public void streamResponse(InputStream input, OutputStream output) throws IOException {
    InputStream inStream;
    OutputStream outStream;
    boolean isGzip = false;
    BufferedInputStream inBuffer = new BufferedInputStream(input);
    try {
        // Detect gzip compression by attempting to wrap the stream, since the
        // Content-Encoding header may be unknown or unset on the inbound response.
        inBuffer.mark(STREAM_BUFFER_SIZE);
        inStream = new GZIPInputStream(inBuffer);
        isGzip = true;
    } catch (ZipException e) {
        inBuffer.reset();
        inStream = inBuffer;
    } catch (IOException e) {
        inBuffer.reset();
        inStream = inBuffer;
    }

    MimeType mimeType = getMimeType();
    UrlRewriteFilterContentDescriptor filterContentConfig = getRewriteFilterConfig(rewriter.getConfig(),
            bodyFilterName, mimeType);
    if (filterContentConfig != null) {
        String asType = filterContentConfig.asType();
        if (asType != null && asType.trim().length() > 0) {
            mimeType = MimeTypes.create(asType, getCharacterEncoding());
        }
    }
    InputStream filteredInput = UrlRewriteStreamFilterFactory.create(mimeType, null, inStream, rewriter, this,
            UrlRewriter.Direction.OUT, filterContentConfig);
    outStream = (isGzip) ? new GZIPOutputStream(output) : output;
    IOUtils.copyLarge(filteredInput, outStream, new byte[STREAM_BUFFER_SIZE]);
    //KNOX-685: outStream.flush();
    outStream.close();
}