Example usage for java.io.InputStream.reset()

List of usage examples for java.io.InputStream.reset()

Introduction

On this page you can find usage examples for java.io.InputStream.reset().

Prototype

public synchronized void reset() throws IOException 

Document

Repositions this stream to the position at the time the mark method was last called on this input stream.
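
reset() repositions the stream only if mark() was called earlier and the stream actually supports marking (markSupported() returns true, as it does for BufferedInputStream and ByteArrayInputStream); otherwise it throws an IOException. A minimal sketch of that contract, using a placeholder file name:

    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import java.io.InputStream;

    // Minimal sketch: peek at the first byte of a file, then rewind.
    // "data.bin" is a placeholder path; the enclosing method declares IOException.
    try (InputStream in = new BufferedInputStream(new FileInputStream("data.bin"))) {
        if (in.markSupported()) {        // true for BufferedInputStream
            in.mark(16);                 // remember this position; valid while at most 16 bytes are read
            int firstByte = in.read();   // consume some data
            in.reset();                  // rewind to the marked position
            // the next read() returns firstByte again
        }
    }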

Usage

From source file:org.opendatakit.services.sync.service.logic.AggregateSynchronizer.java

/**
 * Download the file at the given URI to the specified local file.
 *
 * @param destFile    the local file to write to
 * @param downloadUrl the URI to download from
 * @throws HttpClientWebException
 * @throws IOException
 */
@Override
public void downloadFile(File destFile, URI downloadUrl) throws HttpClientWebException, IOException {

    // WiFi network connections can be renegotiated during a large form download
    // sequence, which causes intermittent download failures. Silently retry once
    // after each failure; abort only after two consecutive failures.
    boolean success = false;
    int attemptCount = 0;
    while (!success && attemptCount++ <= 2) {

        HttpGet request = new HttpGet();
        // no body content-type and no response content-type requested
        wrapper.buildBasicRequest(downloadUrl, request);
        if (destFile.exists()) {
            String md5Hash = ODKFileUtils.getMd5Hash(sc.getAppName(), destFile);
            request.addHeader(HttpHeaders.IF_NONE_MATCH, md5Hash);
        }

        CloseableHttpResponse response = null;
        try {
            response = wrapper.httpClientExecute(request, HttpRestProtocolWrapper.SC_OK_SC_NOT_MODIFIED);
            int statusCode = response.getStatusLine().getStatusCode();

            if (statusCode == HttpStatus.SC_NOT_MODIFIED) {
                log.i(LOGTAG, "downloading " + downloadUrl.toString() + " returns non-modified -- No-Op");
                return;
            }

            File tmp = new File(destFile.getParentFile(), destFile.getName() + ".tmp");
            int totalLen = 0;
            InputStream is = null;
            BufferedOutputStream os = null;
            try {
                // open the InputStream of the (uncompressed) entity body...
                is = response.getEntity().getContent();

                os = new BufferedOutputStream(new FileOutputStream(tmp));

                // write connection to temporary file
                byte buf[] = new byte[8192];
                int len;
                while ((len = is.read(buf, 0, buf.length)) >= 0) {
                    if (len != 0) {
                        totalLen += len;
                        os.write(buf, 0, len);
                    }
                }
                is.close();
                is = null;

                os.flush();
                os.close();
                os = null;

                success = tmp.renameTo(destFile);
            } catch (Exception e) {
                // most likely a socket timeout
                e.printStackTrace();
                log.e(LOGTAG, "downloading " + downloadUrl.toString() + " failed after " + totalLen + " bytes: "
                        + e.toString());
                try {
                    // signal to the framework that this socket is hosed.
                    // with the various nested streams, this may not work...
                    is.reset();
                } catch (Exception ex) {
                    // ignore
                }
                throw e;
            } finally {
                if (os != null) {
                    try {
                        os.close();
                    } catch (Exception e) {
                        // no-op
                    }
                }
                if (is != null) {
                    try {
                        // ensure stream is consumed...
                        byte buf[] = new byte[8192];
                        while (is.read(buf) >= 0)
                            ;
                    } catch (Exception e) {
                        // no-op
                    }
                    try {
                        is.close();
                    } catch (Exception e) {
                        // no-op
                    }
                }
                if (tmp.exists()) {
                    tmp.delete();
                }

                if (response != null) {
                    response.close();
                }
            }
        } catch (Exception e) {
            log.printStackTrace(e);
            if (attemptCount != 1) {
                throw e;
            }
        } finally {
            if (response != null) {
                EntityUtils.consumeQuietly(response.getEntity());
                response.close();
            }
        }
    }
}

From source file:org.apache.openjpa.meta.AbstractCFMetaDataFactory.java

/**
 * Parse persistent type names.
 */
protected Set<String> parsePersistentTypeNames(ClassLoader loader) throws IOException {
    ClassArgParser cparser = newClassArgParser();
    String[] clss;
    Set<String> names = new HashSet<String>();
    if (files != null) {
        File file;
        for (Iterator itr = files.iterator(); itr.hasNext();) {
            file = (File) itr.next();
            if ((AccessController.doPrivileged(J2DoPrivHelper.isDirectoryAction(file))).booleanValue()) {
                if (log.isTraceEnabled())
                    log.trace(_loc.get("scanning-directory", file));
                scan(new FileMetaDataIterator(file, newMetaDataFilter()), cparser, names, true, file);
            } else if (file.getName().endsWith(".jar")) {
                if (log.isTraceEnabled())
                    log.trace(_loc.get("scanning-jar", file));
                try {
                    ZipFile zFile = AccessController.doPrivileged(J2DoPrivHelper.newZipFileAction(file));
                    scan(new ZipFileMetaDataIterator(zFile, newMetaDataFilter()), cparser, names, true, file);
                } catch (PrivilegedActionException pae) {
                    throw (IOException) pae.getException();
                }
            } else {
                if (log.isTraceEnabled())
                    log.trace(_loc.get("scanning-file", file));
                clss = cparser.parseTypeNames(new FileMetaDataIterator(file));
                List<String> newNames = Arrays.asList(clss);
                if (log.isTraceEnabled())
                    log.trace(_loc.get("scan-found-names", newNames, file));
                names.addAll(newNames);
                File f = AccessController.doPrivileged(J2DoPrivHelper.getAbsoluteFileAction(file));
                try {
                    mapPersistentTypeNames(AccessController.doPrivileged(J2DoPrivHelper.toURLAction(f)), clss);
                } catch (PrivilegedActionException pae) {
                    throw (FileNotFoundException) pae.getException();
                }
            }
        }
    }
    URL url;
    if (urls != null) {
        for (Iterator itr = urls.iterator(); itr.hasNext();) {
            url = (URL) itr.next();
            if ("file".equals(url.getProtocol())) {
                File file = AccessController
                        .doPrivileged(J2DoPrivHelper.getAbsoluteFileAction(new File(url.getFile())));
                if (files != null && files.contains(file)) {
                    continue;
                } else if ((AccessController.doPrivileged(J2DoPrivHelper.isDirectoryAction(file)))
                        .booleanValue()) {
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("scanning-directory", file));
                    scan(new FileMetaDataIterator(file, newMetaDataFilter()), cparser, names, true, file);
                    continue;
                }
            }
            if ("vfs".equals(url.getProtocol())) {
                if (log.isTraceEnabled()) {
                    log.trace(_loc.get("scanning-vfs-url", url));
                }

                final URLConnection conn = url.openConnection();
                final Object vfsContent = conn.getContent();
                final URL finalUrl = url;
                File file = AccessController.doPrivileged(new PrivilegedAction<File>() {
                    @SuppressWarnings({ "rawtypes", "unchecked" })
                    public File run() {
                        try {
                            Class virtualFileClass = Class.forName("org.jboss.vfs.VirtualFile");
                            Method getPhysicalFile = virtualFileClass.getDeclaredMethod("getPhysicalFile");
                            return (File) getPhysicalFile.invoke(vfsContent);
                        } catch (Exception e) {
                            log.error(_loc.get("while-scanning-vfs-url", finalUrl), e);
                        }
                        return null;
                    }
                });
                if (file != null)
                    scan(new FileMetaDataIterator(file, newMetaDataFilter()), cparser, names, true, file);

                continue;
            }
            if ("jar".equals(url.getProtocol())) {
                if (url.getPath().endsWith("!/")) {
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("scanning-jar-url", url));
                    scan(new ZipFileMetaDataIterator(url, newMetaDataFilter()), cparser, names, true, url);
                } else {
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("scanning-jar-url", url));
                    scan(new JarFileURLMetaDataIterator(url, newMetaDataFilter()), cparser, names, true, url);
                }
            } else if (url.getPath().endsWith(".jar")) {
                if (log.isTraceEnabled())
                    log.trace(_loc.get("scanning-jar-at-url", url));
                try {
                    InputStream is = (InputStream) AccessController
                            .doPrivileged(J2DoPrivHelper.openStreamAction(url));
                    scan(new ZipStreamMetaDataIterator(new ZipInputStream(is), newMetaDataFilter()), cparser,
                            names, true, url);
                } catch (PrivilegedActionException pae) {
                    throw (IOException) pae.getException();
                }
            } else {
                // Open an InputStream from the URL and sniff for a zip header.  If one is found, this is
                // a URL with a jar-formatted InputStream, as per the JPA specification.  Otherwise, fall back
                // to URLMetaDataIterator.
                BufferedInputStream is = null;

                try {
                    is = new BufferedInputStream(
                            (InputStream) AccessController.doPrivileged(J2DoPrivHelper.openStreamAction(url)));
                } catch (PrivilegedActionException pae) {
                    throw (IOException) pae.getException();
                }

                // Check for zip header magic 0x50 0x4b 0x03 0x04
                is.mark(0);
                boolean zipHeaderMatch = is.read() == 0x50 && is.read() == 0x4b && is.read() == 0x03
                        && is.read() == 0x04;
                is.reset();

                if (zipHeaderMatch) {
                    // The URL provides a Jar-formatted InputStream, consume it with ZipStreamMetaDataIterator
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("scanning-jar-at-url", url));
                    scan(new ZipStreamMetaDataIterator(new ZipInputStream(is), newMetaDataFilter()), cparser,
                            names, true, url);
                } else {
                    // Fall back to URLMetaDataIterator
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("scanning-url", url));
                    clss = cparser.parseTypeNames(new URLMetaDataIterator(url));
                    List<String> newNames = Arrays.asList(clss);
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("scan-found-names", newNames, url));
                    names.addAll(newNames);
                    mapPersistentTypeNames(url, clss);
                }
            }
        }
    }
    if (rsrcs != null) {
        String rsrc;
        MetaDataIterator mitr;
        for (Iterator itr = rsrcs.iterator(); itr.hasNext();) {
            rsrc = (String) itr.next();
            if (rsrc.endsWith(".jar")) {
                url = AccessController.doPrivileged(J2DoPrivHelper.getResourceAction(loader, rsrc));
                if (url != null) {
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("scanning-jar-stream-url", url));
                    try {
                        InputStream is = (InputStream) AccessController
                                .doPrivileged(J2DoPrivHelper.openStreamAction(url));
                        scan(new ZipStreamMetaDataIterator(new ZipInputStream(is), newMetaDataFilter()),
                                cparser, names, true, url);
                    } catch (PrivilegedActionException pae) {
                        throw (IOException) pae.getException();
                    }
                }
            } else {
                if (log.isTraceEnabled())
                    log.trace(_loc.get("scanning-resource", rsrc));
                mitr = new ResourceMetaDataIterator(rsrc, loader);
                OpenJPAConfiguration conf = repos.getConfiguration();
                Map peMap = null;
                if (conf instanceof OpenJPAConfigurationImpl)
                    peMap = ((OpenJPAConfigurationImpl) conf).getPersistenceEnvironment();
                URL puUrl = peMap == null ? null : (URL) peMap.get(PERSISTENCE_UNIT_ROOT_URL);
                List<String> mappingFileNames = peMap == null ? null
                        : (List<String>) peMap.get(MAPPING_FILE_NAMES);
                List<URL> jars = peMap == null ? null : (List<URL>) peMap.get(JAR_FILE_URLS);
                String puUrlString = puUrl == null ? null : puUrl.toString();
                if (log.isTraceEnabled())
                    log.trace(_loc.get("pu-root-url", puUrlString));

                URL puORMUrl = null;
                try {
                    if (puUrlString != null) {
                        String puORMUrlStr = puUrlString + (puUrlString.endsWith("/") ? "" : "/") + rsrc;
                        puORMUrl = AccessController.doPrivileged(J2DoPrivHelper.createURL(puORMUrlStr));
                    }
                } catch (PrivilegedActionException e) {
                    throw new IOException("Error generating puORMUrlStr.", e.getCause());
                }

                List<URL> urls = new ArrayList<URL>(3);
                while (mitr.hasNext()) {
                    url = (URL) mitr.next();
                    String urlString = url.toString();
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("resource-url", urlString));
                    if (peMap != null) {
                        //OPENJPA-2102: decode the URL to remove such things a spaces (' ') encoded as '%20'
                        if (puUrlString != null && decode(urlString).indexOf(decode(puUrlString)) != -1) {
                            urls.add(url);
                        } else if (puORMUrl != null && puORMUrl.equals(url)) {
                            // Check URL equality to support encapsulating URL protocols
                            urls.add(url);
                        }
                        if (mappingFileNames != null && mappingFileNames.size() != 0) {
                            for (String mappingFileName : mappingFileNames) {
                                if (log.isTraceEnabled())
                                    log.trace(_loc.get("mapping-file-name", mappingFileName));
                                if (urlString.indexOf(mappingFileName) != -1)
                                    urls.add(url);
                            }
                        }

                        if (jars != null && jars.size() != 0) {
                            for (URL jarUrl : jars) {
                                if (log.isTraceEnabled())
                                    log.trace(_loc.get("jar-file-url", jarUrl));
                                if (urlString.indexOf(jarUrl.toString()) != -1)
                                    urls.add(url);
                            }
                        }
                    } else {
                        urls.add(url);
                    }
                }
                mitr.close();

                for (Object obj : urls) {
                    url = (URL) obj;
                    clss = cparser.parseTypeNames(new URLMetaDataIterator(url));
                    List<String> newNames = Arrays.asList(clss);
                    if (log.isTraceEnabled())
                        log.trace(_loc.get("scan-found-names", newNames, rsrc));
                    names.addAll(newNames);
                    mapPersistentTypeNames(url, clss);
                }
            }
        }
    }
    if (cpath != null) {
        String[] dirs = (String[]) cpath.toArray(new String[cpath.size()]);
        scan(new ClasspathMetaDataIterator(dirs, newMetaDataFilter()), cparser, names, true, dirs);
    }
    if (types != null)
        names.addAll(types);

    if (log.isTraceEnabled())
        log.trace(_loc.get("parse-found-names", names));

    return names;
}
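
For reference, the zip-header check above relies on BufferedInputStream buffering the first bytes even though mark(0) passes a readlimit of zero. A distilled sketch of the same sniff (not OpenJPA code) with a readlimit that explicitly covers the peeked bytes; url is assumed to be a java.net.URL:

    BufferedInputStream is = new BufferedInputStream(url.openStream());
    is.mark(4);                                  // readlimit covers the four magic bytes re-read below
    boolean zipHeaderMatch = is.read() == 0x50 && is.read() == 0x4b
            && is.read() == 0x03 && is.read() == 0x04;
    is.reset();                                  // rewind so the downstream reader also sees the header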

From source file:com.microsoft.azure.storage.core.Utility.java

/**
 * Determines the size of an input stream, and optionally calculates the MD5 hash for the stream.
 * 
 * @param sourceStream
 *            An <code>InputStream</code> object that represents the stream to measure.
 * @param writeLength
 *            The number of bytes to read from the stream.
 * @param abandonLength
 *            The number of bytes to read before the analysis is abandoned. Set this value to <code>-1</code> to
 *            force the entire stream to be read. This parameter is provided to support upload thresholds.
 * @param rewindSourceStream
 *            <code>true</code> if the stream should be rewound after it is read; otherwise, <code>false</code>.
 * @param calculateMD5
 *            <code>true</code> if an MD5 hash will be calculated; otherwise, <code>false</code>.
 * 
 * @return A {@link StreamMd5AndLength} object that contains the stream length, and optionally the MD5 hash.
 * 
 * @throws IOException
 *             If an I/O error occurs.
 * @throws StorageException
 *             If a storage service error occurred.
 */
public static StreamMd5AndLength analyzeStream(final InputStream sourceStream, long writeLength,
        long abandonLength, final boolean rewindSourceStream, final boolean calculateMD5)
        throws IOException, StorageException {
    if (abandonLength < 0) {
        abandonLength = Long.MAX_VALUE;
    }

    if (rewindSourceStream) {
        if (!sourceStream.markSupported()) {
            throw new IllegalArgumentException(SR.INPUT_STREAM_SHOULD_BE_MARKABLE);
        }

        sourceStream.mark(Constants.MAX_MARK_LENGTH);
    }

    MessageDigest digest = null;
    if (calculateMD5) {
        try {
            digest = MessageDigest.getInstance("MD5");
        } catch (final NoSuchAlgorithmException e) {
            // This won't happen; throw fatal.
            throw Utility.generateNewUnexpectedStorageException(e);
        }
    }

    if (writeLength < 0) {
        writeLength = Long.MAX_VALUE;
    }

    final StreamMd5AndLength retVal = new StreamMd5AndLength();
    int count = -1;
    final byte[] retrievedBuff = new byte[Constants.BUFFER_COPY_LENGTH];

    int nextCopy = (int) Math.min(retrievedBuff.length, writeLength - retVal.getLength());
    count = sourceStream.read(retrievedBuff, 0, nextCopy);

    while (nextCopy > 0 && count != -1) {
        if (calculateMD5) {
            digest.update(retrievedBuff, 0, count);
        }
        retVal.setLength(retVal.getLength() + count);

        if (retVal.getLength() > abandonLength) {
            // Abandon operation
            retVal.setLength(-1);
            retVal.setMd5(null);
            break;
        }

        nextCopy = (int) Math.min(retrievedBuff.length, writeLength - retVal.getLength());
        count = sourceStream.read(retrievedBuff, 0, nextCopy);
    }

    if (retVal.getLength() != -1 && calculateMD5) {
        retVal.setMd5(Base64.encode(digest.digest()));
    }

    if (retVal.getLength() != -1 && writeLength > 0) {
        retVal.setLength(Math.min(retVal.getLength(), writeLength));
    }

    if (rewindSourceStream) {
        sourceStream.reset();
        sourceStream.mark(Constants.MAX_MARK_LENGTH);
    }

    return retVal;
}
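
Because rewindSourceStream requires a markable stream (the method throws IllegalArgumentException otherwise), callers typically wrap raw streams in a BufferedInputStream first. A hedged sketch of a call site, with illustrative names rather than actual Azure SDK code:

    InputStream raw = new FileInputStream(uploadFile);   // uploadFile is a placeholder
    InputStream source = raw.markSupported() ? raw : new BufferedInputStream(raw);

    // Measure the stream and compute its MD5, then leave it rewound for the actual upload.
    StreamMd5AndLength info = Utility.analyzeStream(
            source,
            -1L,    // writeLength < 0: read to the end of the stream
            -1L,    // abandonLength < 0: never abandon the analysis
            true,   // rewindSourceStream: mark before reading, reset afterwards
            true);  // calculateMD5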

From source file:org.docx4j.XmlUtils.java

public static Object unmarshal(InputStream is, JAXBContext jc) throws JAXBException {

    // Guard against XXE
    XMLInputFactory xif = XMLInputFactory.newInstance();
    xif.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
    xif.setProperty(XMLInputFactory.SUPPORT_DTD, false); // a DTD is merely ignored, its presence doesn't cause an exception
    XMLStreamReader xsr = null;
    try {
        xsr = xif.createXMLStreamReader(is);
    } catch (XMLStreamException e) {
        throw new JAXBException(e);
    }

    Object o = null;
    Unmarshaller u = jc.createUnmarshaller();

    JaxbValidationEventHandler eventHandler = new JaxbValidationEventHandler();
    //      if (is.markSupported()) {
    //         // Only fail hard if we know we can restart
    //         eventHandler.setContinue(false);
    //      }
    u.setEventHandler(eventHandler);
    try {
        o = u.unmarshal(xsr);
        return o;
    } catch (UnmarshalException ue) {

        if (ue.getLinkedException() != null && ue.getLinkedException().getMessage().contains("entity")) {

            /*
               Caused by: javax.xml.stream.XMLStreamException: ParseError at [row,col]:[10,19]
               Message: The entity "xxe" was referenced, but not declared.
                  at com.sun.org.apache.xerces.internal.impl.XMLStreamReaderImpl.next(Unknown Source)
                  at com.sun.xml.internal.bind.v2.runtime.unmarshaller.StAXStreamConnector.bridge(Unknown Source)
                */
            log.error(ue.getMessage(), ue);
            throw ue;
        }

        if (is.markSupported()) {
            // When reading from zip, we use a ByteArrayInputStream,
            // which does support this.

            log.info("encountered unexpected content; pre-processing");
            eventHandler.setContinue(true);

            try {
                Templates mcPreprocessorXslt = JaxbValidationEventHandler.getMcPreprocessor();
                is.reset();
                JAXBResult result = XmlUtils.prepareJAXBResult(jc);
                XmlUtils.transform(new StreamSource(is), mcPreprocessorXslt, null, result);
                return //XmlUtils.unwrap(
                result.getResult();
            } catch (Exception e) {
                throw new JAXBException("Preprocessing exception", e);
            }

        } else {
            log.error(ue.getMessage(), ue);
            log.error(".. and mark not supported");
            throw ue;
        }
    }

}
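
The retry path above works only because the stream supports mark()/reset() (the comment notes that zip parts arrive as a ByteArrayInputStream, whose reset() rewinds to the start by default). A minimal plain-JDK sketch, not docx4j code, of buffering an arbitrary part so unmarshal can re-read it; partStream is a placeholder for the raw part stream and jc is an existing JAXBContext:

    // Buffer the part fully in memory so the resulting stream supports mark()/reset().
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    byte[] chunk = new byte[8192];
    int n;
    while ((n = partStream.read(chunk)) != -1) {
        buffer.write(chunk, 0, n);
    }
    InputStream resettable = new ByteArrayInputStream(buffer.toByteArray()); // markSupported() == true
    Object o = XmlUtils.unmarshal(resettable, jc);   // can now reset() and pre-process on failure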

From source file:com.facebook.imagepipeline.platform.DefaultDecoder.java

/**
 * Create a bitmap from an input stream.
 *
 * @param inputStream the InputStream
 * @param options the {@link android.graphics.BitmapFactory.Options} used to decode the stream
 * @param regionToDecode optional image region to decode or null to decode the whole image
 * @param transformToSRGB whether to allow color space transformation to sRGB at load time
 * @return the bitmap
 */
private CloseableReference<Bitmap> decodeFromStream(InputStream inputStream, BitmapFactory.Options options,
        @Nullable Rect regionToDecode, final boolean transformToSRGB) {
    Preconditions.checkNotNull(inputStream);
    int targetWidth = options.outWidth;
    int targetHeight = options.outHeight;
    if (regionToDecode != null) {
        targetWidth = regionToDecode.width() / options.inSampleSize;
        targetHeight = regionToDecode.height() / options.inSampleSize;
    }
    int sizeInBytes = getBitmapSize(targetWidth, targetHeight, options);
    final Bitmap bitmapToReuse = mBitmapPool.get(sizeInBytes);
    if (bitmapToReuse == null) {
        throw new NullPointerException("BitmapPool.get returned null");
    }
    options.inBitmap = bitmapToReuse;

    // Performs transformation at load time to sRGB.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && transformToSRGB) {
        options.inPreferredColorSpace = ColorSpace.get(ColorSpace.Named.SRGB);
    }

    Bitmap decodedBitmap = null;
    ByteBuffer byteBuffer = mDecodeBuffers.acquire();
    if (byteBuffer == null) {
        byteBuffer = ByteBuffer.allocate(DECODE_BUFFER_SIZE);
    }
    try {
        options.inTempStorage = byteBuffer.array();
        if (regionToDecode != null) {
            BitmapRegionDecoder bitmapRegionDecoder = null;
            try {
                bitmapToReuse.reconfigure(targetWidth, targetHeight, options.inPreferredConfig);
                bitmapRegionDecoder = BitmapRegionDecoder.newInstance(inputStream, true);
                decodedBitmap = bitmapRegionDecoder.decodeRegion(regionToDecode, options);
            } catch (IOException e) {
                FLog.e(TAG, "Could not decode region %s, decoding full bitmap instead.", regionToDecode);
            } finally {
                if (bitmapRegionDecoder != null) {
                    bitmapRegionDecoder.recycle();
                }
            }
        }
        if (decodedBitmap == null) {
            decodedBitmap = BitmapFactory.decodeStream(inputStream, null, options);
        }
    } catch (IllegalArgumentException e) {
        mBitmapPool.release(bitmapToReuse);
        // This is thrown if the Bitmap options are invalid, so let's just try to decode the bitmap
        // as-is, which might be inefficient - but it works.
        try {
            // We need to reset the stream first
            inputStream.reset();

            Bitmap naiveDecodedBitmap = BitmapFactory.decodeStream(inputStream);
            if (naiveDecodedBitmap == null) {
                throw e;
            }
            return CloseableReference.of(naiveDecodedBitmap, SimpleBitmapReleaser.getInstance());
        } catch (IOException re) {
            // We throw the original exception instead since it's the one causing this workaround in the
            // first place.
            throw e;
        }
    } catch (RuntimeException re) {
        mBitmapPool.release(bitmapToReuse);
        throw re;
    } finally {
        mDecodeBuffers.release(byteBuffer);
    }

    if (bitmapToReuse != decodedBitmap) {
        mBitmapPool.release(bitmapToReuse);
        decodedBitmap.recycle();
        throw new IllegalStateException();
    }

    return CloseableReference.of(decodedBitmap, mBitmapPool);
}
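
The IllegalArgumentException path above relies on Fresco's pooled stream being resettable. A simplified sketch of the same reset-and-retry idea with a plain BufferedInputStream (illustrative, not Fresco code; rawImageStream is a placeholder and the enclosing method is assumed to declare IOException):

    BufferedInputStream in = new BufferedInputStream(rawImageStream);
    in.mark(Integer.MAX_VALUE);       // remember the start of the encoded image data
    Bitmap bitmap;
    try {
        bitmap = BitmapFactory.decodeStream(in, null, options);   // first attempt with options
    } catch (IllegalArgumentException e) {
        in.reset();                   // rewind so the fallback decode sees the whole image
        bitmap = BitmapFactory.decodeStream(in);                   // plain decode as a fallback
    }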

From source file:org.alfresco.rest.api.impl.NodesImpl.java

private void writeContent(NodeRef nodeRef, String fileName, InputStream stream, boolean guessEncoding) {
    try {
        ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);

        String mimeType = mimetypeService.guessMimetype(fileName);
        if ((mimeType != null) && (!mimeType.equals(MimetypeMap.MIMETYPE_BINARY))) {
            // quick/weak guess based on file extension
            writer.setMimetype(mimeType);
        } else {
            // stronger guess based on file stream
            writer.guessMimetype(fileName);
        }

        InputStream is = null;

        if (guessEncoding) {
            is = new BufferedInputStream(stream);
            is.mark(1024);
            writer.setEncoding(guessEncoding(is, mimeType, false));
            try {
                is.reset();
            } catch (IOException ioe) {
                if (logger.isWarnEnabled()) {
                    logger.warn("Failed to reset stream after trying to guess encoding: " + ioe.getMessage());
                }
            }
        } else {
            is = stream;
        }

        writer.putContent(is);
    } catch (ContentQuotaException cqe) {
        throw new InsufficientStorageException();
    } catch (ContentLimitViolationException clv) {
        throw new RequestEntityTooLargeException(clv.getMessage());
    } catch (ContentIOException cioe) {
        if (cioe.getCause() instanceof NodeLockedException) {
            throw (NodeLockedException) cioe.getCause();
        }
        throw cioe;
    }
}

From source file:org.apache.tika.gui.TikaGUI.java

private void handleStream(InputStream input, Metadata md) throws Exception {
    StringWriter htmlBuffer = new StringWriter();
    StringWriter textBuffer = new StringWriter();
    StringWriter textMainBuffer = new StringWriter();
    StringWriter xmlBuffer = new StringWriter();
    StringBuilder metadataBuffer = new StringBuilder();

    ContentHandler handler = new TeeContentHandler(getHtmlHandler(htmlBuffer),
            getTextContentHandler(textBuffer), getTextMainContentHandler(textMainBuffer),
            getXmlContentHandler(xmlBuffer));

    context.set(DocumentSelector.class, new ImageDocumentSelector());

    input = TikaInputStream.get(new ProgressMonitorInputStream(this, "Parsing stream", input));

    if (input.markSupported()) {
        int mark = -1;
        if (input instanceof TikaInputStream) {
            if (((TikaInputStream) input).hasFile()) {
                mark = (int) ((TikaInputStream) input).getLength();
            }
        }
        if (mark == -1) {
            mark = MAX_MARK;
        }
        input.mark(mark);
    }
    parser.parse(input, handler, md, context);

    String[] names = md.names();
    Arrays.sort(names);
    for (String name : names) {
        for (String val : md.getValues(name)) {
            metadataBuffer.append(name);
            metadataBuffer.append(": ");
            metadataBuffer.append(val);
            metadataBuffer.append("\n");
        }
    }

    String name = md.get(Metadata.RESOURCE_NAME_KEY);
    if (name != null && name.length() > 0) {
        setTitle("Apache Tika: " + name);
    } else {
        setTitle("Apache Tika: unnamed document");
    }

    setText(metadata, metadataBuffer.toString());
    setText(xml, xmlBuffer.toString());
    setText(text, textBuffer.toString());
    setText(textMain, textMainBuffer.toString());
    setText(html, htmlBuffer.toString());
    if (!input.markSupported()) {
        setText(json, "InputStream does not support mark/reset for Recursive Parsing");
        layout.show(cards, "metadata");
        return;
    }
    boolean isReset = false;
    try {
        input.reset();
        isReset = true;
    } catch (IOException e) {
        setText(json,
                "Error during stream reset.\n" + "There's a limit of " + MAX_MARK
                        + " bytes for this type of processing in the GUI.\n"
                        + "Try the app with command line argument of -J.");
    }
    if (isReset) {
        RecursiveParserWrapper wrapper = new RecursiveParserWrapper(parser,
                new BasicContentHandlerFactory(BasicContentHandlerFactory.HANDLER_TYPE.BODY, -1));
        wrapper.parse(input, null, new Metadata(), new ParseContext());
        StringWriter jsonBuffer = new StringWriter();
        JsonMetadataList.setPrettyPrinting(true);
        JsonMetadataList.toJson(wrapper.getMetadata(), jsonBuffer);
        setText(json, jsonBuffer.toString());
    }
    layout.show(cards, "metadata");
}

From source file:ddf.content.endpoint.rest.ContentEndpoint.java

/**
 * Create an entry in the Content Repository and/or the Metadata Catalog based on the request's
 * directive. The input request is in multipart/form-data format, with the expected parts of the
 * body being the directive (STORE, PROCESS, STORE_AND_PROCESS), and the file, with optional
 * filename specified, followed by the contents to be stored. If the filename is not specified
 * for the contents in the body of the input request, then the default filename "file" will be
 * used, with the file extension determined based upon the MIME type.
 *
 * A sample multipart/form-data request would look like: Content-Type: multipart/form-data;
 * boundary=ARCFormBoundaryfqeylm5unubx1or
 * 
 * --ARCFormBoundaryfqeylm5unubx1or Content-Disposition: form-data; name="directive"
 * 
 * STORE_AND_PROCESS --ARCFormBoundaryfqeylm5unubx1or-- Content-Disposition: form-data;
 * name="myfile.json"; filename="C:\DDF\geojson_valid.json" Content-Type:
 * application/json;id=geojson
 * 
 * <contents to store go here>
 * 
 * @param multipartBody
 *            the multipart/form-data formatted body of the request
 * @param requestUriInfo
 * @return
 * @throws ContentEndpointException
 */
@POST
@Path("/")
public Response create(MultipartBody multipartBody, @Context UriInfo requestUriInfo)
        throws ContentEndpointException {
    logger.trace("ENTERING: create");

    String directive = multipartBody.getAttachmentObject(DIRECTIVE_ATTACHMENT_CONTENT_ID, String.class);
    logger.debug("directive = " + directive);

    String contentUri = multipartBody.getAttachmentObject("contentUri", String.class);
    logger.debug("contentUri = " + contentUri);

    InputStream stream = null;
    String filename = null;
    String contentType = null;

    // TODO: For DDF-1970 (multiple files in single create request)
    // Would access List<Attachment> = multipartBody.getAllAttachments() and loop
    // through them getting all of the "file" attachments (and skipping the "directive")
    // But how to support a "contentUri" parameter *per* file attachment? Can it be
    // just another parameter to the name="file" Content-Disposition?
    Attachment contentPart = multipartBody.getAttachment(FILE_ATTACHMENT_CONTENT_ID);
    if (contentPart != null) {
        // Example Content-Type header:
        // Content-Type: application/json;id=geojson
        if (contentPart.getContentType() != null) {
            contentType = contentPart.getContentType().toString();
        }

        filename = contentPart.getContentDisposition()
                .getParameter(FILENAME_CONTENT_DISPOSITION_PARAMETER_NAME);

        // Only interested in attachments for file uploads. Any others should be covered by
        // the FormParam arguments.
        if (StringUtils.isEmpty(filename)) {
            logger.debug("No filename parameter provided - generating default filename");
            String fileExtension = DEFAULT_FILE_EXTENSION;
            try {
                fileExtension = mimeTypeMapper.getFileExtensionForMimeType(contentType); // DDF-2307
                if (StringUtils.isEmpty(fileExtension)) {
                    fileExtension = DEFAULT_FILE_EXTENSION;
                }
            } catch (MimeTypeResolutionException e) {
                logger.debug("Exception getting file extension for contentType = " + contentType);
            }
            filename = DEFAULT_FILE_NAME + fileExtension; // DDF-2263
            logger.debug("No filename parameter provided - default to " + filename);
        } else {
            filename = FilenameUtils.getName(filename);
        }

        // Get the file contents as an InputStream and ensure the stream is positioned
        // at the beginning
        try {
            stream = contentPart.getDataHandler().getInputStream();
            if (stream != null && stream.available() == 0) {
                stream.reset();
            }
        } catch (IOException e) {
            logger.warn("IOException reading stream from file attachment in multipart body", e);
        }
    } else {
        logger.debug("No file contents attachment found");
    }

    Response response = doCreate(stream, contentType, directive, filename, contentUri, requestUriInfo);

    logger.trace("EXITING: create");

    return response;
}

From source file:at.ac.tuwien.qse.sepm.dao.repo.impl.JpegSerializer.java

@Override
public void update(InputStream is, OutputStream os, PhotoMetadata metadata) throws DAOException {
    if (is == null)
        throw new IllegalArgumentException();
    if (os == null)
        throw new IllegalArgumentException();
    if (metadata == null)
        throw new IllegalArgumentException();
    LOGGER.debug("updating photo metadata {}", metadata);

    String tags = "travelimg";

    for (Tag element : metadata.getTags()) {
        tags += "/" + element.getName();
    }

    Rating rating = metadata.getRating();
    tags += "/rating|" + rating;

    Place place = metadata.getPlace();
    if (place != null) {
        tags += "/place|" + place.getCity() + "|" + place.getCountry() + "|" + place.getLatitude() + "|"
                + place.getLongitude();
    }

    Journey journey = metadata.getJourney();
    if (journey != null) {
        tags += "/journey|" + journey.getName() + "|" + journey.getStartDate().format(DATE_FORMATTER) + "|"
                + journey.getEndDate().format(DATE_FORMATTER);
    }

    Photographer photographer = metadata.getPhotographer();
    if (photographer != null) {
        tags += "/photographer|" + photographer.getName();
    }

    try {
        is.mark(Integer.MAX_VALUE);
        ImageMetadata imageData = Imaging.getMetadata(is, null);
        if (imageData == null) {
            LOGGER.debug("could not find image metadata");
            throw new DAOException("No metadata found.");
        }
        if (!(imageData instanceof JpegImageMetadata)) {
            LOGGER.debug("metadata is of unknown type");
            throw new DAOException("Metadata is of unknown type.");
        }

        JpegImageMetadata jpegData = (JpegImageMetadata) imageData;
        TiffOutputSet outputSet = new TiffOutputSet();
        TiffImageMetadata exifData = jpegData.getExif();
        if (exifData != null) {
            outputSet = exifData.getOutputSet();
        }

        TiffOutputDirectory exifDirectory = outputSet.getOrCreateExifDirectory();
        outputSet.setGPSInDegrees(metadata.getLongitude(), metadata.getLatitude());

        exifDirectory.removeField(ExifTagConstants.EXIF_TAG_DATE_TIME_ORIGINAL);
        exifDirectory.add(ExifTagConstants.EXIF_TAG_DATE_TIME_ORIGINAL,
                DATE_FORMATTER.format(metadata.getDatetime()));

        exifDirectory.removeField(ExifTagConstants.EXIF_TAG_USER_COMMENT);
        exifDirectory.add(ExifTagConstants.EXIF_TAG_USER_COMMENT, tags);

        is.reset();
        new ExifRewriter().updateExifMetadataLossless(is, os, outputSet);

    } catch (IOException | ImageReadException | ImageWriteException ex) {
        LOGGER.warn("failed updating metadata");
        throw new DAOException(ex);
    }

    LOGGER.debug("updated photo metadata");
}

From source file:com.adobe.phonegap.contentsync.Sync.java

private boolean unzipSync(File targetFile, String outputDirectory, ProgressEvent progress,
        CallbackContext callbackContext) {
    Log.d(LOG_TAG, "unzipSync called");
    Log.d(LOG_TAG, "zip = " + targetFile.getAbsolutePath());
    InputStream inputStream = null;
    ZipFile zip = null;
    boolean anyEntries = false;
    try {
        synchronized (progress) {
            if (progress.isAborted()) {
                return false;
            }
        }

        zip = new ZipFile(targetFile);

        // Since Cordova 3.3.0 and release of File plugins, files are accessed via cdvfile://
        // Accept a path or a URI for the source zip.
        Uri zipUri = getUriForArg(targetFile.getAbsolutePath());
        Uri outputUri = getUriForArg(outputDirectory);

        CordovaResourceApi resourceApi = webView.getResourceApi();

        File tempFile = resourceApi.mapUriToFile(zipUri);
        if (tempFile == null || !tempFile.exists()) {
            sendErrorMessage("Zip file does not exist", UNZIP_ERROR, callbackContext);
        }

        File outputDir = resourceApi.mapUriToFile(outputUri);
        outputDirectory = outputDir.getAbsolutePath();
        outputDirectory += outputDirectory.endsWith(File.separator) ? "" : File.separator;
        if (outputDir == null || (!outputDir.exists() && !outputDir.mkdirs())) {
            sendErrorMessage("Could not create output directory", UNZIP_ERROR, callbackContext);
        }

        OpenForReadResult zipFile = resourceApi.openForRead(zipUri);
        progress.setStatus(STATUS_EXTRACTING);
        progress.setLoaded(0);
        progress.setTotal(zip.size());
        Log.d(LOG_TAG, "zip file len = " + zip.size());

        inputStream = new BufferedInputStream(zipFile.inputStream);
        inputStream.mark(10);
        int magic = readInt(inputStream);

        if (magic != 875721283) { // CRX identifier
            inputStream.reset();
        } else {
            // CRX files contain a header. This header consists of:
            //  * 4 bytes of magic number
            //  * 4 bytes of CRX format version,
            //  * 4 bytes of public key length
            //  * 4 bytes of signature length
            //  * the public key
            //  * the signature
            // and then the ordinary zip data follows. We skip over the header before creating the ZipInputStream.
            readInt(inputStream); // version == 2.
            int pubkeyLength = readInt(inputStream);
            int signatureLength = readInt(inputStream);

            inputStream.skip(pubkeyLength + signatureLength);
        }

        // The inputstream is now pointing at the start of the actual zip file content.
        ZipInputStream zis = new ZipInputStream(inputStream);
        inputStream = zis;

        ZipEntry ze;
        byte[] buffer = new byte[32 * 1024];

        while ((ze = zis.getNextEntry()) != null) {
            synchronized (progress) {
                if (progress.isAborted()) {
                    return false;
                }
            }

            anyEntries = true;
            String compressedName = ze.getName();

            if (ze.getSize() > getFreeSpace()) {
                return false;
            }

            if (ze.isDirectory()) {
                File dir = new File(outputDirectory + compressedName);
                dir.mkdirs();
            } else {
                File file = new File(outputDirectory + compressedName);
                file.getParentFile().mkdirs();
                if (file.exists() || file.createNewFile()) {
                    Log.w(LOG_TAG, "extracting: " + file.getPath());
                    FileOutputStream fout = new FileOutputStream(file);
                    int count;
                    while ((count = zis.read(buffer)) != -1) {
                        fout.write(buffer, 0, count);
                    }
                    fout.close();
                }

            }
            progress.addLoaded(1);
            updateProgress(callbackContext, progress);
            zis.closeEntry();
        }
    } catch (Exception e) {
        String errorMessage = "An error occurred while unzipping.";
        sendErrorMessage(errorMessage, UNZIP_ERROR, callbackContext);
        Log.e(LOG_TAG, errorMessage, e);
    } finally {
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (IOException e) {
            }
        }
        if (zip != null) {
            try {
                zip.close();
            } catch (IOException e) {
            }
        }
    }

    if (anyEntries)
        return true;
    else
        return false;
}