Usage examples for java.io.BufferedInputStream.reset() — declared as `public synchronized void reset() throws IOException`, a method inherited from InputStream.
. From source file:org.apache.hadoop.gateway.filter.rewrite.impl.UrlRewriteResponse.java
@Override public void streamResponse(InputStream input, OutputStream output) throws IOException { InputStream inStream;//from w w w. j av a2 s .com OutputStream outStream; boolean isGzip = false; BufferedInputStream inBuffer = new BufferedInputStream(input); try { // Use this way to check whether the input stream is gzip compressed, in case // the content encoding header is unknown, as it could be unset in inbound response inBuffer.mark(STREAM_BUFFER_SIZE); inStream = new GZIPInputStream(inBuffer); isGzip = true; } catch (ZipException e) { inBuffer.reset(); inStream = inBuffer; } catch (IOException e) { inBuffer.reset(); inStream = inBuffer; } MimeType mimeType = getMimeType(); UrlRewriteFilterContentDescriptor filterContentConfig = getRewriteFilterConfig(rewriter.getConfig(), bodyFilterName, mimeType); if (filterContentConfig != null) { String asType = filterContentConfig.asType(); if (asType != null && asType.trim().length() > 0) { mimeType = MimeTypes.create(asType, getCharacterEncoding()); } } InputStream filteredInput = UrlRewriteStreamFilterFactory.create(mimeType, null, inStream, rewriter, this, UrlRewriter.Direction.OUT, filterContentConfig); outStream = (isGzip) ? new GZIPOutputStream(output) : output; IOUtils.copyLarge(filteredInput, outStream, new byte[STREAM_BUFFER_SIZE]); //KNOX-685: outStream.flush(); outStream.close(); }
From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.sav.SAVFileReaderSpi.java
@Override public boolean canDecodeInput(Object source) throws IOException { out.println("this method is actually called: object"); if (!(source instanceof BufferedInputStream)) { return false; } else if (source instanceof File) { out.println("source is a File object"); } else {/*from w ww . j a v a2 s.c o m*/ out.println("not File object"); } if (source == null) { throw new IllegalArgumentException("source == null!"); } BufferedInputStream stream = (BufferedInputStream) source; dbgLog.fine("applying the sav test\n"); byte[] b = new byte[SAV_HEADER_SIZE]; if (stream.markSupported()) { stream.mark(0); } int nbytes = stream.read(b, 0, SAV_HEADER_SIZE); if (nbytes == 0) { throw new IOException(); } //printHexDump(b, "hex dump of the byte-array"); dbgLog.info("hex dump of the 1st 4 bytes[$FL2 == 24 46 4C 32]=" + new String(Hex.encodeHex(b))); if (stream.markSupported()) { stream.reset(); } boolean DEBUG = false; String hdr4sav = new String(b); dbgLog.fine("from string[$FL2 == 24 46 4C 32]=" + new String(Hex.encodeHex(b)).toUpperCase()); if (hdr4sav.equals(SAV_FILE_SIGNATURE)) { dbgLog.fine("this file is spss-sav type"); return true; } else { dbgLog.fine("this file is NOT spss-sav type"); return false; } }
From source file:edu.harvard.iq.dataverse.ingest.metadataextraction.impl.plugins.fits.FITSFileMetadataExtractorSpi.java
@Override public boolean canDecodeInput(BufferedInputStream stream) throws IOException { if (stream == null) { throw new IllegalArgumentException("stream == null!"); }//ww w. ja va2s. co m byte[] b = new byte[FITS_HEADER_SIZE]; if (stream.markSupported()) { stream.mark(0); } int nbytes = stream.read(b, 0, FITS_HEADER_SIZE); if (nbytes == 0) { throw new IOException(); } //printHexDump(b, "hex dump of the byte-array"); dbgLog.info("hex dump of the 1st " + FITS_HEADER_SIZE + " bytes:" + (new String(Hex.encodeHex(b))).toUpperCase()); if (stream.markSupported()) { stream.reset(); } boolean DEBUG = false; String hdr4fits = new String(b); if (hdr4fits.equals(FITS_FILE_SIGNATURE)) { dbgLog.fine("this is a fits file"); return true; } else { dbgLog.fine("this is NOT a fits file"); return false; } }
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReaderSpi.java
@Override public boolean canDecodeInput(Object source) throws IOException { dbgLog.fine("this method is actually called: object"); if (!(source instanceof BufferedInputStream)) { return false; } else if (source instanceof File) { dbgLog.fine("source is a File object"); } else {/*w w w . jav a2 s . c om*/ dbgLog.fine("not File object"); } if (source == null) { throw new IllegalArgumentException("source == null!"); } BufferedInputStream stream = (BufferedInputStream) source; dbgLog.fine("applying the sav test\n"); byte[] b = new byte[SAV_HEADER_SIZE]; if (stream.markSupported()) { stream.mark(0); } int nbytes = stream.read(b, 0, SAV_HEADER_SIZE); if (nbytes == 0) { throw new IOException(); } //printHexDump(b, "hex dump of the byte-array"); dbgLog.fine("hex dump of the 1st 4 bytes[$FL2 == 24 46 4C 32]=" + new String(Hex.encodeHex(b))); if (stream.markSupported()) { stream.reset(); } boolean DEBUG = false; String hdr4sav = new String(b); dbgLog.fine("from string[$FL2 == 24 46 4C 32]=" + new String(Hex.encodeHex(b)).toUpperCase()); if (hdr4sav.equals(SAV_FILE_SIGNATURE)) { dbgLog.fine("this file is spss-sav type"); return true; } else { dbgLog.fine("this file is NOT spss-sav type"); return false; } }
From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.dta.DTAFileReaderSpi.java
@Override public boolean canDecodeInput(BufferedInputStream stream) throws IOException { if (stream == null) { throw new IllegalArgumentException("stream == null!"); }// ww w . j a v a2s.co m dbgLog.fine("applying the dta test\n"); byte[] b = new byte[DTA_HEADER_SIZE]; if (stream.markSupported()) { stream.mark(0); } int nbytes = stream.read(b, 0, DTA_HEADER_SIZE); if (nbytes == 0) { throw new IOException(); } //printHexDump(b, "hex dump of the byte-array"); if (stream.markSupported()) { stream.reset(); } boolean DEBUG = false; dbgLog.info("hex dump: 1st 4bytes =>" + new String(Hex.encodeHex(b)) + "<-"); if (b[2] != 1) { dbgLog.fine("3rd byte is not 1: given file is not stata-dta type"); return false; } else if ((b[1] != 1) && (b[1] != 2)) { dbgLog.fine("2nd byte is neither 0 nor 1: this file is not stata-dta type"); return false; } else if (!DTAFileReaderSpi.stataReleaseNumber.containsKey(b[0])) { dbgLog.fine("1st byte (" + b[0] + ") is not within the ingestable range [rel. 3-10]:" + "this file is NOT stata-dta type"); return false; } else { dbgLog.fine("this file is stata-dta type: " + DTAFileReaderSpi.stataReleaseNumber.get(b[0]) + "(No in HEX=" + b[0] + ")"); return true; } }
From source file:org.codelibs.fess.web.admin.DataAction.java
/**
 * Handles an admin data-import upload. Dispatches on the uploaded file's
 * extension: ".xml" is imported synchronously via databaseService; ".csv" is
 * spooled to a temp file, classified by its first 20 bytes, and imported
 * asynchronously on a background thread.
 *
 * @return the forward "index?redirect=true" on success
 */
@Execute(validator = true, input = "index")
public String upload() {
    final String fileName = dataForm.uploadedFile.getFileName();
    if (fileName.endsWith(".xml")) {
        try {
            databaseService.importData(dataForm.uploadedFile.getInputStream(),
                    dataForm.overwrite != null && "on".equalsIgnoreCase(dataForm.overwrite));
            SAStrutsUtil.addSessionMessage("success.importing_data");
            return "index?redirect=true";
        } catch (final Exception e) {
            logger.error("Failed to import data.", e);
            throw new SSCActionMessagesException(e, "errors.failed_to_import_data");
        }
    } else if (fileName.endsWith(".csv")) {
        BufferedInputStream is = null;
        File tempFile = null;
        FileOutputStream fos = null;
        // First 20 bytes are kept to sniff the CSV type from its header row below.
        final byte[] b = new byte[20];
        try {
            tempFile = File.createTempFile("fess-import-", ".csv");
            is = new BufferedInputStream(dataForm.uploadedFile.getInputStream());
            // Peek at the head of the stream, then rewind so the entire
            // stream can be drained to the temp file.
            is.mark(20);
            if (is.read(b, 0, 20) <= 0) {
                throw new FessSystemException("no import data.");
            }
            is.reset();
            fos = new FileOutputStream(tempFile);
            StreamUtil.drain(is, fos);
        } catch (final Exception e) {
            // Best-effort cleanup of the temp file before rethrowing.
            if (tempFile != null && !tempFile.delete()) {
                logger.warn("Could not delete " + tempFile.getAbsolutePath());
            }
            logger.error("Failed to import data.", e);
            throw new SSCActionMessagesException(e, "errors.failed_to_import_data");
        } finally {
            IOUtils.closeQuietly(is);
            IOUtils.closeQuietly(fos);
        }
        // Effectively-final copy so the anonymous Runnable below can capture it.
        final File oFile = tempFile;
        try {
            final String head = new String(b, Constants.UTF_8);
            // Only these three CSV layouts are recognized; anything else is rejected.
            if (!head.startsWith("SessionId,") && !head.startsWith("SearchWord,")
                    && !head.startsWith("SearchId,")) {
                logger.error("Unknown file: " + dataForm.uploadedFile);
                throw new SSCActionMessagesException("errors.unknown_import_file");
            }
            final String enc = crawlerProperties.getProperty(Constants.CSV_FILE_ENCODING_PROPERTY,
                    Constants.UTF_8);
            // The import runs on a background thread; the temp file is deleted
            // in its finally block whether the import succeeds or fails.
            new Thread(new Runnable() {
                @Override
                public void run() {
                    Reader reader = null;
                    try {
                        reader = new BufferedReader(new InputStreamReader(new FileInputStream(oFile), enc));
                        if (head.startsWith("SessionId,")) {
                            // Crawling Session CSV
                            crawlingSessionService.importCsv(reader);
                        } else if (head.startsWith("SearchWord,")) {
                            // Search Log CSV
                            searchLogService.importCsv(reader);
                        } else if (head.startsWith("SearchId,")) {
                            // Click Log CSV
                            clickLogService.importCsv(reader);
                        }
                    } catch (final Exception e) {
                        logger.error("Failed to import data.", e);
                        throw new FessSystemException("Failed to import data.", e);
                    } finally {
                        if (!oFile.delete()) {
                            logger.warn("Could not delete " + oFile.getAbsolutePath());
                        }
                        IOUtils.closeQuietly(reader);
                    }
                }
            }).start();
        } catch (final ActionMessagesException e) {
            // Clean up the temp file on rejection; the background thread never started.
            if (!oFile.delete()) {
                logger.warn("Could not delete " + oFile.getAbsolutePath());
            }
            throw e;
        } catch (final Exception e) {
            if (!oFile.delete()) {
                logger.warn("Could not delete " + oFile.getAbsolutePath());
            }
            logger.error("Failed to import data.", e);
            throw new SSCActionMessagesException(e, "errors.failed_to_import_data");
        }
    }
    // NOTE(review): a file that is neither .xml nor .csv also falls through to
    // this success message — confirm that is intended.
    SAStrutsUtil.addSessionMessage("success.importing_data");
    return "index?redirect=true";
}
From source file:slash.navigation.photo.PhotoFormat.java
public void read(InputStream source, ParserContext<Wgs84Route> context) throws Exception { BufferedInputStream bufferedSource = new BufferedInputStream(source, READ_BUFFER_SIZE); bufferedSource.mark(READ_BUFFER_SIZE); Dimension size = Imaging.getImageSize(bufferedSource, null); if (size == null) return;//from w ww.j a v a 2s . co m PhotoPosition position = new PhotoPosition(NotTaggable, context.getStartDate(), "No EXIF data", null); bufferedSource.reset(); ImageMetadata metadata = Imaging.getMetadata(bufferedSource, null); TiffImageMetadata tiffImageMetadata = extractTiffImageMetadata(metadata); if (tiffImageMetadata != null) { @SuppressWarnings("unchecked") List<Directory> directories = (List<Directory>) tiffImageMetadata.getDirectories(); for (Directory directory : directories) log.info("Reading EXIF directory " + directory); extendPosition(position, tiffImageMetadata, context.getStartDate()); } bufferedSource.reset(); File image = context.getFile(); if (image == null) image = extractToTempFile(bufferedSource); position.setOrigin(image); position.setWaypointType(Photo); context.appendRoute(new Wgs84Route(this, Waypoints, new ArrayList<Wgs84Position>(singletonList(position)))); }
From source file:org.paxle.tools.ieporter.cm.impl.ConfigurationIEPorter.java
public Map<String, Dictionary<String, Object>> importConfigurations(File file) throws Exception { BufferedInputStream input = null; Map<String, Dictionary<String, Object>> configs = new HashMap<String, Dictionary<String, Object>>(); try {/*from ww w . j a v a 2 s .c om*/ input = new BufferedInputStream(new FileInputStream(file), 5); // pre-read data to detect file type byte[] test = new byte[5]; input.mark(5); input.read(test); input.reset(); if (new String(test, "UTF-8").equals("<?xml")) { // XML Document found Document doc = this.readXMLDocument(file); Map<String, Dictionary<String, Object>> config = this.importConfigurations(doc); configs.putAll(config); } else if (new String(test, 0, 2).equals("PK")) { // open zip file final ZipInputStream zis = new ZipInputStream(input); // loop through entries ZipEntry ze; while ((ze = zis.getNextEntry()) != null) { // skip directories if (ze.isDirectory()) continue; // read data into memory long size = ze.getSize(); ByteArrayOutputStream bout = (size < 0) ? new ByteArrayOutputStream() : new ByteArrayOutputStream((int) size); IOUtils.copy(zis, bout); bout.close(); // read XML ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray()); Document doc = this.readXMLStream(bin); bin.close(); // parser configuration Map<String, Dictionary<String, Object>> config = this.importConfigurations(doc); configs.putAll(config); } zis.close(); } else { // Unknown file throw new IllegalArgumentException("Unknown file type"); } } finally { if (input != null) try { input.close(); } catch (Exception e) { /* ignore this */} } return configs; }
From source file:com.slytechs.capture.StreamFactory.java
/**
 * Opens a capture input for the given file, auto-detecting whether it is a
 * plain known capture format, a GZIP-compressed one, or something else that
 * must be handled by the fallback factory.
 *
 * @param file   the capture file to open
 * @param filter protocol filter applied to the resulting input
 * @return an open capture input for the file
 * @throws IOException if the file cannot be read
 */
public InputCapture<? extends CapturePacket> newInput(final File file,
        final Filter<ProtocolFilterTarget> filter) throws IOException {
    final BufferedInputStream b = new BufferedInputStream(new FileInputStream(file));
    b.mark(1024); // Buffer first 1K of stream so we can rewind after sniffing
    /*
     * Check the stream, without decompression first
     */
    if (formatType(Channels.newChannel(b)) != null) {
        b.close();
        /*
         * This is a plain uncompressed file, open up a FileChannel. It will be
         * much faster.
         * NOTE(review): opened "rw" here but "r" in the fall-through below —
         * confirm write access is really required on this path.
         */
        return newInput(new RandomAccessFile(file, "rw").getChannel(), filter);
    }
    /*
     * Try with gunziped stream, second
     */
    b.reset(); // Rewind to the mark set above
    // NOTE(review): if the file is not GZIP, the GZIPInputStream constructor
    // throws IOException here instead of falling through to factoryForOther —
    // confirm that is the intended behavior.
    if (formatType(Channels.newChannel(new GZIPInputStream(b))) != null) {
        b.close();
        /*
         * Now reopen the same file, but this time without the buffered
         * inputstream in the middle. Try to make things as efficient as possible.
         * TODO: implement much faster channel based GZIP decompression algorithm
         */
        return newInput(Channels.newChannel(new GZIPInputStream(new FileInputStream(file))), filter);
    }
    // Neither plain nor GZIP: delegate to the fallback factory.
    b.close();
    return factoryForOther.getFactory().newInput(new RandomAccessFile(file, "r").getChannel(), filter);
}
From source file:com.slytechs.capture.StreamFactory.java
/**
 * Opens a capture input of the requested type for the given file,
 * auto-detecting whether it is a plain known capture format or a
 * GZIP-compressed one. Unlike the untyped overload, an unrecognized file is
 * rejected with IllegalArgumentException rather than delegated to a fallback.
 *
 * @param t      the concrete input-capture type to open
 * @param file   the capture file to open
 * @param filter protocol filter applied to the resulting input
 * @return an open capture input of type T
 * @throws IOException              if the file cannot be read
 * @throws IllegalArgumentException if the file is in no known format
 */
public <T extends InputCapture<? extends FilePacket>> T newInput(final Class<T> t, final File file,
        Filter<ProtocolFilterTarget> filter) throws IOException {
    final BufferedInputStream b = new BufferedInputStream(new FileInputStream(file));
    b.mark(1024); // Buffer first 1K of stream so we can rewind after sniffing
    /*
     * Check the stream, without decompression first
     */
    if (formatType(Channels.newChannel(b)) != null) {
        b.close();
        /*
         * This is a plain uncompressed file, open up a FileChannel. It will be
         * much faster.
         * NOTE(review): opened "rw" — confirm write access is really required
         * for reading a capture file.
         */
        return newInput(t, new RandomAccessFile(file, "rw").getChannel(), filter);
    }
    /*
     * Try with gunziped stream, second
     */
    b.reset(); // Rewind to the mark set above
    // NOTE(review): if the file is not GZIP, the GZIPInputStream constructor
    // throws IOException here instead of reaching the IllegalArgumentException
    // below — confirm that is the intended behavior.
    if (formatType(Channels.newChannel(new GZIPInputStream(b))) != null) {
        b.close();
        /*
         * Now reopen the same file, but this time without the buffered
         * inputstream in the middle. Try to make things as efficient as possible.
         * TODO: implement much faster channel based GZIP decompression algorithm
         */
        return newInput(t, Channels.newChannel(new GZIPInputStream(new FileInputStream(file))), filter);
    }
    // NOTE(review): unlike the untyped overload, b is not closed on this path
    // before throwing — possible stream leak; confirm.
    throw new IllegalArgumentException(
            "File is not any compressed or decompressed known format [" + file.getName() + "]");
}