List of usage examples for java.io.SequenceInputStream
public SequenceInputStream(Enumeration<? extends InputStream> e)

Initializes a newly created SequenceInputStream by remembering the argument, which must be an Enumeration that produces objects whose run-time type is InputStream. The input streams produced by the enumeration are read, in order, to provide the bytes to be read from this SequenceInputStream.
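A minimal, self-contained sketch of this constructor (the file names a.txt and b.txt are hypothetical, used only for illustration): because the enumeration's streams are read in order, the combined stream yields every byte of a.txt before any byte of b.txt.

import java.io.*;
import java.util.*;

public class SequenceDemo {
    public static void main(String[] args) throws IOException {
        // Collections.enumeration adapts any Collection to the
        // Enumeration<? extends InputStream> the constructor expects.
        List<InputStream> parts = Arrays.asList(
                new FileInputStream("a.txt"),
                new FileInputStream("b.txt"));
        try (InputStream in = new SequenceInputStream(Collections.enumeration(parts))) {
            int b;
            while ((b = in.read()) != -1) {
                System.out.write(b); // copy the concatenated bytes to stdout
            }
        }
        System.out.flush();
    }
}

Closing the SequenceInputStream also closes any streams the enumeration has not yet exhausted, so the try-with-resources block is enough to release both file handles.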
From source file: me.tatetian.hs.io.SamplableByteArrayOutputStream.java
/**
 * Gets the current contents of this byte stream as an InputStream. The
 * returned stream is backed by buffers of <code>this</code> stream,
 * avoiding memory allocation and copy, thus saving space and time.
 *
 * @return the current contents of this output stream.
 * @see java.io.ByteArrayOutputStream#toByteArray()
 * @see #reset()
 * @since Commons IO 2.0
 */
private InputStream toBufferedInputStream() {
    int remaining = count;
    if (remaining == 0) {
        return new ClosedInputStream();
    }
    List<ByteArrayInputStream> list = new ArrayList<ByteArrayInputStream>(buffers.size());
    for (byte[] buf : buffers) {
        int c = Math.min(buf.length, remaining);
        list.add(new ByteArrayInputStream(buf, 0, c));
        remaining -= c;
        if (remaining == 0) {
            break;
        }
    }
    return new SequenceInputStream(Collections.enumeration(list));
}
From source file: org.apache.sling.servlets.post.impl.helper.SlingFileUploadHandler.java
/**
 * Merge all previous chunks with the last chunk's stream into a temporary file
 * and return it.
 */
private File mergeChunks(final Node parentNode, final InputStream lastChunkStream)
        throws PersistenceException, RepositoryException {
    OutputStream out = null;
    SequenceInputStream mergeStrm = null;
    File file = null;
    try {
        file = File.createTempFile("tmp-", "-mergechunk");
        out = new FileOutputStream(file);
        String startPattern = SlingPostConstants.CHUNK_NODE_NAME + "_" + "0_*";
        NodeIterator nodeItr = parentNode.getNodes(startPattern);
        Set<InputStream> inpStrmSet = new LinkedHashSet<InputStream>();
        while (nodeItr.hasNext()) {
            if (nodeItr.getSize() > 1) {
                throw new RepositoryException("more than one node found for pattern: " + startPattern);
            }
            Node rangeNode = nodeItr.nextNode();
            inpStrmSet.add(rangeNode.getProperty(javax.jcr.Property.JCR_DATA).getBinary().getStream());
            log.debug("added chunk {} to merge stream", rangeNode.getName());
            String[] indexBounds = rangeNode.getName()
                    .substring((SlingPostConstants.CHUNK_NODE_NAME + "_").length()).split("_");
            startPattern = SlingPostConstants.CHUNK_NODE_NAME + "_"
                    + String.valueOf(Long.valueOf(indexBounds[1]) + 1) + "_*";
            nodeItr = parentNode.getNodes(startPattern);
        }
        inpStrmSet.add(lastChunkStream);
        mergeStrm = new SequenceInputStream(Collections.enumeration(inpStrmSet));
        IOUtils.copyLarge(mergeStrm, out);
    } catch (IOException e) {
        throw new PersistenceException("exception occurred", e);
    } finally {
        IOUtils.closeQuietly(out);
        IOUtils.closeQuietly(mergeStrm);
    }
    return file;
}
From source file: com.warfrog.bitmapallthethings.BattEngine.java
private void generateBitmap(String inputName, String outputName) throws Exception {
    System.out.println("Generating " + outputName);
    File input = new File(inputName);
    int size = (int) new FileInputStream(inputName).getChannel().size();
    if (size > getMaxFileSize()) {
        System.err.println(
                "ERROR: Skipping " + inputName + " the file size is larger than the maximum size allowed.");
        return;
    }
    int height = (size / (getBytesPerPixel() / 8)) / getWidth();
    int fillerBytes = (size / (getBytesPerPixel() / 8)) % getWidth();
    // encode (repeat this for each file in a directory)
    InputStream header = generateBitmapHeader(getWidth(), height, size, fillerBytes);
    InputStream file = generateFileInputStream(inputName);
    InputStream filler = generateFillerStream(fillerBytes);
    Vector<InputStream> inputStreams = new Vector<InputStream>();
    inputStreams.add(header);
    inputStreams.add(file);
    inputStreams.add(filler);
    SequenceInputStream inputStream = new SequenceInputStream(inputStreams.elements());
    Files.copy(inputStream, new File(outputName).toPath(), StandardCopyOption.REPLACE_EXISTING);
}
From source file: org.apache.xmlgraphics.image.codec.png.PNGImageDecoder.java
private void parse_IEND_chunk(final PNGChunk chunk) {
    // Store text strings
    final int textLen = this.textKeys.size();
    final String[] textArray = new String[2 * textLen];
    for (int i = 0; i < textLen; ++i) {
        final String key = this.textKeys.get(i);
        final String val = this.textStrings.get(i);
        textArray[2 * i] = key;
        textArray[2 * i + 1] = val;
        if (this.emitProperties) {
            final String uniqueKey = "text_" + i + ':' + key;
            this.properties.put(uniqueKey.toLowerCase(), val);
        }
    }
    if (this.encodeParam != null) {
        this.encodeParam.setText(textArray);
    }
    // Store compressed text strings
    final int ztextLen = this.ztextKeys.size();
    final String[] ztextArray = new String[2 * ztextLen];
    for (int i = 0; i < ztextLen; ++i) {
        final String key = this.ztextKeys.get(i);
        final String val = this.ztextStrings.get(i);
        ztextArray[2 * i] = key;
        ztextArray[2 * i + 1] = val;
        if (this.emitProperties) {
            final String uniqueKey = "ztext_" + i + ':' + key;
            this.properties.put(uniqueKey.toLowerCase(), val);
        }
    }
    if (this.encodeParam != null) {
        this.encodeParam.setCompressedText(ztextArray);
    }
    // Parse prior IDAT chunks
    final InputStream seqStream = new SequenceInputStream(Collections.enumeration(this.streamVec));
    final InputStream infStream = new InflaterInputStream(seqStream, new Inflater());
    this.dataStream = new DataInputStream(infStream);
    // Create an empty WritableRaster
    int depth = this.bitDepth;
    if (this.colorType == PNG_COLOR_GRAY && this.bitDepth < 8 && this.output8BitGray) {
        depth = 8;
    }
    if (this.colorType == PNG_COLOR_PALETTE && this.expandPalette) {
        depth = 8;
    }
    final int bytesPerRow = (this.outputBands * this.width * depth + 7) / 8;
    final int scanlineStride = depth == 16 ? bytesPerRow / 2 : bytesPerRow;
    this.theTile = createRaster(this.width, this.height, this.outputBands, scanlineStride, depth);
    if (this.performGammaCorrection && this.gammaLut == null) {
        initGammaLut(this.bitDepth);
    }
    if (this.postProcess == POST_GRAY_LUT || this.postProcess == POST_GRAY_LUT_ADD_TRANS
            || this.postProcess == POST_GRAY_LUT_ADD_TRANS_EXP) {
        initGrayLut(this.bitDepth);
    }
    decodeImage(this.interlaceMethod == 1);
    this.sampleModel = this.theTile.getSampleModel();
    if (this.colorType == PNG_COLOR_PALETTE && !this.expandPalette) {
        if (this.outputHasAlphaPalette) {
            this.colorModel = new IndexColorModel(this.bitDepth, this.paletteEntries, this.redPalette,
                    this.greenPalette, this.bluePalette, this.alphaPalette);
        } else {
            this.colorModel = new IndexColorModel(this.bitDepth, this.paletteEntries, this.redPalette,
                    this.greenPalette, this.bluePalette);
        }
    } else if (this.colorType == PNG_COLOR_GRAY && this.bitDepth < 8 && !this.output8BitGray) {
        final byte[] palette = this.expandBits[this.bitDepth];
        this.colorModel = new IndexColorModel(this.bitDepth, palette.length, palette, palette, palette);
    } else {
        this.colorModel = createComponentColorModel(this.sampleModel);
    }
}
From source file: org.apache.xmlgraphics.image.codec.png.PNGRed.java
private void parse_IEND_chunk(final PNGChunk chunk) throws IOException {
    // Store text strings
    final int textLen = this.textKeys.size();
    final String[] textArray = new String[2 * textLen];
    for (int i = 0; i < textLen; ++i) {
        final String key = this.textKeys.get(i);
        final String val = this.textStrings.get(i);
        textArray[2 * i] = key;
        textArray[2 * i + 1] = val;
        if (this.emitProperties) {
            final String uniqueKey = "text_" + i + ':' + key;
            this.properties.put(uniqueKey.toLowerCase(), val);
        }
    }
    if (this.encodeParam != null) {
        this.encodeParam.setText(textArray);
    }
    // Store compressed text strings
    final int ztextLen = this.ztextKeys.size();
    final String[] ztextArray = new String[2 * ztextLen];
    for (int i = 0; i < ztextLen; ++i) {
        final String key = this.ztextKeys.get(i);
        final String val = this.ztextStrings.get(i);
        ztextArray[2 * i] = key;
        ztextArray[2 * i + 1] = val;
        if (this.emitProperties) {
            final String uniqueKey = "ztext_" + i + ':' + key;
            this.properties.put(uniqueKey.toLowerCase(), val);
        }
    }
    if (this.encodeParam != null) {
        this.encodeParam.setCompressedText(ztextArray);
    }
    // Parse prior IDAT chunks
    final InputStream seqStream = new SequenceInputStream(Collections.enumeration(this.streamVec));
    final InputStream infStream = new InflaterInputStream(seqStream, new Inflater());
    this.dataStream = new DataInputStream(infStream);
    // Create an empty WritableRaster
    int depth = this.bitDepth;
    if (this.colorType == PNG_COLOR_GRAY && this.bitDepth < 8 && this.output8BitGray) {
        depth = 8;
    }
    if (this.colorType == PNG_COLOR_PALETTE && this.expandPalette) {
        depth = 8;
    }
    final int width = this.bounds.width;
    final int height = this.bounds.height;
    final int bytesPerRow = (this.outputBands * width * depth + 7) / 8;
    final int scanlineStride = depth == 16 ? bytesPerRow / 2 : bytesPerRow;
    this.theTile = createRaster(width, height, this.outputBands, scanlineStride, depth);
    if (this.performGammaCorrection && this.gammaLut == null) {
        initGammaLut(this.bitDepth);
    }
    if (this.postProcess == POST_GRAY_LUT || this.postProcess == POST_GRAY_LUT_ADD_TRANS
            || this.postProcess == POST_GRAY_LUT_ADD_TRANS_EXP) {
        initGrayLut(this.bitDepth);
    }
    decodeImage(this.interlaceMethod == 1);
    // Free resources associated with compressed data.
    this.dataStream.close();
    infStream.close();
    seqStream.close();
    this.streamVec = null;
    final SampleModel sm = this.theTile.getSampleModel();
    ColorModel cm;
    if (this.colorType == PNG_COLOR_PALETTE && !this.expandPalette) {
        if (this.outputHasAlphaPalette) {
            cm = new IndexColorModel(this.bitDepth, this.paletteEntries, this.redPalette, this.greenPalette,
                    this.bluePalette, this.alphaPalette);
        } else {
            cm = new IndexColorModel(this.bitDepth, this.paletteEntries, this.redPalette, this.greenPalette,
                    this.bluePalette);
        }
    } else if (this.colorType == PNG_COLOR_GRAY && this.bitDepth < 8 && !this.output8BitGray) {
        final byte[] palette = this.expandBits[this.bitDepth];
        cm = new IndexColorModel(this.bitDepth, palette.length, palette, palette, palette);
    } else {
        cm = createComponentColorModel(sm);
    }
    init((CachableRed) null, this.bounds, cm, sm, 0, 0, this.properties);
}
From source file: it.unimi.dsi.sux4j.io.ChunkedHashStore.java
/** Returns an iterator over the chunks of this chunked hash store.
 *
 * @return an iterator over the chunks of this chunked hash store.
 */
public Iterator<Chunk> iterator() {
    if (closed)
        throw new IllegalStateException("This " + getClass().getSimpleName() + " has been closed ");
    for (DataOutputStream d : dos)
        try {
            d.flush();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

    int m = 0;
    for (int i = 0; i < virtualDiskChunks; i++) {
        int s = 0;
        for (int j = 0; j < diskChunkStep; j++)
            s += count[i * diskChunkStep + j];
        if (s > m)
            m = s;
    }
    final int maxCount = m;

    return new AbstractObjectIterator<Chunk>() {
        private int chunk;
        private FastBufferedInputStream fbis;
        private int last;
        private int chunkSize;
        private final long[] buffer0 = new long[maxCount];
        private final long[] buffer1 = new long[maxCount];
        private final long[] buffer2 = new long[maxCount];
        private final long[] data = hashMask != 0 ? null : new long[maxCount];

        public boolean hasNext() {
            return chunk < chunks;
        }

        @SuppressWarnings("unchecked")
        public Chunk next() {
            if (!hasNext())
                throw new NoSuchElementException();
            final long[] buffer0 = this.buffer0;

            if (chunk % (chunks / virtualDiskChunks) == 0) {
                final int diskChunk = (int) (chunk / (chunks / virtualDiskChunks));
                final long[] buffer1 = this.buffer1, buffer2 = this.buffer2;

                chunkSize = 0;
                try {
                    if (diskChunkStep == 1) {
                        fbis = new FastBufferedInputStream(new FileInputStream(file[diskChunk]));
                        chunkSize = count[diskChunk];
                    } else {
                        final FileInputStream[] fis = new FileInputStream[diskChunkStep];
                        for (int i = 0; i < fis.length; i++) {
                            fis[i] = new FileInputStream(file[diskChunk * diskChunkStep + i]);
                            chunkSize += count[diskChunk * diskChunkStep + i];
                        }
                        fbis = new FastBufferedInputStream(new SequenceInputStream(
                                new IteratorEnumeration(Arrays.asList(fis).iterator())));
                    }
                    final DataInputStream dis = new DataInputStream(fbis);

                    final long triple[] = new long[3];
                    int count = 0;
                    for (int j = 0; j < chunkSize; j++) {
                        triple[0] = dis.readLong();
                        triple[1] = dis.readLong();
                        triple[2] = dis.readLong();
                        if (DEBUG)
                            System.err.println("From disk: " + Arrays.toString(triple));
                        if (filter == null || filter.evaluate(triple)) {
                            buffer0[count] = triple[0];
                            buffer1[count] = triple[1];
                            buffer2[count] = triple[2];
                            if (hashMask == 0)
                                data[count] = dis.readLong();
                            count++;
                        } else if (hashMask == 0)
                            dis.readLong(); // Discard data
                    }
                    chunkSize = count;
                    dis.close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }

                it.unimi.dsi.fastutil.Arrays.quickSort(0, chunkSize, new AbstractIntComparator() {
                    private static final long serialVersionUID = 0L;

                    public int compare(final int x, final int y) {
                        int t = Long.signum(buffer0[x] - buffer0[y]);
                        if (t != 0)
                            return t;
                        t = Long.signum(buffer1[x] - buffer1[y]);
                        if (t != 0)
                            return t;
                        return Long.signum(buffer2[x] - buffer2[y]);
                    }
                }, new Swapper() {
                    public void swap(final int x, final int y) {
                        final long e0 = buffer0[x], e1 = buffer1[x], e2 = buffer2[x];
                        buffer0[x] = buffer0[y];
                        buffer1[x] = buffer1[y];
                        buffer2[x] = buffer2[y];
                        buffer0[y] = e0;
                        buffer1[y] = e1;
                        buffer2[y] = e2;
                        if (hashMask == 0) {
                            final long v = data[x];
                            data[x] = data[y];
                            data[y] = v;
                        }
                    }
                });

                if (DEBUG) {
                    for (int i = 0; i < chunkSize; i++)
                        System.err.println(buffer0[i] + ", " + buffer1[i] + ", " + buffer2[i]);
                }

                if (!checkedForDuplicates && chunkSize > 1)
                    for (int i = chunkSize - 1; i-- != 0;)
                        if (buffer0[i] == buffer0[i + 1] && buffer1[i] == buffer1[i + 1]
                                && buffer2[i] == buffer2[i + 1])
                            throw new ChunkedHashStore.DuplicateException();
                if (chunk == chunks - 1)
                    checkedForDuplicates = true;
                last = 0;
            }

            final int start = last;
            while (last < chunkSize && (chunkShift == Long.SIZE ? 0 : buffer0[last] >>> chunkShift) == chunk)
                last++;
            chunk++;

            return new Chunk(buffer0, buffer1, buffer2, data, hashMask, start, last);
        }
    };
}
From source file: uk.ac.soton.simulation.jsit.core.ModelVersioningAssistant.java
String calcMD5HashForFileList(List<File> pathsToHash, boolean includeHiddenFiles, File[] fileExclusions,
        String[] filenameExclusions, boolean printFileNames) {
    Vector<FileInputStream> fileStreams = new Vector<FileInputStream>();
    ArrayList<File> workingFileExcludeList = null;
    if (printFileNames) {
        logger.info("Found files for hashing:");
    }
    try {
        if (fileExclusions != null) {
            workingFileExcludeList = new ArrayList<File>(fileExclusions.length);
            for (File f : fileExclusions) {
                assert f.exists();
                workingFileExcludeList.add(f.getAbsoluteFile());
            }
        }
        for (File currPath : pathsToHash) {
            assert currPath.exists();
            if (currPath.isDirectory()) {
                collectInputStreams(currPath, fileStreams, workingFileExcludeList, filenameExclusions,
                        includeHiddenFiles, printFileNames);
            } else if (currPath.isFile()) {
                if (printFileNames) {
                    logger.info("\t" + currPath.getAbsolutePath());
                }
                if (!fileIsExcluded(includeHiddenFiles, workingFileExcludeList, filenameExclusions, currPath)) {
                    fileStreams.add(new FileInputStream(currPath));
                }
            } else {
                assert false; // Should never happen!
            }
        }
        SequenceInputStream seqStream = new SequenceInputStream(fileStreams.elements());
        String md5Hash = DigestUtils.md5Hex(seqStream);
        seqStream.close();
        return md5Hash;
    } catch (IOException e) {
        throw new VersionControlException("Error reading files to hash", e);
    }
}
From source file: com.linkedin.databus2.relay.GoldenGateEventProducer.java
/**
 * Takes an input stream and wraps it with XML tags, using the XML encoding
 * and XML version specified in the physical sources config.
 * @param compositeInputStream the input stream to be wrapped with the XML tags
 * @return
 */
private InputStream wrapStreamWithXmlTags(InputStream compositeInputStream) {
    String xmlVersion = _pConfig.getXmlVersion();
    String xmlEncoding = _pConfig.getXmlEncoding();
    String xmlStart = "<?xml version=\"" + xmlVersion + "\" encoding=\"" + xmlEncoding + "\"?>\n<root>";
    String xmlEnd = "</root>";
    _log.info("The xml start tag used is:" + xmlStart);
    List<InputStream> xmlTagsList = Arrays.asList(new InputStream[] {
            new ByteArrayInputStream(xmlStart.getBytes(Charset.forName(xmlEncoding))),
            compositeInputStream,
            new ByteArrayInputStream(xmlEnd.getBytes(Charset.forName(xmlEncoding))), });
    Enumeration<InputStream> streams = Collections.enumeration(xmlTagsList);
    SequenceInputStream seqStream = new SequenceInputStream(streams);
    return seqStream;
}
From source file: org.languagetool.rules.de.GermanSpellerRule.java
@Nullable
private static MorfologikMultiSpeller getSpeller(Language language, UserConfig userConfig,
        String languageVariantPlainTextDict) {
    if (!language.getShortCode().equals(Locale.GERMAN.getLanguage())) {
        throw new IllegalArgumentException("Language is not a variant of German: " + language);
    }
    try {
        String morfoFile = "/de/hunspell/de_" + language.getCountries()[0] + ".dict";
        if (JLanguageTool.getDataBroker().resourceExists(morfoFile)) {
            // spell data will not exist in LibreOffice/OpenOffice context
            List<String> paths = Arrays.asList("/de/hunspell/spelling.txt");
            StringBuilder concatPaths = new StringBuilder();
            List<InputStream> streams = new ArrayList<>();
            for (String path : paths) {
                concatPaths.append(path).append(";");
                streams.add(JLanguageTool.getDataBroker().getFromResourceDirAsStream(path));
            }
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(new SequenceInputStream(Collections.enumeration(streams)), UTF_8))) {
                BufferedReader variantReader = null;
                if (languageVariantPlainTextDict != null && !languageVariantPlainTextDict.isEmpty()) {
                    InputStream variantStream = JLanguageTool.getDataBroker()
                            .getFromResourceDirAsStream(languageVariantPlainTextDict);
                    variantReader = new ExpandingReader(
                            new BufferedReader(new InputStreamReader(variantStream, UTF_8)));
                }
                return new MorfologikMultiSpeller(morfoFile, new ExpandingReader(br), concatPaths.toString(),
                        variantReader, languageVariantPlainTextDict,
                        userConfig != null ? userConfig.getAcceptedWords() : Collections.emptyList(),
                        MAX_EDIT_DISTANCE);
            }
        } else {
            return null;
        }
    } catch (IOException e) {
        throw new RuntimeException("Could not set up morfologik spell checker", e);
    }
}