List of usage examples for java.io DataInputStream skip
public long skip(long n) throws IOException
Skips over and discards up to n bytes of data from the input stream. The actual number of bytes skipped is returned, and it may be smaller than n (possibly zero), so callers that need an exact skip must call skip in a loop, as several of the examples below do.
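A minimal, self-contained sketch of the call and its return-value contract before the project examples (the byte values and the 4-byte "header" here are arbitrary placeholders):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class SkipDemo {
    public static void main(String[] args) throws IOException {
        byte[] data = {0, 1, 2, 3, 4, 5, 6, 7};
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
        long skipped = in.skip(4);  // request to skip a 4-byte header
        System.out.println("skipped " + skipped + " bytes"); // may be < 4 in general
        System.out.println("next byte: " + in.read());       // prints 4 here
        in.close();
    }
}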
From source file: com.simiacryptus.mindseye.test.data.MNIST.java

private static Stream<byte[]> binaryStream(@Nonnull final String name, final int skip, final int recordSize)
    throws IOException {
  @Nullable InputStream stream = null;
  try {
    stream = Util.cacheStream(TestUtil.S3_ROOT.resolve(name));
  } catch (@Nonnull NoSuchAlgorithmException | KeyManagementException e) {
    throw new RuntimeException(e);
  }
  final byte[] fileData = IOUtils
      .toByteArray(new BufferedInputStream(new GZIPInputStream(new BufferedInputStream(stream))));
  @Nonnull final DataInputStream in = new DataInputStream(new ByteArrayInputStream(fileData));
  in.skip(skip);
  return MNIST.toIterator(new BinaryChunkIterator(in, recordSize));
}
From source file: com.simiacryptus.util.Util.java
/**
 * Binary stream stream.
 *
 * @param path       the path
 * @param name       the name
 * @param skip       the skip
 * @param recordSize the record size
 * @return the stream
 * @throws IOException the io exception
 */
public static Stream<byte[]> binaryStream(final String path, @javax.annotation.Nonnull final String name,
    final int skip, final int recordSize) throws IOException {
  @javax.annotation.Nonnull final File file = new File(path, name);
  final byte[] fileData = IOUtils.toByteArray(
      new BufferedInputStream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(file)))));
  @javax.annotation.Nonnull final DataInputStream in = new DataInputStream(new ByteArrayInputStream(fileData));
  in.skip(skip);
  return com.simiacryptus.util.Util.toIterator(new BinaryChunkIterator(in, recordSize));
}
From source file: org.carbondata.query.util.CacheUtil.java
private static Member[][] populateMemberCache(DataInputStream fileChannel, CarbonFile memberFile,
    String fileName, String dataType) throws IOException {
  // subtracted 4 as last 4 bytes will have the max value for no of records
  long currPositionIndex = 0;
  long size = memberFile.getSize() - 4;
  long skipSize = size;
  long actualSkipSize = 0;
  while (actualSkipSize != size) {
    actualSkipSize += fileChannel.skip(skipSize);
    skipSize = skipSize - actualSkipSize;
  }
  int maxVal = fileChannel.readInt();
  CarbonUtil.closeStreams(fileChannel);
  fileChannel = FileFactory.getDataInputStream(fileName, FileFactory.getFileType(fileName));
  ByteBuffer buffer = ByteBuffer.allocate((int) size);
  fileChannel.readFully(buffer.array());
  int minVal = buffer.getInt();
  int totalArraySize = maxVal - minVal + 1;
  Member[][] surogateKeyArrays = null;
  if (totalArraySize > CarbonCommonConstants.LEVEL_ARRAY_SIZE) {
    int div = totalArraySize / CarbonCommonConstants.LEVEL_ARRAY_SIZE;
    int rem = totalArraySize % CarbonCommonConstants.LEVEL_ARRAY_SIZE;
    if (rem > 0) {
      div++;
    }
    surogateKeyArrays = new Member[div][];
    for (int i = 0; i < div - 1; i++) {
      surogateKeyArrays[i] = new Member[CarbonCommonConstants.LEVEL_ARRAY_SIZE];
    }
    if (rem > 0) {
      surogateKeyArrays[surogateKeyArrays.length - 1] = new Member[rem];
    } else {
      surogateKeyArrays[surogateKeyArrays.length - 1] = new Member[CarbonCommonConstants.LEVEL_ARRAY_SIZE];
    }
  } else {
    surogateKeyArrays = new Member[1][totalArraySize];
  }
  currPositionIndex += 4;
  int current = 0;
  boolean enableEncoding = Boolean
      .valueOf(CarbonProperties.getInstance().getProperty(CarbonCommonConstants.ENABLE_BASE64_ENCODING,
          CarbonCommonConstants.ENABLE_BASE64_ENCODING_DEFAULT));
  int index = 0;
  int prvArrayIndex = 0;
  while (currPositionIndex < size) {
    int len = buffer.getInt();
    currPositionIndex += 4;
    byte[] rowBytes = new byte[len];
    buffer.get(rowBytes);
    currPositionIndex += len;
    if (enableEncoding) {
      rowBytes = Base64.decodeBase64(rowBytes);
    }
    surogateKeyArrays[current / CarbonCommonConstants.LEVEL_ARRAY_SIZE][index] = new Member(rowBytes);
    current++;
    if (current / CarbonCommonConstants.LEVEL_ARRAY_SIZE > prvArrayIndex) {
      prvArrayIndex++;
      index = 0;
    } else {
      index++;
    }
  }
  return surogateKeyArrays;
}
From source file: com.wandisco.s3hdfs.rewrite.filter.S3HdfsTestUtil.java
void compareS3ObjectWithHdfsFile(InputStream objectStream, Path path, long rangeStart, long rangeEnd)
    throws IOException, ServiceException {
  FileStatus fsStat = hdfs.listStatus(path)[0];
  int expectedSize = (int) (rangeEnd - rangeStart);
  int blockSize = (int) fsStat.getBlockSize();
  int blocks = (int) Math.ceil((double) expectedSize / (double) blockSize);
  DataInputStream origStream = hdfs.open(path);
  assertEquals(origStream.skip(rangeStart), rangeStart);
  int size = 0;
  for (int i = 0; i < expectedSize; i++) {
    int A = origStream.read();
    int B = objectStream.read();
    if (A == -1 || B == -1)
      fail("Premature end of stream.");
    if (A != B) {
      fail("ERROR: Byte A: " + A + " Byte B: " + B + ", at offset: " + size);
    }
    size++;
  }
  if (size != expectedSize) {
    fail("Incorrect size: " + size + ", expected: " + expectedSize);
  }
  System.out.println("File: " + path + " has " + blocks + " blocks.");
  System.out.println("File: " + path + " has " + blockSize + " blockSize.");
  System.out.println("File: " + path + " has " + expectedSize + " length.");
  System.out.println("SUCCESS! The files match up!");
}
From source file: org.apache.fop.afp.apps.FontPatternExtractor.java
/**
 * Extracts the Type1 PFB file from the given AFP outline font.
 * @param file the AFP file to read from
 * @param targetDir the target directory where the PFB file is to be placed
 * @throws IOException if an I/O error occurs
 */
public void extract(File file, File targetDir) throws IOException {
  InputStream in = new java.io.FileInputStream(file);
  try {
    MODCAParser parser = new MODCAParser(in);
    ByteArrayOutputStream baout = new ByteArrayOutputStream();
    UnparsedStructuredField strucField;
    while ((strucField = parser.readNextStructuredField()) != null) {
      if (strucField.getSfTypeID() == 0xD3EE89) {
        byte[] sfData = strucField.getData();
        println(strucField.toString());
        HexDump.dump(sfData, 0, printStream, 0);
        baout.write(sfData);
      }
    }
    ByteArrayInputStream bin = new ByteArrayInputStream(baout.toByteArray());
    DataInputStream din = new DataInputStream(bin);
    long len = din.readInt() & 0xFFFFFFFFL;
    println("Length: " + len);
    din.skip(4); // checksum
    int tidLen = din.readUnsignedShort() - 2;
    byte[] tid = new byte[tidLen];
    din.readFully(tid);
    String filename = new String(tid, "ISO-8859-1");
    int asciiCount1 = countUSAsciiCharacters(filename);
    String filenameEBCDIC = new String(tid, "Cp1146");
    int asciiCount2 = countUSAsciiCharacters(filenameEBCDIC);
    println("TID: " + filename + " " + filenameEBCDIC);
    if (asciiCount2 > asciiCount1) {
      // Haven't found an indicator of whether the name is encoded in EBCDIC,
      // so we use a trick: prefer the decoding with more US-ASCII characters.
      filename = filenameEBCDIC;
    }
    if (!filename.toLowerCase().endsWith(".pfb")) {
      filename = filename + ".pfb";
    }
    println("Output filename: " + filename);
    File out = new File(targetDir, filename);
    OutputStream fout = new java.io.FileOutputStream(out);
    try {
      IOUtils.copyLarge(din, fout);
    } finally {
      IOUtils.closeQuietly(fout);
    }
  } finally {
    IOUtils.closeQuietly(in);
  }
}
From source file: org.carbondata.query.util.CacheUtil.java
public static int getMaxValueFromLevelFile(String filesLocaton) {
  if (null == filesLocaton) {
    return 0;
  }
  DataInputStream fileChannel = null;
  try {
    if (!FileFactory.isFileExist(filesLocaton, FileFactory.getFileType(filesLocaton))) {
      return 0;
    }
    fileChannel = new DataInputStream(
        FileFactory.getDataInputStream(filesLocaton, FileFactory.getFileType(filesLocaton), 10240));
    CarbonFile memberFile = FileFactory.getCarbonFile(filesLocaton, FileFactory.getFileType(filesLocaton));
    long size = memberFile.getSize() - 4;
    long skipSize = size;
    long actualSkipSize = 0;
    while (actualSkipSize != size) {
      actualSkipSize += fileChannel.skip(skipSize);
      skipSize = skipSize - actualSkipSize;
    }
    LOGGER.debug("Bytes skipped " + skipSize);
    int maxVal = fileChannel.readInt();
    return maxVal;
  } catch (IOException e) {
    LOGGER.error(e, e.getMessage());
  } finally {
    CarbonUtil.closeStreams(fileChannel);
  }
  return 0;
}
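Both Carbondata examples above call skip in a loop because a single call may skip fewer bytes than requested. A distilled sketch of that idiom as a standalone helper (the name skipFully and the EOFException on premature end of stream are my own choices, not taken from any of the quoted projects):

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;

// Hypothetical helper: keeps calling skip(...) until exactly n bytes are
// consumed, since DataInputStream.skip may skip fewer bytes per call.
static void skipFully(DataInputStream in, long n) throws IOException {
    long remaining = n;
    while (remaining > 0) {
        long skipped = in.skip(remaining);
        if (skipped > 0) {
            remaining -= skipped;
        } else if (in.read() == -1) {
            // skip returned 0 and a probe read hit EOF: the stream is exhausted.
            throw new EOFException("Stream ended with " + remaining + " bytes left to skip");
        } else {
            // The probe read consumed one byte, so count it as skipped.
            remaining--;
        }
    }
}

For counts that fit in an int, DataInputStream.skipBytes(int) performs a similar internal loop and returns the number of bytes actually skipped.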
From source file: es.urjc.mctwp.image.impl.analyze.AnalyzeImagePlugin.java
private boolean isNifti(File file) throws IOException {
  boolean result = false;
  if (file != null) {
    DataInputStream stream = new DataInputStream(new FileInputStream(file));
    stream.skip(344);
    byte check1 = stream.readByte();
    byte check2 = stream.readByte();
    byte check3 = stream.readByte();
    byte check4 = stream.readByte();
    stream.close();
    // NIfTI-1 magic at offset 344 is "ni1\0" or "n+1\0"
    result = (check1 == 0x6e) && ((check2 == 0x69) || (check2 == 0x2b)) && (check3 == 0x31)
        && (check4 == 0x00);
  }
  return result;
}
From source file: org.veronicadb.core.memorygraph.storage.SimpleLocalFileStorageSink.java
/**
 * Skip the bloom filter bytes if they are not needed for a re-read.
 * @param stream the stream positioned at the serialized bloom filter
 * @throws IOException if an I/O error occurs
 */
protected void skipBloom(DataInputStream stream) throws IOException {
  int bytesToSkip = stream.readInt();
  stream.skip(bytesToSkip);
}
From source file: com.facebook.infrastructure.db.Column.java
public void skip(DataInputStream dis) throws IOException {
  /* read the column name */
  dis.readUTF();
  /* boolean indicating if the column is deleted */
  dis.readBoolean();
  /* timestamp associated with the column */
  dis.readLong();
  /* size of the column */
  int size = dis.readInt();
  dis.skip(size);
}
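Like skipBloom above, this example ignores the value returned by skip after reading a length prefix. A hedged alternative sketch using DataInputStream.skipBytes and verifying the count (the method name and error message are illustrative, not from the quoted source):

import java.io.DataInputStream;
import java.io.IOException;

// Sketch: skip one length-prefixed record and verify the skip completed.
// The wire format (an int length followed by payload bytes) mirrors the
// Column and skipBloom examples above.
static void skipLengthPrefixedRecord(DataInputStream dis) throws IOException {
    int size = dis.readInt();           // length prefix
    int skipped = dis.skipBytes(size);  // loops internally over skip()
    if (skipped != size) {
        throw new IOException("Expected to skip " + size + " bytes, skipped " + skipped);
    }
}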
From source file: edu.cornell.med.icb.goby.alignments.perms.PermutationReader.java
private void makeIndex(FastBufferedInputStream inputStream) throws IOException {
  input.position(0);
  final ObjectArrayList<Block> blocks = new ObjectArrayList<Block>();
  final DataInputStream dataInput = new DataInputStream(
      new FastBufferedInputStream(new FileInputStream(basename + ".perm")));
  try {
    long offset = 0;
    while (dataInput.available() > 0) {
      final Block block = new Block();
      block.offset = offset;
      block.n = dataInput.readInt();
      block.firstSmallIndex = dataInput.readInt();
      dataInput.skip(block.n * 4L);
      blocks.add(block);
      offset += block.n * 4L + 8L;
    }
    Collections.sort(blocks, SMALL_INDEX_COMPARATOR);
    indexBlocks = blocks.toArray(new Block[blocks.size()]);
  } finally {
    dataInput.close();
  }
}