List of usage examples for java.io DataInput readFully
/**
 * Reads exactly {@code len} bytes from this stream into {@code b}, starting at
 * offset {@code off}, blocking until all bytes are available.
 *
 * @throws IOException if an I/O error occurs, including {@code EOFException}
 *         when the stream ends before {@code len} bytes have been read
 */
void readFully(byte b[], int off, int len) throws IOException;
From source file:edu.umd.cloud9.collection.wikipedia.WikipediaPageOld.java
/**
 * Deserializes this object: reads a vint-prefixed UTF-8 byte payload holding
 * the raw page text, parses it into this instance, then reads the language
 * code. (Counterpart of the write side; the original comment incorrectly said
 * "Serializes".)
 *
 * @param in source to read from
 * @throws IOException if the underlying read fails
 */
public void readFields(DataInput in) throws IOException {
    // Length-prefixed payload: vint byte count, then exactly that many bytes.
    int length = WritableUtils.readVInt(in);
    byte[] bytes = new byte[length];
    in.readFully(bytes, 0, length);
    // Rebuild the page state from its UTF-8 string form.
    WikipediaPageOld.readPage(this, new String(bytes, "UTF-8"));
    language = in.readUTF();
}
From source file:org.apache.arrow.vector.util.Text.java
/**
 * Read a Text object whose length is already known. This allows creating Text
 * from a stream which uses a different serialization format (no length prefix
 * is consumed here).
 *
 * @param in  source to read the raw bytes from
 * @param len number of bytes to read into this Text
 * @throws IOException if the underlying read fails
 */
public void readWithKnownLength(DataInput in, int len) throws IOException {
    // Grow the backing array to hold len bytes; the 'false' flag presumably
    // means existing content need not be preserved (it is fully overwritten
    // below) — NOTE(review): confirm against setCapacity's contract.
    setCapacity(len, false);
    in.readFully(bytes, 0, len);
    length = len;
}
From source file:io.Text.java
/**
 * Deserialize: consume a vint length prefix, then read exactly that many raw
 * bytes into the backing buffer.
 *
 * @param in source to read from
 * @throws IOException if the underlying read fails
 */
public void readFields(DataInput in) throws IOException {
    final int size = WritableUtils.readVInt(in);
    // Make room first; old buffer content is discarded since it is overwritten.
    setCapacity(size, false);
    in.readFully(bytes, 0, size);
    length = size;
}
From source file:cn.iie.haiep.hbase.value.Bytes.java
/** * Read byte-array written with a WritableableUtils.vint prefix. * @param in Input to read from./* w w w . j ava 2s.c om*/ * @return byte array read off <code>in</code> * @throws IOException e */ public static byte[] readByteArray(final DataInput in) throws IOException { int len = WritableUtils.readVInt(in); if (len < 0) { throw new NegativeArraySizeException(Integer.toString(len)); } byte[] result = new byte[len]; in.readFully(result, 0, len); return result; }
From source file:de.hpi.fgis.hdrs.Triple.java
/**
 * Deserializes this triple: a fixed header (subject/predicate lengths as
 * shorts, object length and multiplicity as ints) followed by the raw
 * concatenated S/P/O data bytes.
 */
@Override
public void readFields(DataInput in) throws IOException {
    // --- header ---
    Slen = in.readShort();
    Plen = in.readShort();
    Olen = in.readInt();
    multiplicity = in.readInt();
    // --- data ---
    // bufferSize() derives the total payload size from the header fields just
    // read — NOTE(review): presumably Slen + Plen + Olen; confirm against
    // bufferSize()'s implementation.
    int size = bufferSize();
    buffer = new byte[size];
    in.readFully(buffer, 0, size);
}
From source file:edu.umn.cs.spatialHadoop.core.RTree.java
/**
 * Deserializes the R-tree header and node structure into memory. Actual
 * element records stay on disk when the input supports random access
 * (FSDataInputStream); otherwise the whole data section is buffered in memory.
 */
@Override
public void readFields(DataInput in) throws IOException {
    // Tree size (Header + structure + data)
    treeSize = in.readInt();
    if (treeSize == 0) {
        // Empty tree marker: nothing else was serialized.
        height = elementCount = 0;
        return;
    }
    // Read only the tree structure in memory while actual records remain on
    // disk and loaded when necessary
    height = in.readInt();
    if (height == 0)
        return;
    degree = in.readInt();
    elementCount = in.readInt();
    // Node count of a complete tree: (degree^height - 1) / (degree - 1).
    nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1));
    int structureSize = nodeCount * NodeSize;
    byte[] treeStructure = new byte[structureSize];
    in.readFully(treeStructure, 0, structureSize);
    structure = new FSDataInputStream(new MemoryInputStream(treeStructure));
    if (in instanceof FSDataInputStream) {
        // Random-access input: remember where the tree began (current position
        // minus what we already consumed) and read records lazily from 'in'.
        this.treeStartOffset = ((FSDataInputStream) in).getPos() - structureSize - TreeHeaderSize;
        this.data = (FSDataInputStream) in;
    } else {
        // Sequential input: buffer all record data now. The negative start
        // offset makes (dataOffset + treeStartOffset) index into the in-memory
        // copy, which begins right after header + structure.
        this.treeStartOffset = 0 - structureSize - TreeHeaderSize;
        int treeDataSize = treeSize - TreeHeaderSize - structureSize;
        byte[] treeData = new byte[treeDataSize];
        in.readFully(treeData, 0, treeDataSize);
        this.data = new FSDataInputStream(new MemoryInputStream(treeData));
    }
    // NOTE(review): nodeCount is recomputed here with Math.pow — redundant with
    // the powInt computation above and subject to floating-point rounding for
    // large trees; consider removing one of the two.
    nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
    leafNodeCount = (int) Math.pow(degree, height - 1);
    nonLeafNodeCount = nodeCount - leafNodeCount;
}
From source file:com.ricemap.spateDB.core.RTree.java
/**
 * Deserializes the R-tree header and node structure into memory. Element
 * records stay on disk when the input supports random access
 * (FSDataInputStream); otherwise the whole data section is buffered in memory.
 */
@Override
public void readFields(DataInput in) throws IOException {
    // Tree size (Header + structure + data)
    treeSize = in.readInt();
    if (treeSize == 0) {
        // Empty tree marker: nothing else was serialized.
        height = elementCount = 0;
        return;
    }
    // Read only the tree structure in memory while actual records remain on
    // disk and loaded when necessary
    height = in.readInt();
    if (height == 0)
        return;
    degree = in.readInt();
    elementCount = in.readInt();
    // Layout flag serialized as an int: 1 means columnar storage.
    columnar = in.readInt() == 1;
    // Node count of a complete tree: (degree^height - 1) / (degree - 1).
    nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1));
    int structureSize = nodeCount * NodeSize;
    byte[] treeStructure = new byte[structureSize];
    in.readFully(treeStructure, 0, structureSize);
    structure = new FSDataInputStream(new MemoryInputStream(treeStructure));
    if (in instanceof FSDataInputStream) {
        // Random-access input: remember where the tree began and read records
        // lazily from 'in'.
        this.treeStartOffset = ((FSDataInputStream) in).getPos() - structureSize - TreeHeaderSize;
        this.data = (FSDataInputStream) in;
    } else {
        // Sequential input: buffer all record data now. The negative start
        // offset makes (dataOffset + treeStartOffset) index into the in-memory
        // copy, which begins right after header + structure.
        this.treeStartOffset = 0 - structureSize - TreeHeaderSize;
        int treeDataSize = treeSize - TreeHeaderSize - structureSize;
        byte[] treeData = new byte[treeDataSize];
        in.readFully(treeData, 0, treeDataSize);
        this.data = new FSDataInputStream(new MemoryInputStream(treeData));
    }
    // NOTE(review): nodeCount is recomputed here with Math.pow — redundant with
    // the powInt computation above and subject to floating-point rounding for
    // large trees; consider removing one of the two.
    nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
    leafNodeCount = (int) Math.pow(degree, height - 1);
    nonLeafNodeCount = nodeCount - leafNodeCount;
}
From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java
/**
 * Deserializes the R-tree: loads the whole node structure (MBRs plus per-node
 * data offsets) into memory while leaving record data on disk when the input
 * supports random access; a sequential input forces buffering the data section.
 */
@Override
public void readFields(DataInput in) throws IOException {
    // Read the whole tree structure and keep it in memory. Leave data on disk
    // Tree size (Header + structure + data)
    treeSize = in.readInt();
    if (in instanceof Seekable)
        // Position right after the size field marks where offsets are relative to.
        this.treeStartOffset = ((Seekable) in).getPos();
    if (treeSize == 0) {
        // Empty tree marker: nothing else was serialized.
        height = elementCount = 0;
        return;
    }
    // Read only the tree structure in memory while actual records remain on
    // disk and loaded when necessary
    height = in.readInt();
    if (height == 0)
        return;
    degree = in.readInt();
    elementCount = in.readInt();
    // Node count of a complete tree: (degree^height - 1) / (degree - 1).
    nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1));
    this.nodes = new Rectangle[nodeCount];
    this.dataOffset = new int[nodeCount + 1];
    // Each node is serialized as: data offset (int) followed by its MBR.
    for (int node_id = 0; node_id < nodeCount; node_id++) {
        this.dataOffset[node_id] = in.readInt();
        this.nodes[node_id] = new Rectangle();
        this.nodes[node_id].readFields(in);
    }
    // Sentinel: end offset of the last node's data is the total tree size.
    this.dataOffset[nodeCount] = treeSize;
    if (in instanceof FSDataInputStream) {
        // A random input stream, can keep the data on disk
        this.data = (FSDataInputStream) in;
    } else {
        // A sequential input stream, need to read all data now
        int treeDataSize = this.dataOffset[nodeCount] - this.dataOffset[0];
        // Adjust the offset of data to be zero
        this.treeStartOffset = -this.dataOffset[0];
        byte[] treeData = new byte[treeDataSize];
        in.readFully(treeData, 0, treeDataSize);
        this.data = new FSDataInputStream(new MemoryInputStream(treeData));
    }
    leafNodeCount = (int) Math.pow(degree, height - 1);
    nonLeafNodeCount = nodeCount - leafNodeCount;
}
From source file:org.apache.geode.internal.InternalDataSerializer.java
/**
 * Reads an instance of {@code String} from a {@code DataInput} given the header
 * byte already being read. The return value may be {@code null}.
 *
 * <p>Dispatches on the serialization header: STRING_BYTES (unsigned-short
 * length + raw bytes), STRING (modified-UTF via readUTF), NULL_STRING,
 * HUGE_STRING_BYTES (int length + raw bytes), or HUGE_STRING (int length +
 * chars).
 *
 * @throws IOException A problem occurs while reading from {@code in}, or the
 *         header byte is not a recognized string code
 *
 * @since GemFire 5.7
 */
public static String readString(DataInput in, byte header) throws IOException {
    if (header == DSCODE.STRING_BYTES) {
        // Short form: length fits in an unsigned short.
        int len = in.readUnsignedShort();
        if (logger.isTraceEnabled(LogMarker.SERIALIZER)) {
            logger.trace(LogMarker.SERIALIZER, "Reading STRING_BYTES of len={}", len);
        }
        byte[] buf = new byte[len];
        in.readFully(buf, 0, len);
        // The (byte[], int hibyte) constructor maps each byte to a char with
        // high byte 0 — matches the write side's single-byte encoding.
        return new String(buf, 0); // intentionally using deprecated constructor
    } else if (header == DSCODE.STRING) {
        if (logger.isTraceEnabled(LogMarker.SERIALIZER)) {
            logger.trace(LogMarker.SERIALIZER, "Reading utf STRING");
        }
        return in.readUTF();
    } else if (header == DSCODE.NULL_STRING) {
        if (logger.isTraceEnabled(LogMarker.SERIALIZER)) {
            logger.trace(LogMarker.SERIALIZER, "Reading NULL_STRING");
        }
        return null;
    } else if (header == DSCODE.HUGE_STRING_BYTES) {
        // Huge form: int length, still single-byte chars.
        int len = in.readInt();
        if (logger.isTraceEnabled(LogMarker.SERIALIZER)) {
            logger.trace(LogMarker.SERIALIZER, "Reading HUGE_STRING_BYTES of len={}", len);
        }
        byte[] buf = new byte[len];
        in.readFully(buf, 0, len);
        return new String(buf, 0); // intentionally using deprecated constructor
    } else if (header == DSCODE.HUGE_STRING) {
        // Huge form with full 16-bit chars, read one char at a time.
        int len = in.readInt();
        if (logger.isTraceEnabled(LogMarker.SERIALIZER)) {
            logger.trace(LogMarker.SERIALIZER, "Reading HUGE_STRING of len={}", len);
        }
        char[] buf = new char[len];
        for (int i = 0; i < len; i++) {
            buf[i] = in.readChar();
        }
        return new String(buf);
    } else {
        String s = "Unknown String header " + header;
        throw new IOException(s);
    }
}
From source file:org.apache.hadoop.hbase.ipc.IPCUtil.java
/** * Read in chunks of 8K (HBASE-7239)/*from w w w .j a va 2 s . c o m*/ * @param in * @param dest * @param offset * @param len * @throws IOException */ static void readChunked(final DataInput in, byte[] dest, int offset, int len) throws IOException { int maxRead = 8192; for (; offset < len; offset += maxRead) { in.readFully(dest, offset, Math.min(len - offset, maxRead)); } }