Usage examples for java.io.DataInput#readInt
int readInt() throws IOException;
From source file:org.apache.pig.data.BinInterSedes.java
/**
 * Reads a map whose entry count was encoded with a size-dependent type marker:
 * {@code TINYMAP} (count as unsigned byte), {@code SMALLMAP} (unsigned short),
 * or {@code MAP} (int), followed by {@code count} alternating key/value datums.
 * Keys are assumed to have been written as Strings.
 *
 * @param in   stream positioned just past the type marker
 * @param type the map type marker byte already consumed from the stream
 * @return the deserialized map
 * @throws IOException   if the stream cannot be read
 * @throws ExecException if {@code type} is not one of the recognized map markers
 */
private Map<String, Object> readMap(DataInput in, byte type) throws IOException {
    int size;
    switch (type) {
    case TINYMAP:
        size = in.readUnsignedByte();
        break;
    case SMALLMAP:
        size = in.readUnsignedShort();
        break;
    case MAP:
        size = in.readInt();
        break;
    default: {
        int errCode = 2220;
        // Fixed: the two literals previously concatenated to
        // "Unexpected data while reading mapfrom binary file." (missing space).
        String msg = "Unexpected data while reading map " + "from binary file.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
    }
    // Presized to the serialized entry count to avoid rehashing during the fill loop.
    Map<String, Object> m = new HashMap<String, Object>(size);
    for (int i = 0; i < size; i++) {
        String key = (String) readDatum(in);
        m.put(key, readDatum(in));
    }
    return m;
}
From source file:edu.umn.cs.spatialHadoop.core.RTree.java
/**
 * Deserializes the R-tree header and node structure from {@code in}.  Only the
 * header and the tree structure are materialized in memory; element data stays
 * on disk and is read lazily when the input stream is seekable.
 */
@Override
public void readFields(DataInput in) throws IOException {
    // Tree size (Header + structure + data)
    treeSize = in.readInt();
    if (treeSize == 0) {
        // Empty-tree marker: nothing else follows in the stream.
        height = elementCount = 0;
        return;
    }
    // Read only the tree structure in memory while actual records remain on
    // disk and loaded when necessary
    height = in.readInt();
    if (height == 0)
        return;
    degree = in.readInt();
    elementCount = in.readInt();
    // Node count of a full tree: (degree^height - 1) / (degree - 1).
    // NOTE(review): divides by (degree - 1) — assumes degree > 1; confirm the
    // writer never emits degree == 1.
    nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1));
    int structureSize = nodeCount * NodeSize;
    byte[] treeStructure = new byte[structureSize];
    in.readFully(treeStructure, 0, structureSize);
    // Keep only the tree structure in memory, wrapped as a seekable stream.
    structure = new FSDataInputStream(new MemoryInputStream(treeStructure));
    if (in instanceof FSDataInputStream) {
        // Seekable input: remember where the tree began (current position minus
        // what we already consumed) and leave element data on disk.
        this.treeStartOffset = ((FSDataInputStream) in).getPos() - structureSize - TreeHeaderSize;
        this.data = (FSDataInputStream) in;
    } else {
        // Non-seekable input: load all tree data in memory.  The negative
        // offset makes (treeStartOffset + on-disk offset) map into the buffer.
        this.treeStartOffset = 0 - structureSize - TreeHeaderSize;
        int treeDataSize = treeSize - TreeHeaderSize - structureSize;
        byte[] treeData = new byte[treeDataSize];
        in.readFully(treeData, 0, treeDataSize);
        this.data = new FSDataInputStream(new MemoryInputStream(treeData));
    }
    // NOTE(review): nodeCount is recomputed here with Math.pow — redundant with
    // the powInt computation above; both should agree for sane inputs, but the
    // double-based form could differ for very large degree/height.
    nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
    leafNodeCount = (int) Math.pow(degree, height - 1);
    nonLeafNodeCount = nodeCount - leafNodeCount;
}
From source file:org.apache.hadoop.hbase.KeyValue.java
/**
 * Deserializes a KeyValue from {@code in}.  The stream is expected to carry an
 * int length prefix followed by that many backing bytes; a length of zero
 * yields {@code null}, which callers may use as an end-of-stream marker.
 *
 * @param in where to read bytes from; a byte array is allocated to hold the
 *           KeyValue backing bytes copied from the stream
 * @return the deserialized KeyValue, or {@code null} when the length prefix is zero
 * @throws IOException if the stream cannot be read
 */
public static KeyValue create(final DataInput in) throws IOException {
    final int length = in.readInt();
    return create(length, in);
}
From source file:org.springframework.session.data.gemfire.AbstractGemFireOperationsSessionRepositoryTest.java
/**
 * Verifies GemFireSessionAttributes.fromDelta(): the delta stream carries an
 * int attribute count, then per attribute a UTF name and a serialized value
 * (value deserialization is stubbed via readObject below).  Also verifies that
 * a partial delta overwrites only the attributes it names.
 */
@Test
public void sessionAttributesFromDelta() throws Exception {
    // Delta #1: two attributes, named "attrOne" and "attrTwo".
    final DataInput mockDataInput = mock(DataInput.class);
    given(mockDataInput.readInt()).willReturn(2);
    given(mockDataInput.readUTF()).willReturn("attrOne").willReturn("attrTwo");
    @SuppressWarnings("serial")
    AbstractGemFireOperationsSessionRepository.GemFireSessionAttributes sessionAttributes = new AbstractGemFireOperationsSessionRepository.GemFireSessionAttributes() {
        // Hands out successive canned values so the test never performs real
        // Java deserialization; count tracks how many values were consumed.
        private int count = 0;

        @Override
        @SuppressWarnings("unchecked")
        <T> T readObject(DataInput in) throws ClassNotFoundException, IOException {
            assertThat(in).isSameAs(mockDataInput);
            return (T) Arrays.asList("testOne", "testTwo", "testThree").get(count++);
        }
    };
    // Seed local state; setting attributes marks the delta as dirty.
    sessionAttributes.setAttribute("attrOne", "one");
    sessionAttributes.setAttribute("attrTwo", "two");
    assertThat(sessionAttributes.getAttributeNames().size()).isEqualTo(2);
    assertThat(sessionAttributes.getAttributeNames().containsAll(asSet("attrOne", "attrTwo"))).isTrue();
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrOne"))).isEqualTo("one");
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrTwo"))).isEqualTo("two");
    assertThat(sessionAttributes.hasDelta()).isTrue();
    sessionAttributes.fromDelta(mockDataInput);
    // Both attributes replaced by streamed values; applying the delta clears the dirty flag.
    assertThat(sessionAttributes.getAttributeNames().size()).isEqualTo(2);
    assertThat(sessionAttributes.getAttributeNames().containsAll(asSet("attrOne", "attrTwo"))).isTrue();
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrOne"))).isEqualTo("testOne");
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrTwo"))).isEqualTo("testTwo");
    assertThat(sessionAttributes.hasDelta()).isFalse();
    // One count read, one name read per attribute.
    verify(mockDataInput, times(1)).readInt();
    verify(mockDataInput, times(2)).readUTF();
    // Delta #2: a partial delta naming only "attrTwo".
    reset(mockDataInput);
    given(mockDataInput.readInt()).willReturn(1);
    given(mockDataInput.readUTF()).willReturn("attrTwo");
    sessionAttributes.setAttribute("attrOne", "one");
    sessionAttributes.setAttribute("attrTwo", "two");
    assertThat(sessionAttributes.getAttributeNames().size()).isEqualTo(2);
    assertThat(sessionAttributes.getAttributeNames().containsAll(asSet("attrOne", "attrTwo"))).isTrue();
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrOne"))).isEqualTo("one");
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrTwo"))).isEqualTo("two");
    assertThat(sessionAttributes.hasDelta()).isTrue();
    sessionAttributes.fromDelta(mockDataInput);
    // Only "attrTwo" was overwritten (with the third canned value); "attrOne"
    // keeps its locally-set value, so the delta remains dirty.
    assertThat(sessionAttributes.getAttributeNames().size()).isEqualTo(2);
    assertThat(sessionAttributes.getAttributeNames().containsAll(asSet("attrOne", "attrTwo"))).isTrue();
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrOne"))).isEqualTo("one");
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrTwo"))).isEqualTo("testThree");
    assertThat(sessionAttributes.hasDelta()).isTrue();
    verify(mockDataInput, times(1)).readInt();
    verify(mockDataInput, times(1)).readUTF();
}
From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java
/**
 * Deserializes the R-tree from {@code in}: the whole tree structure (node MBRs
 * and per-node data offsets) is kept in memory while element data is left on
 * disk when the input is seekable.
 */
@Override
public void readFields(DataInput in) throws IOException {
    // Read the whole tree structure and keep it in memory. Leave data on disk
    // Tree size (Header + structure + data)
    treeSize = in.readInt();
    // Record where the tree begins so later data reads can be made absolute.
    if (in instanceof Seekable)
        this.treeStartOffset = ((Seekable) in).getPos();
    if (treeSize == 0) {
        // Empty-tree marker: nothing else follows in the stream.
        height = elementCount = 0;
        return;
    }
    // Read only the tree structure in memory while actual records remain on
    // disk and loaded when necessary
    height = in.readInt();
    if (height == 0)
        return;
    degree = in.readInt();
    elementCount = in.readInt();
    // Node count of a full tree: (degree^height - 1) / (degree - 1).
    // NOTE(review): divides by (degree - 1) — assumes degree > 1; confirm the
    // writer never emits degree == 1.
    nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1));
    this.nodes = new Rectangle[nodeCount];
    // dataOffset[i] is the start of node i's data; entry nodeCount is a
    // sentinel (treeSize) so every node's extent is dataOffset[i+1] - dataOffset[i].
    this.dataOffset = new int[nodeCount + 1];
    for (int node_id = 0; node_id < nodeCount; node_id++) {
        this.dataOffset[node_id] = in.readInt();
        this.nodes[node_id] = new Rectangle();
        this.nodes[node_id].readFields(in);
    }
    this.dataOffset[nodeCount] = treeSize;
    if (in instanceof FSDataInputStream) {
        // A random-access input stream: can keep the data on disk.
        this.data = (FSDataInputStream) in;
    } else {
        // A sequential input stream: need to read all data now.
        int treeDataSize = this.dataOffset[nodeCount] - this.dataOffset[0];
        // Adjust the offset of data so in-memory reads start at zero.
        this.treeStartOffset = -this.dataOffset[0];
        byte[] treeData = new byte[treeDataSize];
        in.readFully(treeData, 0, treeDataSize);
        this.data = new FSDataInputStream(new MemoryInputStream(treeData));
    }
    leafNodeCount = (int) Math.pow(degree, height - 1);
    nonLeafNodeCount = nodeCount - leafNodeCount;
}
From source file:org.apache.pig.data.BinInterSedes.java
/**
 * Reads a SchemaTuple whose schema id was encoded with a size-dependent type
 * marker ({@code SCHEMA_TUPLE_BYTE_INDEX}: unsigned byte,
 * {@code SCHEMA_TUPLE_SHORT_INDEX}: unsigned short, {@code SCHEMA_TUPLE}: int),
 * then delegates field deserialization to the tuple itself.
 *
 * @param in   stream positioned just past the type marker
 * @param type the tuple type marker byte already consumed from the stream
 * @return the deserialized tuple
 * @throws IOException if the stream cannot be read
 */
private Tuple readSchemaTuple(DataInput in, byte type) throws IOException {
    final int tupleId;
    if (type == SCHEMA_TUPLE_BYTE_INDEX) {
        tupleId = in.readUnsignedByte();
    } else if (type == SCHEMA_TUPLE_SHORT_INDEX) {
        tupleId = in.readUnsignedShort();
    } else if (type == SCHEMA_TUPLE) {
        tupleId = in.readInt();
    } else {
        throw new RuntimeException("Invalid type given to readSchemaTuple: " + type);
    }
    final Tuple tuple = SchemaTupleFactory.getInstance(tupleId).newTuple();
    tuple.readFields(in);
    return tuple;
}
From source file:org.springframework.session.data.gemfire.AbstractGemFireOperationsSessionRepositoryTest.java
/**
 * Verifies GemFireSession.fromDelta(): the delta stream carries a long
 * (last accessed time = 1L) and two ints (600, then 0 — presumably the max
 * inactive interval followed by the attribute count; verify against
 * GemFireSession.toDelta).
 */
@Test
public void sessionFromDelta() throws Exception {
    final DataInput mockDataInput = mock(DataInput.class);
    given(mockDataInput.readLong()).willReturn(1L);
    given(mockDataInput.readInt()).willReturn(600).willReturn(0);
    @SuppressWarnings("serial")
    AbstractGemFireOperationsSessionRepository.GemFireSession session = new AbstractGemFireOperationsSessionRepository.GemFireSession() {
        // Stubbed so the test never performs real Java deserialization.
        @Override
        @SuppressWarnings("unchecked")
        <T> T readObject(DataInput in) throws ClassNotFoundException, IOException {
            assertThat(in).isSameAs(mockDataInput);
            return (T) "test";
        }
    };
    session.fromDelta(mockDataInput);
    // Applying the delta clears the dirty flag and installs the streamed values;
    // an attribute count of zero means no attributes (and no UTF reads).
    assertThat(session.hasDelta()).isFalse();
    assertThat(session.getLastAccessedTime()).isEqualTo(1L);
    assertThat(session.getMaxInactiveIntervalInSeconds()).isEqualTo(600);
    assertThat(session.getAttributeNames().isEmpty()).isTrue();
    verify(mockDataInput, times(1)).readLong();
    verify(mockDataInput, times(2)).readInt();
    verify(mockDataInput, never()).readUTF();
}
From source file:org.springframework.session.data.gemfire.AbstractGemFireOperationsSessionRepositoryTest.java
/**
 * Verifies GemFireSession.fromData(): the stream carries the session id and
 * principal name as UTF, creation/last-accessed times as longs, and the max
 * inactive interval as an int; the attributes object itself is produced by the
 * stubbed readObject below.  The streamed id ("2") replaces the constructor id ("1").
 */
@Test
public void sessionFromData() throws Exception {
    final long expectedCreationTime = 1L;
    final long expectedLastAccessedTime = 2L;
    final int expectedMaxInactiveIntervalInSeconds = (int) TimeUnit.HOURS.toSeconds(6);
    final String expectedPrincipalName = "jblum";
    // First readUTF is the session id, second the principal name; the two
    // readLong calls are creation time then last accessed time.
    DataInput mockDataInput = mock(DataInput.class);
    given(mockDataInput.readUTF()).willReturn("2").willReturn(expectedPrincipalName);
    given(mockDataInput.readLong()).willReturn(expectedCreationTime).willReturn(expectedLastAccessedTime);
    given(mockDataInput.readInt()).willReturn(expectedMaxInactiveIntervalInSeconds);
    @SuppressWarnings("serial")
    AbstractGemFireOperationsSessionRepository.GemFireSession session = new AbstractGemFireOperationsSessionRepository.GemFireSession(
            "1") {
        // Stubbed to return a pre-populated attributes object instead of
        // performing real Java deserialization.
        @Override
        @SuppressWarnings("unchecked")
        <T> T readObject(DataInput in) throws ClassNotFoundException, IOException {
            assertThat(in).isNotNull();
            AbstractGemFireOperationsSessionRepository.GemFireSessionAttributes sessionAttributes = new AbstractGemFireOperationsSessionRepository.GemFireSessionAttributes();
            sessionAttributes.setAttribute("attrOne", "testOne");
            sessionAttributes.setAttribute("attrTwo", "testTwo");
            return (T) sessionAttributes;
        }
    };
    session.fromData(mockDataInput);
    // Three attributes expected: the two from readObject plus the principal
    // name, which fromData stores under the index attribute key.
    Set<String> expectedAttributeNames = asSet("attrOne", "attrTwo",
            FindByIndexNameSessionRepository.PRINCIPAL_NAME_INDEX_NAME);
    assertThat(session.getId()).isEqualTo("2");
    assertThat(session.getCreationTime()).isEqualTo(expectedCreationTime);
    assertThat(session.getLastAccessedTime()).isEqualTo(expectedLastAccessedTime);
    assertThat(session.getMaxInactiveIntervalInSeconds()).isEqualTo(expectedMaxInactiveIntervalInSeconds);
    assertThat(session.getPrincipalName()).isEqualTo(expectedPrincipalName);
    assertThat(session.getAttributeNames().size()).isEqualTo(3);
    assertThat(session.getAttributeNames().containsAll(expectedAttributeNames)).isTrue();
    assertThat(String.valueOf(session.getAttribute("attrOne"))).isEqualTo("testOne");
    assertThat(String.valueOf(session.getAttribute("attrTwo"))).isEqualTo("testTwo");
    assertThat(String.valueOf(session.getAttribute(FindByIndexNameSessionRepository.PRINCIPAL_NAME_INDEX_NAME)))
            .isEqualTo(expectedPrincipalName);
    // NOTE(review): two readInt calls despite one stubbed value — presumably
    // fromData also reads an attribute count; confirm against toData.
    verify(mockDataInput, times(2)).readUTF();
    verify(mockDataInput, times(2)).readLong();
    verify(mockDataInput, times(2)).readInt();
}
From source file:com.mapr.hbase.support.objects.MHRegionInfo.java
@Override public void readFields(DataInput in) throws IOException { // Read the single version byte. We don't ask the super class do it // because freaks out if its not the current classes' version. This method // can deserialize version 0 and version 1 of HRI. byte version = in.readByte(); if (version == 0) { // This is the old HRI that carried an HTD. Migrate it. The below // was copied from the old 0.90 HRI readFields. this.endKey = Bytes.readByteArray(in); this.offLine = in.readBoolean(); this.regionId = in.readLong(); this.regionName = Bytes.readByteArray(in); this.regionNameStr = Bytes.toStringBinary(this.regionName); this.split = in.readBoolean(); this.startKey = Bytes.readByteArray(in); try {//from w w w. ja v a2s . c o m tableDesc = new HTableDescriptor(); tableDesc.readFields(in); this.tableName = tableDesc.getName(); } catch (EOFException eofe) { throw new IOException("HTD not found in input buffer", eofe); } this.hashCode = in.readInt(); } else if (getVersion() == version) { this.endKey = Bytes.readByteArray(in); this.offLine = in.readBoolean(); this.regionId = in.readLong(); this.regionName = Bytes.readByteArray(in); this.regionNameStr = Bytes.toStringBinary(this.regionName); this.split = in.readBoolean(); this.startKey = Bytes.readByteArray(in); this.tableName = Bytes.readByteArray(in); this.hashCode = in.readInt(); } else { throw new IOException("Non-migratable/unknown version=" + getVersion()); } }
From source file:bobs.is.compress.sevenzip.SevenZFile.java
private void readPackInfo(final DataInput header, final Archive archive) throws IOException { archive.packPos = readUint64(header); final long numPackStreams = readUint64(header); int nid = header.readUnsignedByte(); if (nid == NID.kSize) { archive.packSizes = new long[(int) numPackStreams]; for (int i = 0; i < archive.packSizes.length; i++) { archive.packSizes[i] = readUint64(header); }//from ww w . j a v a 2 s . co m nid = header.readUnsignedByte(); } if (nid == NID.kCRC) { archive.packCrcsDefined = readAllOrBits(header, (int) numPackStreams); archive.packCrcs = new long[(int) numPackStreams]; for (int i = 0; i < (int) numPackStreams; i++) { if (archive.packCrcsDefined.get(i)) { archive.packCrcs[i] = 0xffffFFFFL & Integer.reverseBytes(header.readInt()); } } nid = header.readUnsignedByte(); } if (nid != NID.kEnd) { throw new IOException("Badly terminated PackInfo (" + nid + ")"); } }