Usage examples for the java.io.DataInput.readInt() method, collected from open-source projects.
int readInt() throws IOException;
From source file:com.marklogic.tree.ExpandedTree.java
@Override public void readFields(DataInput in) throws IOException { uriKey = in.readLong();/* w w w . j a v a2 s . c om*/ uniqKey = in.readLong(); linkKey = in.readLong(); numKeys = in.readInt(); if (numKeys > 0) { keys = new long[numKeys]; for (int i = 0; i < numKeys; i++) { keys[i] = in.readLong(); } } int atomDataLen = in.readInt(); if (atomDataLen > 0) { atomData = new byte[atomDataLen]; for (int i = 0; i < atomDataLen; i++) { atomData[i] = in.readByte(); } } atomLimit = in.readInt(); if (atomLimit > 0) { atomIndex = new int[atomLimit + 1]; for (int i = 0; i < atomLimit + 1; i++) { atomIndex[i] = in.readInt(); } } int nodeNameNameAtomLen = in.readInt(); if (nodeNameNameAtomLen > 0) { nodeNameNameAtom = new int[nodeNameNameAtomLen]; nodeNameNamespaceAtom = new int[nodeNameNameAtomLen]; for (int i = 0; i < nodeNameNameAtomLen; i++) { nodeNameNameAtom[i] = in.readInt(); nodeNameNamespaceAtom[i] = in.readInt(); } } numNodeReps = in.readInt(); if (numNodeReps > 0) { nodes = new NodeImpl[numNodeReps]; nodeOrdinal = new long[numNodeReps]; nodeKind = new byte[numNodeReps]; nodeRepID = new int[numNodeReps]; nodeParentNodeRepID = new int[numNodeReps]; for (int i = 0; i < numNodeReps; i++) { nodeOrdinal[i] = in.readLong(); nodeKind[i] = in.readByte(); nodeRepID[i] = in.readInt(); nodeParentNodeRepID[i] = in.readInt(); } } int numElemNodeReps = in.readInt(); if (numElemNodeReps > 0) { elemNodeNodeNameRepID = new int[numElemNodeReps]; elemNodeAttrNodeRepID = new int[numElemNodeReps]; elemNodeChildNodeRepID = new int[numElemNodeReps]; elemNodeElemDeclRepID = new int[numElemNodeReps]; elemNodeNumAttributes = new int[numElemNodeReps]; elemNodeNumDefaultAttrs = new int[numElemNodeReps]; elemNodeNumChildren = new int[numElemNodeReps]; elemNodeFlags = new int[numElemNodeReps]; for (int i = 0; i < numElemNodeReps; i++) { elemNodeNodeNameRepID[i] = in.readInt(); elemNodeAttrNodeRepID[i] = in.readInt(); elemNodeChildNodeRepID[i] = in.readInt(); elemNodeElemDeclRepID[i] = 
in.readInt(); elemNodeNumAttributes[i] = in.readInt(); elemNodeNumDefaultAttrs[i] = in.readInt(); elemNodeNumChildren[i] = in.readInt(); elemNodeFlags[i] = in.readInt(); } } int numAttrNodeReps = in.readInt(); if (numAttrNodeReps > 0) { attrNodeNodeNameRepID = new int[numAttrNodeReps]; attrNodeTextRepID = new int[numAttrNodeReps]; attrNodeAttrDeclRepID = new int[numAttrNodeReps]; for (int i = 0; i < numAttrNodeReps; i++) { attrNodeNodeNameRepID[i] = in.readInt(); attrNodeTextRepID[i] = in.readInt(); attrNodeAttrDeclRepID[i] = in.readInt(); } } numLinkNodeReps = in.readInt(); if (numLinkNodeReps > 0) { linkNodeKey = new long[numLinkNodeReps]; linkNodeNodeCount = new long[numLinkNodeReps]; linkNodeNodeNameRepID = new int[numLinkNodeReps]; linkNodeNodeRepID = new int[numLinkNodeReps]; for (int i = 0; i < numLinkNodeReps; i++) { linkNodeKey[i] = in.readLong(); linkNodeNodeCount[i] = in.readLong(); linkNodeNodeNameRepID[i] = in.readInt(); linkNodeNodeRepID[i] = in.readInt(); } } int numDocNodeReps = in.readInt(); if (numDocNodeReps > 0) { docNodeTextRepID = new int[numDocNodeReps]; docNodeChildNodeRepID = new int[numDocNodeReps]; docNodeNumChildren = new int[numDocNodeReps]; for (int i = 0; i < numDocNodeReps; i++) { docNodeTextRepID[i] = in.readInt(); docNodeChildNodeRepID[i] = in.readInt(); docNodeNumChildren[i] = in.readInt(); } } int numPINodeReps = in.readInt(); if (numPINodeReps > 0) { piNodeTargetAtom = new int[numPINodeReps]; piNodeTextRepID = new int[numPINodeReps]; for (int i = 0; i < numPINodeReps; i++) { piNodeTargetAtom[i] = in.readInt(); piNodeTextRepID[i] = in.readInt(); } } numNSNodeReps = in.readInt(); if (numNSNodeReps > 0) { nsNodeOrdinal = new long[numNSNodeReps]; nsNodePrevNSNodeRepID = new int[numNSNodeReps]; nsNodePrefixAtom = new int[numNSNodeReps]; nsNodeUriAtom = new int[numNSNodeReps]; for (int i = 0; i < numNSNodeReps; i++) { nsNodeOrdinal[i] = in.readLong(); nsNodePrevNSNodeRepID[i] = in.readInt(); nsNodePrefixAtom[i] = in.readInt(); 
nsNodeUriAtom[i] = in.readInt(); } } // skip permission node since it's not exposed to the API uriTextRepID = in.readInt(); colsTextRepID = in.readInt(); numTextReps = in.readInt(); if (numTextReps > 0) { textReps = new int[numTextReps]; for (int i = 0; i < numTextReps; i++) { textReps[i] = in.readInt(); } } int numArrayNodeReps = in.readInt(); if (numArrayNodeReps > 0) { arrayNodeTextRepID = new int[numArrayNodeReps]; arrayNodeChildNodeRepID = new int[numArrayNodeReps]; arrayNodeNumChildren = new int[numArrayNodeReps]; for (int i = 0; i < numArrayNodeReps; i++) { arrayNodeTextRepID[i] = in.readInt(); arrayNodeChildNodeRepID[i] = in.readInt(); arrayNodeNumChildren[i] = in.readInt(); } } int numDoubles = in.readInt(); if (numDoubles > 0) { doubles = new double[numDoubles]; for (int i = 0; i < numDoubles; i++) { doubles[i] = in.readDouble(); } } }
From source file:org.apache.carbondata.core.metadata.blocklet.BlockletInfo.java
/**
 * Deserializes blocklet metadata from {@code input}, in the exact order it
 * was written: offsets, length-prefixed offset/length lists for dimension
 * and measure chunks, legacy chunk info, an optional sort flag, and the
 * per-page row counts.
 *
 * @param input source to read from
 * @throws IOException if the underlying stream fails
 */
@Override public void readFields(DataInput input) throws IOException {
    dimensionOffset = input.readLong();
    measureOffsets = input.readLong();
    // Dimension chunk offsets and lengths share one short count.
    int dimensionChunkOffsetsSize = input.readShort();
    dimensionChunkOffsets = new ArrayList<>(dimensionChunkOffsetsSize);
    for (int i = 0; i < dimensionChunkOffsetsSize; i++) {
        dimensionChunkOffsets.add(input.readLong());
    }
    dimensionChunksLength = new ArrayList<>(dimensionChunkOffsetsSize);
    for (int i = 0; i < dimensionChunkOffsetsSize; i++) {
        dimensionChunksLength.add(input.readInt());
    }
    // Measure chunk offsets and lengths share one short count.
    short measureChunkOffsetsSize = input.readShort();
    measureChunkOffsets = new ArrayList<>(measureChunkOffsetsSize);
    for (int i = 0; i < measureChunkOffsetsSize; i++) {
        measureChunkOffsets.add(input.readLong());
    }
    measureChunksLength = new ArrayList<>(measureChunkOffsetsSize);
    for (int i = 0; i < measureChunkOffsetsSize; i++) {
        measureChunksLength.add(input.readInt());
    }
    readChunkInfoForOlderVersions(input);
    // isSorted is optional: a presence flag precedes the value.
    final boolean isSortedPresent = input.readBoolean();
    if (isSortedPresent) {
        this.isSorted = input.readBoolean();
    }
    // Per-page row counts, short-prefixed.
    numberOfRowsPerPage = new int[input.readShort()];
    for (int i = 0; i < numberOfRowsPerPage.length; i++) {
        numberOfRowsPerPage[i] = input.readInt();
    }
}
From source file:org.apache.mahout.text.LuceneStorageConfiguration.java
/**
 * Deserializes this configuration from {@code in}. Fields are read in the
 * order written: sequence-files output path, separator-joined index paths,
 * id field, separator-joined stored fields, query string, and max hits.
 *
 * @param in source to read from
 * @throws IOException if the underlying stream fails
 * @throws RuntimeException if the serialized query string cannot be parsed
 */
@Override public void readFields(DataInput in) throws IOException {
    try {
        sequenceFilesOutputPath = new Path(in.readUTF());
        indexPaths = Lists.newArrayList();
        // Local renamed from "indexPaths": it previously shadowed the field
        // of the same name, which is error-prone and forced "this." access.
        String[] serializedIndexPaths = in.readUTF().split(SEPARATOR_PATHS);
        for (String indexPath : serializedIndexPaths) {
            indexPaths.add(new Path(indexPath));
        }
        idField = in.readUTF();
        fields = Arrays.asList(in.readUTF().split(SEPARATOR_FIELDS));
        query = new QueryParser(LUCENE_46, "query", new StandardAnalyzer(LUCENE_46)).parse(in.readUTF());
        maxHits = in.readInt();
    } catch (ParseException e) {
        // Wrap: Writable.readFields cannot declare ParseException.
        throw new RuntimeException("Could not deserialize " + this.getClass().getName(), e);
    }
}
From source file:com.chinamobile.bcbsp.bspcontroller.Counters.java
@Override public synchronized void readFields(DataInput in) throws IOException { int numClasses = in.readInt(); counters.clear();//from w w w . ja v a 2 s . c om while (numClasses-- > 0) { String groupName = Text.readString(in); Group group = new Group(groupName); group.readFields(in); counters.put(groupName, group); } }
From source file:org.apache.hawq.pxf.service.io.GPDBWritable.java
/**
 * Reads the next packet length from {@code in} into the {@code pktlen}
 * field and returns it. End-of-stream is reported two ways by the peer:
 * an actual EOF on the stream, or an explicit {@code EOF} (-1) length
 * value; both are logged and both yield {@code EOF}.
 *
 * @param in source to read from
 * @return the packet length, or {@code EOF} at end of stream
 * @throws IOException on any read failure other than clean end-of-stream
 */
private int readPktLen(DataInput in) throws IOException {
    pktlen = EOF; // default until a length is successfully read
    int length;
    try {
        length = in.readInt();
    } catch (EOFException ignored) {
        LOG.debug("Reached end of stream (EOFException)");
        return EOF;
    }
    pktlen = length;
    if (length == EOF) {
        LOG.debug("Reached end of stream (returned -1)");
    }
    return length;
}
From source file:com.fiorano.openesb.application.DmiObject.java
/**
 * Reads a length-prefixed UTF-8 string from the given input: an int byte
 * count (where -1 encodes {@code null}) followed by that many bytes.
 *
 * @param is input stream to read from
 * @return the decoded string, or {@code null} if the stored length is -1
 * @throws IOException if the stream ends early or cannot be read
 */
protected String readUTF(DataInput is) throws IOException {
    int length = is.readInt();
    if (length == -1)
        return null;
    byte[] buff = new byte[length];
    is.readFully(buff);
    // Use the charset constant instead of the "UTF-8" name string: no
    // per-call charset lookup and no (impossible-here) UnsupportedEncodingException path.
    return new String(buff, java.nio.charset.StandardCharsets.UTF_8);
}
From source file:com.ricemap.spateDB.core.RTree.java
/** * Returns the total size of the header (including the index) in bytes. * Assume that the input is aligned to the start offset of the tree * (header). Note that the part of the header is consumed from the given * input to be able to determine header size. * /* w w w. j ava2 s .c om*/ * @param in * @return * @throws IOException */ public static int getHeaderSize(DataInput in) throws IOException { int header_size = 0; /* int treeSize = */in.readInt(); header_size += 4; int height = in.readInt(); header_size += 4; if (height == 0) { // Empty tree. No results return header_size; } int degree = in.readInt(); header_size += 4; int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1)); /* int elementCount = */in.readInt(); header_size += 4; // Add the size of all nodes header_size += nodeCount * NodeSize; return header_size; }
From source file:org.apache.hama.graph.Vertex.java
/**
 * Deserializes this vertex from {@code in}. Each optional part — vertex id,
 * vertex value, edge list, and each edge's cost — is preceded by a boolean
 * presence flag; objects are created via {@code GraphJobRunner} factories
 * only when needed.
 *
 * @param in source to read from
 * @throws IOException if the underlying stream fails
 */
@Override public void readFields(DataInput in) throws IOException {
    // Vertex id (optional; allocated on first use, then filled from the stream).
    if (in.readBoolean()) {
        if (this.vertexID == null) {
            this.vertexID = GraphJobRunner.createVertexIDObject();
        }
        this.vertexID.readFields(in);
    }
    // Vertex value (optional; allocated on first use).
    if (in.readBoolean()) {
        if (this.value == null) {
            this.value = GraphJobRunner.createVertexValue();
        }
        this.value.readFields(in);
    }
    // Edges: always reset to an empty list, then filled only if present.
    this.edges = new ArrayList<Edge<V, E>>();
    if (in.readBoolean()) {
        int num = in.readInt();
        if (num > 0) {
            for (int i = 0; i < num; ++i) {
                V vertex = GraphJobRunner.createVertexIDObject();
                vertex.readFields(in);
                // Edge cost is itself optional, flagged per edge.
                E edgeCost = null;
                if (in.readBoolean()) {
                    edgeCost = GraphJobRunner.createEdgeCostObject();
                    edgeCost.readFields(in);
                }
                Edge<V, E> edge = new Edge<V, E>(vertex, edgeCost);
                this.edges.add(edge);
            }
        }
    }
    votedToHalt = in.readBoolean();
    // Remaining state is delegated to readState.
    readState(in);
}
From source file:dk.statsbiblioteket.util.LineReaderTest.java
/**
 * Reads a fixed sample sequence from {@code in} and asserts every value,
 * exercising each {@link DataInput} read method in turn. The read order
 * here must match the order the sample data was written in exactly.
 *
 * @param type label for the implementation under test (used in failure messages)
 * @param in   input positioned at the start of the sample data
 * @throws Exception if any read fails
 */
public void testSample(String type, DataInput in) throws Exception {
    assertEquals("Int 1 should work for " + type, 12345, in.readInt());
    assertEquals("Int 2 should work for " + type, -87, in.readInt());
    assertEquals("Long should work for " + type, 123456789L, in.readLong());
    assertEquals("String 1 should work for " + type, "Hello World!", in.readLine());
    assertEquals("String 2 should work for " + type, "Another world", in.readLine());
    assertEquals("Float should work for " + type, 0.5f, in.readFloat());
    assertEquals("Boolean 1 should work for " + type, true, in.readBoolean());
    assertEquals("Boolean 2 should work for " + type, false, in.readBoolean());
    assertEquals("Byte 1 should work for " + type, (byte) 12, in.readByte());
    assertEquals("Byte 2 should work for " + type, (byte) -12, in.readByte());
    assertEquals("Unsigned byte should work for " + type, 129, in.readUnsignedByte());
    assertEquals("Short should work for " + type, -4567, in.readShort());
    // A 5-byte ASCII run stored raw, compared byte-by-byte.
    byte[] loaded = new byte[5];
    byte[] expected = new byte[] { (byte) 'A', (byte) 'S', (byte) 'C', (byte) 'I', (byte) 'I' };
    in.readFully(loaded);
    for (int i = 0; i < loaded.length; i++) {
        assertEquals("Byte-stored string should be equal at byte " + i + " for " + type,
                expected[i], loaded[i]);
    }
}
From source file:org.apache.nutch.crawl.CrawlDatum.java
/**
 * Deserializes a CrawlDatum, handling every on-disk format version up to
 * {@code CUR_VERSION}. Version differences handled below: fetchInterval
 * was a float before v6, modifiedTime/signature appear from v3, metadata
 * from v4 (with an old MapWritable encoding before v7), and status codes
 * were renumbered in v5.
 *
 * @param in source to read from
 * @throws IOException if the stream fails
 * @throws VersionMismatchException if the stored version is newer than CUR_VERSION
 */
public void readFields(DataInput in) throws IOException {
    byte version = in.readByte(); // read version
    if (version > CUR_VERSION) // check version
        throw new VersionMismatchException(CUR_VERSION, version);
    status = in.readByte();
    fetchTime = in.readLong();
    retries = in.readByte();
    // v6+ stores fetchInterval as an int; earlier versions stored a float.
    if (version > 5) {
        fetchInterval = in.readInt();
    } else
        fetchInterval = Math.round(in.readFloat());
    score = in.readFloat();
    if (version > 2) {
        modifiedTime = in.readLong();
        // Signature is prefixed by a single length byte; 0 means none.
        int cnt = in.readByte();
        if (cnt > 0) {
            signature = new byte[cnt];
            in.readFully(signature);
        } else
            signature = null;
    }
    if (version > 3) {
        boolean hasMetadata = false;
        if (version < 7) {
            // Pre-7 metadata used the Hadoop MapWritable encoding; entries
            // are copied into the current metaData map.
            org.apache.hadoop.io.MapWritable oldMetaData = new org.apache.hadoop.io.MapWritable();
            if (in.readBoolean()) {
                hasMetadata = true;
                metaData = new org.apache.hadoop.io.MapWritable();
                oldMetaData.readFields(in);
            }
            for (Writable key : oldMetaData.keySet()) {
                metaData.put(key, oldMetaData.get(key));
            }
        } else {
            if (in.readBoolean()) {
                hasMetadata = true;
                metaData = new org.apache.hadoop.io.MapWritable();
                metaData.readFields(in);
            }
        }
        // Absent metadata is represented as null, not an empty map.
        if (hasMetadata == false)
            metaData = null;
    }
    // translate status codes: v5 renumbered them, so map old values forward.
    if (version < 5) {
        if (oldToNew.containsKey(status))
            status = oldToNew.get(status);
        else
            status = STATUS_DB_UNFETCHED;
    }
}