List of usage examples for java.io.DataInput.readInt()
int readInt() throws IOException;

Reads four input bytes and returns the int value they encode, high byte first (big-endian); throws EOFException if the stream ends before four bytes have been read, and IOException if any other I/O error occurs.
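Before the real-world sources below, here is a minimal self-contained sketch of the call (the class name and the value 42 are illustrative only, not taken from any of the sources):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadIntSketch {
    public static void main(String[] args) throws IOException {
        // Serialize an int with the matching writeInt, then read it back.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new DataOutputStream(bytes).writeInt(42);
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        int value = in.readInt(); // consumes exactly four bytes, big-endian
        System.out.println(value); // prints 42
    }
}

DataInputStream implements DataInput, which is why this same pattern recurs in all of the readFields/fromStream implementations that follow.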
From source file: com.fiorano.openesb.application.aps.ApplicationContext.java

/**
 * This method reads this <code>ApplicationContext</code> object from the
 * specified input stream object.
 *
 * @param is DataInput object
 * @param versionNo
 * @exception IOException if an error occurs while reading bytes or while
 *            converting them into the specified Java primitive type.
 * @since Tifosi2.0
 */
public void fromStream(DataInput is, int versionNo) throws IOException {
    super.fromStream(is, versionNo);

    String temp = UTFReaderWriter.readUTF(is);
    if (temp.equals(""))
        m_structure = null;
    else
        m_structure = temp;

    temp = UTFReaderWriter.readUTF(is);
    if (temp.equals(""))
        m_defaultInstance = null;
    else
        m_defaultInstance = temp;

    temp = UTFReaderWriter.readUTF(is);
    if (temp.equals(""))
        m_rootElement = null;
    else
        m_rootElement = temp;

    temp = UTFReaderWriter.readUTF(is);
    if (temp.equals(""))
        m_rootElementNamespace = null;
    else
        m_rootElementNamespace = temp;

    m_structureType = is.readInt();
}
From source file: org.apache.pig.data.BinInterSedes.java

private InternalMap readInternalMap(DataInput in) throws IOException {
    int size = in.readInt();
    InternalMap m = new InternalMap(size);
    for (int i = 0; i < size; i++) {
        Object key = readDatum(in);
        m.put(key, readDatum(in));
    }
    return m;
}
From source file: org.apache.hama.graph.GraphJobMessage.java

@Override
public void readFields(DataInput in) throws IOException {
    flag = in.readByte();
    if (isVertexMessage()) {
        vertexId = ReflectionUtils.newInstance(VERTEX_ID_CLASS, null);
        vertexId.readFields(in);
        vertexValue = ReflectionUtils.newInstance(VERTEX_VALUE_CLASS, null);
        vertexValue.readFields(in);
    } else if (isMapMessage()) {
        map = new MapWritable();
        map.readFields(in);
    } else if (isPartitioningMessage()) {
        Vertex<Writable, Writable, Writable> vertex = GraphJobRunner.newVertexInstance(VERTEX_CLASS, null);
        Writable vertexId = ReflectionUtils.newInstance(VERTEX_ID_CLASS, null);
        vertexId.readFields(in);
        vertex.setVertexID(vertexId);
        if (in.readBoolean()) {
            Writable vertexValue = ReflectionUtils.newInstance(VERTEX_VALUE_CLASS, null);
            vertexValue.readFields(in);
            vertex.setValue(vertexValue);
        }
        int size = in.readInt();
        vertex.setEdges(new ArrayList<Edge<Writable, Writable>>(size));
        for (int i = 0; i < size; i++) {
            Writable edgeVertexID = ReflectionUtils.newInstance(VERTEX_ID_CLASS, null);
            edgeVertexID.readFields(in);
            Writable edgeValue = null;
            if (in.readBoolean()) {
                edgeValue = ReflectionUtils.newInstance(EDGE_VALUE_CLASS, null);
                edgeValue.readFields(in);
            }
            vertex.getEdges().add(new Edge<Writable, Writable>(edgeVertexID, edgeValue));
        }
        this.vertex = vertex;
    } else if (isVerticesSizeMessage()) {
        vertices_size = new IntWritable();
        vertices_size.readFields(in);
    } else if (isBoundaryVertexSizeMessage()) {
        boundaryVertex_size = new IntWritable();
        boundaryVertex_size.readFields(in);
    } else {
        vertexId = ReflectionUtils.newInstance(VERTEX_ID_CLASS, null);
        vertexId.readFields(in);
    }
}
From source file: org.apache.hadoop.hbase.HTableDescriptor.java

/**
 * <em>INTERNAL</em> This method is a part of the {@link WritableComparable} interface
 * and is used for de-serialization of the HTableDescriptor over RPC
 * @deprecated Writables are going away. Use pb {@link #parseFrom(byte[])} instead.
 */
@Deprecated
@Override
public void readFields(DataInput in) throws IOException {
    int version = in.readInt();
    if (version < 3)
        throw new IOException("versions < 3 are not supported (and never existed!?)");
    // version 3+
    name = TableName.valueOf(Bytes.readByteArray(in));
    setRootRegion(in.readBoolean());
    setMetaRegion(in.readBoolean());
    values.clear();
    configuration.clear();
    int numVals = in.readInt();
    for (int i = 0; i < numVals; i++) {
        ImmutableBytesWritable key = new ImmutableBytesWritable();
        ImmutableBytesWritable value = new ImmutableBytesWritable();
        key.readFields(in);
        value.readFields(in);
        setValue(key, value);
    }
    families.clear();
    int numFamilies = in.readInt();
    for (int i = 0; i < numFamilies; i++) {
        HColumnDescriptor c = new HColumnDescriptor();
        c.readFields(in);
        families.put(c.getName(), c);
    }
    if (version >= 7) {
        int numConfigs = in.readInt();
        for (int i = 0; i < numConfigs; i++) {
            ImmutableBytesWritable key = new ImmutableBytesWritable();
            ImmutableBytesWritable value = new ImmutableBytesWritable();
            key.readFields(in);
            value.readFields(in);
            configuration.put(Bytes.toString(key.get(), key.getOffset(), key.getLength()),
                    Bytes.toString(value.get(), value.getOffset(), value.getLength()));
        }
    }
}
From source file: org.apache.hadoop.mapred.Task.java

public void readFields(DataInput in) throws IOException {
    jobFile = Text.readString(in);
    taskId = TaskAttemptID.read(in);
    partition = in.readInt();
    numSlotsRequired = in.readInt();
    taskStatus.readFields(in);
    skipRanges.readFields(in);
    currentRecIndexIterator = skipRanges.skipRangeIterator();
    currentRecStartIndex = currentRecIndexIterator.next();
    skipping = in.readBoolean();
    jobCleanup = in.readBoolean();
    if (jobCleanup) {
        jobRunStateForCleanup = WritableUtils.readEnum(in, JobStatus.State.class);
    }
    jobSetup = in.readBoolean();
    writeSkipRecs = in.readBoolean();
    taskCleanup = in.readBoolean();
    if (taskCleanup) {
        setPhase(TaskStatus.Phase.CLEANUP);
    }
    user = Text.readString(in);
}
From source file: com.fiorano.openesb.application.aps.Route.java

private void _readSelector(DataInput is, int versionNo) throws IOException {
    int size = is.readInt();
    for (int i = 0; i < size; i++) {
        String type = UTFReaderWriter.readUTF(is);
        if (!(type.equalsIgnoreCase(MESSAGE_BODY_XPATH) || type.equalsIgnoreCase(APP_CONTEXT_XPATH))) {
            if (type.equals(""))
                type = null;
            String value = UTFReaderWriter.readUTF(is);
            if (value.equals(""))
                value = null;
            addSelector(type, value);
        } else {
            XPathDmi dmi = new XPathDmi();
            dmi.fromStream(is, versionNo);
            if (m_selectors == null)
                m_selectors = new HashMap();
            m_selectors.put(type, dmi);
        }
    }
}
From source file: org.apache.sysml.runtime.matrix.data.FrameBlock.java

@Override
public void readFields(DataInput in) throws IOException {
    //read head (rows, cols)
    _numRows = in.readInt();
    int numCols = in.readInt();
    boolean isDefaultMeta = in.readBoolean();
    //allocate schema/meta data arrays
    _schema = (_schema != null && _schema.length == numCols) ? _schema : new ValueType[numCols];
    _colnames = (_colnames != null && _colnames.length == numCols) ? _colnames : new String[numCols];
    _colmeta = (_colmeta != null && _colmeta.length == numCols) ? _colmeta : new ColumnMetadata[numCols];
    _coldata = (_coldata != null && _coldata.length == numCols) ? _coldata : new Array[numCols];
    //read columns (value type, meta, data)
    for (int j = 0; j < numCols; j++) {
        ValueType vt = ValueType.values()[in.readByte()];
        String name = isDefaultMeta ? createColName(j) : in.readUTF();
        long ndistinct = isDefaultMeta ? 0 : in.readLong();
        String mvvalue = isDefaultMeta ? null : in.readUTF();
        Array arr = null;
        switch (vt) {
        case STRING:
            arr = new StringArray(new String[_numRows]);
            break;
        case BOOLEAN:
            arr = new BooleanArray(new boolean[_numRows]);
            break;
        case INT:
            arr = new LongArray(new long[_numRows]);
            break;
        case DOUBLE:
            arr = new DoubleArray(new double[_numRows]);
            break;
        default:
            throw new IOException("Unsupported value type: " + vt);
        }
        arr.readFields(in);
        _schema[j] = vt;
        _colnames[j] = name;
        _colmeta[j] = new ColumnMetadata(ndistinct, (mvvalue == null || mvvalue.isEmpty()) ? null : mvvalue);
        _coldata[j] = arr;
    }
}
From source file: de.hpi.fgis.hdrs.Triple.java

@Override
public void readFields(DataInput in) throws IOException {
    // read header
    Slen = in.readShort();
    Plen = in.readShort();
    Olen = in.readInt();
    multiplicity = in.readInt();
    // read data
    //int size = Slen + (int) Plen + Olen;
    int size = bufferSize();
    buffer = new byte[size];
    in.readFully(buffer, 0, size);
}
From source file: org.apache.isis.objectstore.nosql.db.file.server.FileServer.java

private void syncConnection(final Socket connection, final int readTimeout) {
    try {
        final CRC32 crc32 = new CRC32();
        final DataOutput output = new DataOutputStream(connection.getOutputStream());
        final DataInput input = new DataInputStream(new CheckedInputStream(connection.getInputStream(), crc32));
        if (input.readByte() != INIT) {
            return;
        }
        final LogRange logFileRange = Util.logFileRange();
        final long lastId = logFileRange.noLogFile() ? -1 : logFileRange.getLast();
        output.writeLong(lastId);
        do {
            if (input.readByte() != RECOVERY_LOG) {
                return;
            }
            crc32.reset();
            final long logId = input.readLong();
            final File file = Util.tmpLogFile(logId);
            LOG.info("syncing recovery file: " + file.getName());
            final BufferedOutputStream fileOutput = new BufferedOutputStream(new FileOutputStream(file));
            final byte[] buffer = new byte[8092];
            int length;
            while ((length = input.readInt()) > 0) {
                input.readFully(buffer, 0, length);
                fileOutput.write(buffer, 0, length);
            }
            fileOutput.close();
            final long calculatedChecksum = crc32.getValue();
            final long sentChecksum = input.readLong();
            if (calculatedChecksum != sentChecksum) {
                throw new NoSqlStoreException("Checksum didn't match during download of " + file.getName());
            }
            recover(file);
            final File renameTo = Util.logFile(logId);
            file.renameTo(renameTo);
        } while (true);
    } catch (final NoSqlStoreException e) {
        LOG.error("file server failure", e);
    } catch (final IOException e) {
        LOG.error("networking failure", e);
    } catch (final RuntimeException e) {
        LOG.error("request failure", e);
    } finally {
        try {
            connection.close();
        } catch (final IOException e) {
            LOG.warn("failure to close connection", e);
        }
    }
    // TODO restart
}
From source file: org.springframework.session.data.gemfire.AbstractGemFireOperationsSessionRepositoryTest.java

@Test
public void sessionAttributesFromData() throws Exception {
    final DataInput mockDataInput = mock(DataInput.class);

    given(mockDataInput.readInt()).willReturn(2);
    given(mockDataInput.readUTF()).willReturn("attrOne").willReturn("attrTwo");

    @SuppressWarnings("serial")
    AbstractGemFireOperationsSessionRepository.GemFireSessionAttributes sessionAttributes =
            new AbstractGemFireOperationsSessionRepository.GemFireSessionAttributes() {
        private int count = 0;

        @Override
        @SuppressWarnings("unchecked")
        <T> T readObject(DataInput in) throws ClassNotFoundException, IOException {
            assertThat(in).isSameAs(mockDataInput);
            return (T) Arrays.asList("testOne", "testTwo").get(count++);
        }
    };

    assertThat(sessionAttributes.getAttributeNames().isEmpty()).isTrue();

    sessionAttributes.fromData(mockDataInput);

    assertThat(sessionAttributes.getAttributeNames().size()).isEqualTo(2);
    assertThat(sessionAttributes.getAttributeNames().containsAll(asSet("attrOne", "attrTwo"))).isTrue();
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrOne"))).isEqualTo("testOne");
    assertThat(String.valueOf(sessionAttributes.getAttribute("attrTwo"))).isEqualTo("testTwo");

    verify(mockDataInput, times(1)).readInt();
    verify(mockDataInput, times(2)).readUTF();
}