List of usage examples for java.io.DataInput.readInt()
int readInt() throws IOException;
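Before the source-file examples below, a minimal self-contained sketch (class and variable names are illustrative, not from any of the projects listed): DataInputStream implements DataInput, and readInt() reads four bytes, interprets them as a big-endian int, and throws EOFException if the stream ends before all four bytes are available.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadIntExample {
    public static void main(String[] args) throws IOException {
        // Write an int as four big-endian bytes.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(baos);
        out.writeInt(42);
        out.flush();

        // Read the same four bytes back through the DataInput interface.
        DataInput in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        int value = in.readInt();
        System.out.println(value); // prints 42
    }
}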
From source file:edu.umn.cs.spatialHadoop.visualization.FrequencyMap.java
@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    int length = in.readInt();
    byte[] serializedData = new byte[length];
    in.readFully(serializedData);
    ByteArrayInputStream bais = new ByteArrayInputStream(serializedData);
    GZIPInputStream gzis = new GZIPInputStream(bais);
    byte[] buffer = new byte[8];
    gzis.read(buffer);
    ByteBuffer bbuffer = ByteBuffer.wrap(buffer);
    int width = bbuffer.getInt();
    int height = bbuffer.getInt();
    // Reallocate memory only if needed
    if (width != this.getWidth() || height != this.getHeight())
        frequencies = new float[width][height];
    buffer = new byte[getHeight() * 4];
    for (int x = 0; x < getWidth(); x++) {
        int size = 0;
        while (size < buffer.length) {
            size += gzis.read(buffer, size, buffer.length - size);
        }
        bbuffer = ByteBuffer.wrap(buffer);
        for (int y = 0; y < getHeight(); y++) {
            frequencies[x][y] = bbuffer.getFloat();
        }
    }
}
From source file:org.apache.geode.cache.query.data.Portfolio.java
public void fromData(DataInput in) throws IOException, ClassNotFoundException {
    this.ID = in.readInt();
    boolean isNull = DataSerializer.readPrimitiveBoolean(in);
    if (!isNull) {
        this.shortID = DataSerializer.readShort(in);
    }
    this.pkid = DataSerializer.readString(in);
    this.position1 = (Position) DataSerializer.readObject(in);
    this.position2 = (Position) DataSerializer.readObject(in);
    this.positions = (HashMap) DataSerializer.readObject(in);
    this.collectionHolderMap = (HashMap) DataSerializer.readObject(in);
    this.type = DataSerializer.readString(in);
    this.status = DataSerializer.readString(in);
    this.names = DataSerializer.readStringArray(in);
    this.description = DataSerializer.readString(in);
    this.createTime = DataSerializer.readPrimitiveLong(in);
    this.createDate = DataSerializer.readDate(in);
    // Read Position3
    int position3Size = in.readInt();
    if (position3Size != 0) {
        this.position3 = new Position[position3Size];
        for (int i = 0; i < position3Size; i++) {
            this.position3[i] = (Position) DataSerializer.readObject(in);
        }
    }
    this.indexKey = in.readInt();
}
From source file:com.nearinfinity.blur.mapreduce.BlurTask.java
@Override
public void readFields(DataInput input) throws IOException {
    _maxRecordCount = input.readLong();
    _ramBufferSizeMB = input.readInt();
    _optimize = input.readBoolean();
    _indexingType = INDEXING_TYPE.valueOf(readString(input));
    byte[] data = new byte[input.readInt()];
    input.readFully(data);
    ByteArrayInputStream is = new ByteArrayInputStream(data);
    TIOStreamTransport trans = new TIOStreamTransport(is);
    TBinaryProtocol protocol = new TBinaryProtocol(trans);
    _tableDescriptor = new TableDescriptor();
    try {
        _tableDescriptor.read(protocol);
    } catch (TException e) {
        throw new IOException(e);
    }
}
From source file:com.aliyun.openservices.tablestore.hadoop.MultiCriteria.java
@Override
public void readFields(DataInput in) throws IOException {
    byte tag = in.readByte();
    if (tag != WritableConsts.MULTI_CRITERIA) {
        throw new IOException("broken input stream");
    }
    List<RangeRowQueryCriteria> newCriteria = new ArrayList<RangeRowQueryCriteria>();
    int sz = in.readInt();
    for (int i = 0; i < sz; ++i) {
        newCriteria.add(RangeRowQueryCriteriaWritable.read(in).getRangeRowQueryCriteria());
    }
    criteria = newCriteria;
}
From source file:parquet.hadoop.ParquetInputSplit.java
/**
 * {@inheritDoc}
 */
@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    int blocksSize = in.readInt();
    this.blocks = new ArrayList<BlockMetaData>(blocksSize);
    for (int i = 0; i < blocksSize; i++) {
        blocks.add(readBlock(in));
    }
    this.requestedSchema = decompressString(Text.readString(in));
    this.fileSchema = decompressString(Text.readString(in));
    this.extraMetadata = readKeyValues(in);
    this.readSupportMetadata = readKeyValues(in);
}
From source file:com.chinamobile.bcbsp.sync.SuperStepCommand.java
@Override
public void readFields(DataInput in) throws IOException {
    this.commandType = in.readInt();
    this.initWritePath = Text.readString(in);
    this.initReadPath = Text.readString(in);
    this.ableCheckPoint = in.readInt();
    this.nextSuperStepNum = in.readInt();
    this.oldCheckPoint = in.readInt();
    int count = in.readInt();
    this.aggValues = new String[count];
    for (int i = 0; i < count; i++) {
        this.aggValues[i] = Text.readString(in);
    }
    int size = WritableUtils.readVInt(in);
    if (size > 0) {
        String[] partitionToWMName = WritableUtils.readCompressedStringArray(in);
        this.partitionToWorkerManagerNameAndPort = new HashMap<Integer, String>();
        for (int j = 0; j < size; j++) {
            this.partitionToWorkerManagerNameAndPort.put(j, partitionToWMName[j]);
        }
    }
    this.migrateStaffIDs = in.readUTF();
    this.migrateVertexCommand.readFields(in);
}
From source file:org.apache.hadoop.hive.accumulo.AccumuloHiveRow.java
@Override
public void readFields(DataInput dataInput) throws IOException {
    if (dataInput.readBoolean()) {
        rowId = dataInput.readUTF();
    }
    int size = dataInput.readInt();
    for (int i = 0; i < size; i++) {
        int cfLength = dataInput.readInt();
        byte[] cfData = new byte[cfLength];
        dataInput.readFully(cfData, 0, cfLength);
        Text cf = new Text(cfData);
        int cqLength = dataInput.readInt();
        byte[] cqData = new byte[cqLength];
        dataInput.readFully(cqData, 0, cqLength);
        Text cq = new Text(cqData);
        int valSize = dataInput.readInt();
        byte[] val = new byte[valSize];
        for (int j = 0; j < valSize; j++) {
            val[j] = dataInput.readByte();
        }
        tuples.add(new ColumnTuple(cf, cq, val));
    }
}
From source file:uk.ac.cam.eng.extraction.hadoop.datatypes.ProvenanceCountMap.java
@Override
public void readFields(DataInput in) throws IOException {
    instance.clear();
    byte length = in.readByte();
    for (int i = 0; i < length; ++i) {
        byte key = in.readByte();
        int value = in.readInt();
        instance.put(getCached(key), getCached(value));
    }
}
From source file:parquet.hadoop.ParquetInputSplit.java
private ColumnChunkMetaData readColumn(DataInput in) throws IOException {
    CompressionCodecName codec = CompressionCodecName.values()[in.readInt()];
    String[] columnPath = new String[in.readInt()];
    for (int i = 0; i < columnPath.length; i++) {
        columnPath[i] = in.readUTF().intern();
    }
    PrimitiveTypeName type = PrimitiveTypeName.values()[in.readInt()];
    int encodingsSize = in.readInt();
    Set<Encoding> encodings = new HashSet<Encoding>(encodingsSize);
    for (int i = 0; i < encodingsSize; i++) {
        encodings.add(Encoding.values()[in.readInt()]);
    }
    ColumnChunkMetaData column = ColumnChunkMetaData.get(ColumnPath.get(columnPath), type, codec, encodings,
            in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong());
    return column;
}
From source file:org.apache.hama.bsp.GroomServerStatus.java
@Override
public void readFields(DataInput in) throws IOException {
    this.groomName = Text.readString(in);
    this.rpcServer = Text.readString(in);
    this.hostName = Text.readString(in);
    this.failures = in.readInt();
    this.maxTasks = in.readInt();
    taskReports.clear();
    int numTasks = in.readInt();
    TaskStatus status;
    for (int i = 0; i < numTasks; i++) {
        status = new TaskStatus();
        status.readFields(in);
        taskReports.add(status);
    }
}