Usage examples for the java.io.DataInput readInt() method
int readInt() throws IOException;
From source file:org.ambud.marauder.source.ids.pcap.layer3.ARP.java
/**
 * Decodes one ARP packet from the stream, in wire order.
 *
 * @param di     input positioned at the start of the ARP header
 * @param parent the Ethernet frame this ARP packet belongs to
 * @throws IOException if the stream ends before the packet is fully read
 */
@Override
public void decode(DataInput di, EtherFrame parent) throws IOException {
    this.parent = parent;
    // Fixed-size ARP header fields.
    this.hardType = di.readShort();
    this.protoType = di.readShort();
    this.hardSize = di.readByte();
    this.protoAddrSize = di.readByte();
    this.opCode = di.readShort();
    // Sender hardware address, then sender IPv4 address as a raw int.
    di.readFully(senderEthAddr);
    this.senderIP = di.readInt();
    // Target hardware address, then target IPv4 address as a raw int.
    di.readFully(targetEthAddr);
    this.targetIP = di.readInt();
}
From source file:edu.msu.cme.rdp.readseq.readers.core.SFFCore.java
/**
 * Reads and validates the SFF common header from the underlying stream.
 * Fails fast on a bad magic number or an unsupported file version, then
 * populates the shared {@code commonHeader} and, when the stream is
 * seekable and an index exists, loads the read index.
 *
 * @throws IOException if the header was already parsed, the file is not
 *                     an SFF file, or the version is unsupported
 */
private void parseCommonHeader() throws IOException {
    if (commonHeader != null) {
        throw new IOException("Common header already initialized");
    }
    commonHeader = new CommonHeader();
    DataInput in = super.getDataInput();

    // Sanity checks: magic number and supported version come first.
    commonHeader.magicNumber = in.readInt();
    if (commonHeader.magicNumber != SeqUtils.SFF_MAGIC_NUMBER) {
        throw new IOException("Not an SFF File");
    }
    commonHeader.version = in.readInt();
    if (commonHeader.version != 1) {
        throw new IOException("Cannot parse v" + commonHeader.version + " sff files");
    }

    // Fixed-width header fields, in wire order.
    commonHeader.indexOffset = in.readLong();
    commonHeader.indexLength = in.readInt();
    commonHeader.numReads = in.readInt();
    commonHeader.headerLength = in.readShort();
    commonHeader.keyLength = in.readShort();
    commonHeader.flowLength = in.readShort();
    commonHeader.flowgramFormat = in.readByte();

    // Variable-length flow and key sequences.
    // NOTE(review): new String(byte[]) uses the platform charset here —
    // presumably ASCII data; confirm before changing.
    byte[] flowBytes = new byte[commonHeader.flowLength];
    super.read(flowBytes);
    commonHeader.flow = new String(flowBytes);

    byte[] keyBytes = new byte[commonHeader.keyLength];
    super.read(keyBytes);
    commonHeader.key = new String(keyBytes);

    // Skip padding so the cursor lands on the next section boundary.
    int consumed = COMMON_HEADER_STATIC_SIZE + flowBytes.length + keyBytes.length;
    alignToBoundary(consumed);

    if (super.isSeekable() && commonHeader.indexOffset > commonHeader.headerLength) {
        readIndex();
    }
}
From source file:org.apache.carbondata.core.metadata.blocklet.BlockletInfo.java
/**
 * Deserializes the dimension and measure data chunks for legacy file
 * versions (V1 and V2), where each chunk is stored as a length-prefixed
 * byte array.
 *
 * @param input stream positioned at the start of the chunk section
 * @throws IOException on any read failure
 */
private void readChunkInfoForOlderVersions(DataInput input) throws IOException {
    // Dimension chunks: count, then <length, bytes> per chunk.
    short dimensionCount = input.readShort();
    dimensionColumnChunk = new ArrayList<>(dimensionCount);
    for (int i = 0; i < dimensionCount; i++) {
        byte[] serialized = new byte[input.readInt()];
        input.readFully(serialized);
        dimensionColumnChunk.add(deserializeDataChunk(serialized));
    }
    // Measure chunks follow the same layout.
    short measureCount = input.readShort();
    measureColumnChunk = new ArrayList<>(measureCount);
    for (int i = 0; i < measureCount; i++) {
        byte[] serialized = new byte[input.readInt()];
        input.readFully(serialized);
        measureColumnChunk.add(deserializeDataChunk(serialized));
    }
}
From source file:org.cloudata.core.tablet.TableSchema.java
public void readFields(DataInput in) throws IOException { int version = in.readInt(); tableName = CWritableUtils.readString(in); description = CWritableUtils.readString(in); numOfVersion = in.readInt();//w w w .j a v a2 s. c o m owner = CWritableUtils.readString(in); int count = in.readInt(); columns = new ArrayList<ColumnInfo>(); for (int i = 0; i < count; i++) { ColumnInfo columnInfo = new ColumnInfo(); columnInfo.readFields(in); columns.add(columnInfo); } if (version == 1) { //no permission info return; } int permissionLength = in.readInt(); for (int i = 0; i < permissionLength; i++) { String pUserId = CWritableUtils.readString(in); String pType = CWritableUtils.readString(in); permissions.put(pUserId, pType); } }
From source file:org.apache.nutch.crawl.MapWritable.java
public void readFields(DataInput in) throws IOException { clear();//from w w w .j a v a 2s .co m fSize = in.readInt(); if (fSize > 0) { // read class-id map fIdCount = in.readByte(); byte id; Class clazz; for (int i = 0; i < fIdCount; i++) { try { id = in.readByte(); clazz = Class.forName(Text.readString(in)); addIdEntry(id, clazz); } catch (Exception e) { if (LOG.isWarnEnabled()) { LOG.warn("Unable to load internal map entry" + e.toString()); } fIdCount--; } } KeyValueEntry entry; for (int i = 0; i < fSize; i++) { try { entry = getKeyValueEntry(in.readByte(), in.readByte()); entry.fKey.readFields(in); entry.fValue.readFields(in); if (fFirst == null) { fFirst = fLast = entry; } else { fLast = fLast.fNextEntry = entry; } } catch (IOException e) { if (LOG.isWarnEnabled()) { LOG.warn("Unable to load meta data entry, ignoring.. : " + e.toString()); } fSize--; } } } }
From source file:eu.stratosphere.nephele.configuration.Configuration.java
/**
 * {@inheritDoc}
 *
 * <p>Reads a property count followed by that many key/value string pairs
 * into the configuration, holding the {@code confData} lock for the whole
 * operation so readers never observe a partially loaded state.
 */
@Override
public void read(final DataInput in) throws IOException {
    synchronized (this.confData) {
        final int propertyCount = in.readInt();
        for (int i = 0; i < propertyCount; i++) {
            final String propertyKey = StringRecord.readString(in);
            final String propertyValue = StringRecord.readString(in);
            this.confData.put(propertyKey, propertyValue);
        }
    }
}
From source file:org.apache.hadoop.hive.ql.io.orc.OrcSplit.java
@Override public void readFields(DataInput in) throws IOException { //deserialize path, offset, length using FileSplit super.readFields(in); byte flags = in.readByte(); hasFooter = (FOOTER_FLAG & flags) != 0; isOriginal = (ORIGINAL_FLAG & flags) != 0; hasBase = (BASE_FLAG & flags) != 0; boolean hasFileId = (HAS_FILEID_FLAG & flags) != 0; deltas.clear();/* ww w . j a v a 2s . c om*/ int numDeltas = in.readInt(); for (int i = 0; i < numDeltas; i++) { AcidInputFormat.DeltaMetaData dmd = new AcidInputFormat.DeltaMetaData(); dmd.readFields(in); deltas.add(dmd); } if (hasFooter) { // deserialize FileMetaInfo fields String compressionType = Text.readString(in); int bufferSize = WritableUtils.readVInt(in); int metadataSize = WritableUtils.readVInt(in); // deserialize FileMetaInfo field footer int footerBuffSize = WritableUtils.readVInt(in); ByteBuffer footerBuff = ByteBuffer.allocate(footerBuffSize); in.readFully(footerBuff.array(), 0, footerBuffSize); OrcFile.WriterVersion writerVersion = ReaderImpl.getWriterVersion(WritableUtils.readVInt(in)); fileMetaInfo = new ReaderImpl.FileMetaInfo(compressionType, bufferSize, metadataSize, footerBuff, writerVersion); } if (hasFileId) { fileId = in.readLong(); } }
From source file:com.chinamobile.bcbsp.util.JobStatus.java
/**
 * Deserializes this job status, reading every field in write order.
 *
 * @param in source of the serialized status
 * @throws IOException on any read failure
 */
@Override
public synchronized void readFields(DataInput in) throws IOException {
    this.jobid = new BSPJobID();
    jobid.readFields(in);
    this.setupProgress = in.readLong();
    this.progress = in.readLong();
    this.cleanupProgress = in.readLong();
    this.runState = in.readInt();
    this.state = WritableUtils.readEnum(in, State.class);
    this.startTime = in.readLong();
    this.finishTime = in.readLong();
    this.user = Text.readString(in);
    // schedulingInfo is not part of the current wire format and is not read.
    this.superstepCount = in.readLong();
    this.recovery = in.readBoolean();
}
From source file:eu.stratosphere.configuration.Configuration.java
@Override public void read(final DataInput in) throws IOException { synchronized (this.confData) { final int numberOfProperties = in.readInt(); for (int i = 0; i < numberOfProperties; i++) { final String key = StringRecord.readString(in); final String value = StringRecord.readString(in); this.confData.put(key, value); }// w ww. j av a2s . c o m } }
From source file:io.hops.erasure_coding.PolicyInfo.java
/**
 * Deserializes this policy: source path (empty string encodes null),
 * policy name, codec id, description, then a count-prefixed list of
 * name/value property pairs.
 *
 * @param in source of the serialized policy
 * @throws IOException on any read failure
 */
public void readFields(DataInput in) throws IOException {
    String srcPathText = Text.readString(in);
    // The writer emits "" when no source path was set.
    this.srcPath = srcPathText.isEmpty() ? null : new Path(srcPathText);

    this.policyName = Text.readString(in);
    this.codecId = Text.readString(in);
    this.description = Text.readString(in);

    // Property section: count, then that many name/value pairs.
    for (int remaining = in.readInt(); remaining > 0; remaining--) {
        String name = Text.readString(in);
        String value = Text.readString(in);
        properties.setProperty(name, value);
    }
}