List of usage examples for java.io DataOutput writeLong
void writeLong(long v) throws IOException;
Writes a long value, which is comprised of eight bytes, to the output stream.

From source file: org.apache.hadoop.mapred.SampleTaskStatus.java
/**
 * Serializes this sample-task status to {@code out}.
 *
 * <p>Field order must match the corresponding {@code readFields} exactly:
 * task id, tracker name (UTF), then seven {@code long} timing/size fields.
 *
 * @param out stream to write to
 * @throws IOException if the underlying stream fails
 */
public void write(DataOutput out) throws IOException {
    // Guard: substitute an empty TaskAttemptID so serialization never NPEs
    // when no sample map task has been recorded yet.
    if (sampleMapTaskId == null)
        sampleMapTaskId = new TaskAttemptID();
    sampleMapTaskId.write(out);
    out.writeUTF(sampleMapTracker);
    out.writeLong(readInputStartTime);
    out.writeLong(readInputDoneTime);
    out.writeLong(writeOutputStartTime);
    out.writeLong(writeOutputDoneTime);
    out.writeLong(networkSampleMapCopyDurationMilliSec);
    out.writeLong(additionalSpillDurationMilliSec);
    out.writeLong(additionalSpillSize);
}
From source file:org.kiji.schema.mapreduce.KijiIncrement.java
/** {@inheritDoc} */ @Override/*from ww w . java2 s . com*/ public void write(DataOutput out) throws IOException { final byte[] bytes = mEntityId.getHBaseRowKey(); out.writeInt(bytes.length); out.write(bytes); // Family/Qualifier/Amount. out.writeUTF(mFamily); out.writeUTF(mQualifier); out.writeLong(mAmount); }
From source file:com.flipkart.fdp.migration.distcp.config.ConnectionConfig.java
/**
 * Serializes this connection configuration to {@code out}.
 *
 * <p>Order must match {@code readFields}: user name, password, key file,
 * connection URL, security type (as its string form), free space, path.
 *
 * @param out stream to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput out) throws IOException {
    Text.writeString(out, getUserName());
    // NOTE(review): password is serialized in clear text — confirm this is intended.
    Text.writeString(out, getUserPassword());
    Text.writeString(out, getKeyFile());
    Text.writeString(out, getConnectionURL());
    // securityType is written via String.valueOf, so the reader must parse it back.
    Text.writeString(out, String.valueOf(securityType));
    out.writeLong(getFreeSpaceInBytes());
    Text.writeString(out, getPath());
}
From source file:org.apache.hadoop.mapred.SortedRanges.java
public synchronized void write(DataOutput out) throws IOException { out.writeLong(indicesCount); out.writeInt(ranges.size());//w w w . j a va2 s . co m Iterator<Range> it = ranges.iterator(); while (it.hasNext()) { Range range = it.next(); range.write(out); } }
From source file:org.apache.carbondata.core.metadata.blocklet.BlockletInfo.java
/**
 * Serializes this blocklet's metadata to {@code output}.
 *
 * <p>Layout (must mirror the corresponding read path): dimension/measure
 * offsets, count-prefixed dimension chunk offsets then lengths,
 * count-prefixed measure chunk offsets then lengths, legacy chunk info,
 * a tri-state sort flag (presence bit + value), and an optional
 * rows-per-page array.
 *
 * @param output stream to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput output) throws IOException {
    output.writeLong(dimensionOffset);
    output.writeLong(measureOffsets);

    // Dimension chunk offsets, then their lengths, each run count-prefixed
    // by the same short so the reader can size its lists.
    int dsize = dimensionChunkOffsets != null ? dimensionChunkOffsets.size() : 0;
    output.writeShort(dsize);
    for (int i = 0; i < dsize; i++) {
        output.writeLong(dimensionChunkOffsets.get(i));
    }
    for (int i = 0; i < dsize; i++) {
        output.writeInt(dimensionChunksLength.get(i));
    }

    // Measure chunk offsets and lengths, same scheme as dimensions.
    int mSize = measureChunkOffsets != null ? measureChunkOffsets.size() : 0;
    output.writeShort(mSize);
    for (int i = 0; i < mSize; i++) {
        output.writeLong(measureChunkOffsets.get(i));
    }
    for (int i = 0; i < mSize; i++) {
        output.writeInt(measureChunksLength.get(i));
    }

    writeChunkInfoForOlderVersions(output);

    // Tri-state sort flag: a presence bit, then the value only when set.
    boolean isSortedPresent = (isSorted != null);
    output.writeBoolean(isSortedPresent);
    if (isSortedPresent) {
        output.writeBoolean(isSorted);
    }

    // Hoisted: the original called getNumberOfRowsPerPage() 2 + n times
    // (null check, length, and once per loop iteration); call it once.
    int[] rowsPerPage = getNumberOfRowsPerPage();
    if (rowsPerPage != null) {
        output.writeShort(rowsPerPage.length);
        for (int i = 0; i < rowsPerPage.length; i++) {
            output.writeInt(rowsPerPage[i]);
        }
    }
}
From source file:org.apache.cassandra.io.LazilyCompactedRow.java
/**
 * Serializes this row to {@code out}: a data-size long followed by the
 * row payload (header buffer, CF info, column count, then each column).
 *
 * @param out stream to write to
 * @throws IOException if the underlying stream fails
 */
public void write(DataOutput out) throws IOException {
    // Fast path: a single source row that needs no purging or
    // deserialization is echoed through verbatim.
    if (rows.size() == 1 && !shouldPurge && !controller.needDeserialize()) {
        SSTableIdentityIterator row = rows.get(0);
        assert row.dataSize > 0;
        out.writeLong(row.dataSize);
        row.echoData(out);
        return;
    }

    // Slow path: serialize the CF info into a side buffer first so the
    // total payload size can be written ahead of the payload itself.
    DataOutputBuffer clockOut = new DataOutputBuffer();
    ColumnFamily.serializer().serializeCFInfo(emptyColumnFamily, clockOut);

    long dataSize = headerBuffer.getLength() + clockOut.getLength() + columnSerializedSize;
    assert dataSize > 0;
    out.writeLong(dataSize);
    out.write(headerBuffer.getData(), 0, headerBuffer.getLength());
    out.write(clockOut.getData(), 0, clockOut.getLength());

    out.writeInt(columnCount);
    for (Iterator<IColumn> iter = iterator(); iter.hasNext(); ) {
        emptyColumnFamily.getColumnSerializer().serialize(iter.next(), out);
    }
}
From source file:org.apache.hadoop.hbase.HServerInfo.java
/**
 * Serializes this server info to {@code out}: address, start code, load,
 * a legacy info port, hostname, and optionally the flushed-sequence-id map.
 *
 * @param out stream to write to
 * @throws IOException if the underlying stream fails
 */
public void write(DataOutput out) throws IOException {
    this.serverAddress.write(out);
    out.writeLong(this.startCode);
    this.load.write(out);
    // Still serializing the info port for backward compatibility but it is not used.
    out.writeInt(HConstants.DEFAULT_REGIONSERVER_INFOPORT);
    out.writeUTF(hostname);
    // NOTE(review): the reader must use the same sendSequenceIds setting,
    // otherwise the stream lengths will disagree — confirm both sides match.
    if (sendSequenceIds) {
        HbaseMapWritable<byte[], Long> sequenceIdsWritable = new HbaseMapWritable<byte[], Long>(
                flushedSequenceIdByRegion);
        sequenceIdsWritable.write(out);
    }
}
From source file:org.apache.pig.data.SchemaTuple.java
/**
 * Writes a DateTime as its millisecond instant (long) followed by its
 * time-zone offset in minutes (short).
 *
 * @param out stream to write to
 * @param v   the datetime to serialize
 * @throws IOException if the underlying stream fails
 */
protected static void write(DataOutput out, DateTime v) throws IOException {
    final long instantMillis = v.getMillis();
    final int offsetMinutes = v.getZone().getOffset(v) / ONE_MINUTE;
    out.writeLong(instantMillis);
    out.writeShort(offsetMinutes);
}
From source file:com.chinamobile.bcbsp.util.JobStatus.java
/**
 * Serializes this job status to {@code out}.
 *
 * <p>Order must match {@code readFields}: job id, three progress values,
 * run state, state enum, start/finish times, user, superstep count,
 * recovery flag.
 *
 * @param out stream to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public synchronized void write(DataOutput out) throws IOException {
    jobid.write(out);
    // NOTE(review): progress values are serialized as longs here —
    // confirm the fields really are longs and not fractional progress.
    out.writeLong(setupProgress);
    out.writeLong(progress);
    out.writeLong(cleanupProgress);
    out.writeInt(runState);
    WritableUtils.writeEnum(out, this.state);
    out.writeLong(startTime);
    out.writeLong(finishTime);
    Text.writeString(out, user);
    // schedulingInfo is deliberately not serialized:
    // Text.writeString(out, schedulingInfo);
    out.writeLong(superstepCount);
    out.writeBoolean(recovery);
}
From source file:org.apache.hadoop.hbase.io.BatchUpdate.java
/**
 * Serializes this batch update to {@code out}: row key, timestamp, size,
 * a count-prefixed list of operations, then the row lock id.
 *
 * @param out stream to write to
 * @throws IOException if the underlying stream fails
 */
public void write(final DataOutput out) throws IOException {
    Bytes.writeByteArray(out, this.row);
    out.writeLong(timestamp);
    out.writeLong(this.size);
    // Count prefix so the reader knows how many operations follow.
    out.writeInt(operations.size());
    for (BatchOperation op : operations) {
        op.write(out);
    }
    out.writeLong(this.rowLock);
}