List of usage examples for java.io DataOutput writeInt
void writeInt(int v) throws IOException;
Writes an int value, which is comprised of four bytes, to the output stream.
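Before the framework-specific examples, a minimal round trip with the JDK's own DataOutputStream and DataInputStream shows what writeInt puts on the wire: exactly four bytes, high byte first, which readInt recovers unchanged.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            out.writeInt(42);                 // high byte first, four bytes total
        }
        byte[] bytes = buffer.toByteArray();  // bytes.length == 4

        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes))) {
            int value = in.readInt();         // reads the same four bytes back
            System.out.println(value);        // prints 42
        }
    }
}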
From source file:org.apache.hadoop.hdfs.protocol.LocatedBlocks.java

public void write(DataOutput out) throws IOException {
    out.writeLong(this.fileLength);
    out.writeBoolean(underConstruction);
    // write located blocks
    int nrBlocks = locatedBlockCount();
    out.writeInt(nrBlocks);
    if (nrBlocks == 0) {
        return;
    }
    for (LocatedBlock blk : this.blocks) {
        blk.write(out);
    }
}
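The count-then-elements layout above is the usual Writable convention: on the receiving side, readInt recovers the element count written by writeInt and bounds the read loop. The fragment below is only an illustrative sketch of that read side, assuming java.io.DataInput and java.util.ArrayList imports, fields mirroring the write method, and a no-arg LocatedBlock constructor; it is not the actual HDFS source.

// Illustrative read-side sketch (not the actual HDFS code); names are assumed
// to mirror the write method above.
public void readFields(DataInput in) throws IOException {
    this.fileLength = in.readLong();
    this.underConstruction = in.readBoolean();
    int nrBlocks = in.readInt();            // the count written by writeInt
    this.blocks = new ArrayList<LocatedBlock>(nrBlocks);
    for (int i = 0; i < nrBlocks; i++) {
        LocatedBlock blk = new LocatedBlock();
        blk.readFields(in);                 // each element deserializes itself
        this.blocks.add(blk);
    }
}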
From source file:org.apache.mahout.classifier.chi_rw.data.Dataset.java
@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(attributes.length); // nb attributes
    for (Attribute attr : attributes) {
        WritableUtils.writeString(out, attr.name());
    }
    Chi_RWUtils.writeArray(out, ignored);
    // only CATEGORICAL attributes have values
    for (String[] vals : values) {
        if (vals != null) {
            WritableUtils.writeStringArray(out, vals);
        }
    }
    // only NUMERICAL attributes have values
    for (double[] vals : nvalues) {
        if (vals != null) {
            Chi_RWUtils.writeArray(out, vals);
        }
    }
    for (double[] vals : minmaxvalues) {
        if (vals != null) {
            Chi_RWUtils.writeArray(out, vals);
        }
    }
    out.writeInt(labelId);
    out.writeInt(nbInstances);
}
From source file:org.apache.mahout.classifier.chi_rwcs.data.Dataset.java
@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(attributes.length); // nb attributes
    for (Attribute attr : attributes) {
        WritableUtils.writeString(out, attr.name());
    }
    Chi_RWCSUtils.writeArray(out, ignored);
    // only CATEGORICAL attributes have values
    for (String[] vals : values) {
        if (vals != null) {
            WritableUtils.writeStringArray(out, vals);
        }
    }
    // only NUMERICAL attributes have values
    for (double[] vals : nvalues) {
        if (vals != null) {
            DFUtils.writeArray(out, vals);
        }
    }
    for (double[] vals : minmaxvalues) {
        if (vals != null) {
            DFUtils.writeArray(out, vals);
        }
    }
    out.writeInt(labelId);
    out.writeInt(nbInstances);
}
From source file:com.chinamobile.bcbsp.util.StaffStatus.java
/**
 * Serialize: write this object to out.
 *
 * @param out Writes to the output stream.
 */
@Override
public void write(DataOutput out) throws IOException {
    jobId.write(out);
    staffId.write(out);
    out.writeInt(progress);
    WritableUtils.writeEnum(out, runState);
    Text.writeString(out, stateString);
    WritableUtils.writeEnum(out, phase);
    out.writeLong(startTime);
    out.writeLong(finishTime);
    out.writeLong(superstepCount);
    if (this.faultFlag == 0) {
        out.writeInt(this.faultFlag);
    } else {
        out.writeInt(this.faultFlag);
        this.fault.write(out);
    }
    out.writeBoolean(recovery);
    out.writeLong(this.currentSSTime);
    Text.writeString(out, workerManager);
}
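Here writeInt doubles as a presence flag: faultFlag is written on both branches, but the fault object follows only when the flag is non-zero. Inside a matching readFields(DataInput in), that convention would be read back roughly as sketched below; the Fault type and its no-arg constructor are assumptions mirroring the write method, not the actual BC-BSP source.

// Illustrative sketch of the read side of the flag-then-optional-object
// convention above (assumed field and type names).
this.faultFlag = in.readInt();      // the flag written by writeInt
if (this.faultFlag != 0) {
    this.fault = new Fault();       // assumed no-arg constructor
    this.fault.readFields(in);      // present only when the flag is non-zero
}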
From source file:org.apache.hadoop.hbase.io.BatchUpdate.java
public void write(final DataOutput out) throws IOException {
    Bytes.writeByteArray(out, this.row);
    out.writeLong(timestamp);
    out.writeLong(this.size);
    out.writeInt(operations.size());
    for (BatchOperation op : operations) {
        op.write(out);
    }
    out.writeLong(this.rowLock);
}
From source file:com.mongodb.hadoop.mapred.input.MongoInputSplit.java
/**
 * Serialize the Split instance
 */
public void write(DataOutput out) throws IOException {
    final ObjectOutputStream objOut = new ObjectOutputStream((OutputStream) out);
    // TODO - Use object outputstream instead of going to <-> from string?
    out.writeUTF(_mongoURI.toString());
    out.writeUTF(JSON.serialize(_querySpec));
    out.writeUTF(JSON.serialize(_fieldSpec));
    out.writeUTF(JSON.serialize(_sortSpec));
    out.writeInt(_limit);
    out.writeInt(_skip);
    objOut.close();
}
From source file:org.apache.hadoop.hive.accumulo.AccumuloHiveRow.java
@Override
public void write(DataOutput dataOutput) throws IOException {
    if (null != rowId) {
        dataOutput.writeBoolean(true);
        dataOutput.writeUTF(rowId);
    } else {
        dataOutput.writeBoolean(false);
    }
    int size = tuples.size();
    dataOutput.writeInt(size);
    for (ColumnTuple tuple : tuples) {
        Text cf = tuple.getCf(), cq = tuple.getCq();
        dataOutput.writeInt(cf.getLength());
        dataOutput.write(cf.getBytes(), 0, cf.getLength());
        dataOutput.writeInt(cq.getLength());
        dataOutput.write(cq.getBytes(), 0, cq.getLength());
        byte[] value = tuple.getValue();
        dataOutput.writeInt(value.length);
        dataOutput.write(value);
    }
}
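This example shows the common length-prefixed byte-array idiom: each writeInt records how many bytes follow, so the reader can allocate a buffer of exactly that size and fill it with readFully. A generic read-side sketch of the idiom, using only java.io.DataInput and with a helper name that is a stand-in rather than the real Hive/Accumulo code, might look like this:

// Generic sketch: read back one byte array that was written as
// writeInt(length) followed by write(bytes). Not the actual Hive source.
static byte[] readLengthPrefixed(DataInput in) throws IOException {
    int length = in.readInt();       // the length written by writeInt
    byte[] data = new byte[length];
    in.readFully(data);              // consume exactly that many bytes
    return data;
}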
From source file:org.apache.hadoop.hive.ql.io.orc.OrcSplit.java
@Override
public void write(DataOutput out) throws IOException {
    // serialize path, offset, length using FileSplit
    super.write(out);
    int flags = (hasBase ? BASE_FLAG : 0) | (isOriginal ? ORIGINAL_FLAG : 0) | (hasFooter ? FOOTER_FLAG : 0)
            | (fileId != null ? HAS_FILEID_FLAG : 0);
    out.writeByte(flags);
    out.writeInt(deltas.size());
    for (AcidInputFormat.DeltaMetaData delta : deltas) {
        delta.write(out);
    }
    if (hasFooter) {
        // serialize FileMetaInfo fields
        Text.writeString(out, fileMetaInfo.compressionType);
        WritableUtils.writeVInt(out, fileMetaInfo.bufferSize);
        WritableUtils.writeVInt(out, fileMetaInfo.metadataSize);
        // serialize FileMetaInfo field footer
        ByteBuffer footerBuff = fileMetaInfo.footerBuffer;
        footerBuff.reset();
        // write length of buffer
        WritableUtils.writeVInt(out, footerBuff.limit() - footerBuff.position());
        out.write(footerBuff.array(), footerBuff.position(), footerBuff.limit() - footerBuff.position());
        WritableUtils.writeVInt(out, fileMetaInfo.writerVersion.getId());
    }
    if (fileId != null) {
        out.writeLong(fileId.longValue());
    }
}
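Note how this split mixes two integer encodings: writeInt for the delta count always costs four bytes, while WritableUtils.writeVInt stores small values such as buffer and metadata sizes in one to five bytes. A small, self-contained comparison (assuming Hadoop's org.apache.hadoop.io.WritableUtils is on the classpath) illustrates the difference:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.WritableUtils;

// Size comparison: DataOutput.writeInt always emits exactly four bytes,
// while WritableUtils.writeVInt emits one to five bytes depending on magnitude.
public class IntVsVIntSize {
    public static void main(String[] args) throws IOException {
        for (int v : new int[] { 0, 127, 1_000_000 }) {
            ByteArrayOutputStream fixed = new ByteArrayOutputStream();
            new DataOutputStream(fixed).writeInt(v);                 // always 4 bytes

            ByteArrayOutputStream variable = new ByteArrayOutputStream();
            WritableUtils.writeVInt(new DataOutputStream(variable), v); // 1-5 bytes

            System.out.println(v + ": writeInt=" + fixed.size()
                    + " bytes, writeVInt=" + variable.size() + " bytes");
        }
    }
}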
From source file:org.kiji.schema.mapreduce.KijiDelete.java
/** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
    // EntityId.
    final byte[] bytes = mEntityId.getHBaseRowKey();
    out.writeInt(bytes.length);
    out.write(bytes);
    // Family/Qualifier/Timestamp.
    writeOptionalValue(mFamily, out);
    writeOptionalValue(mQualifier, out);
    writeOptionalValue(mTimestamp, out);
    writeOptionalValue(mOperation, out);
}
From source file:com.chinamobile.bcbsp.util.JobStatus.java
/**
 * Serialize: write this object to out.
 *
 * @param out Writes to the output stream.
 */
@Override
public synchronized void write(DataOutput out) throws IOException {
    jobid.write(out);
    out.writeLong(setupProgress);
    out.writeLong(progress);
    out.writeLong(cleanupProgress);
    out.writeInt(runState);
    WritableUtils.writeEnum(out, this.state);
    out.writeLong(startTime);
    out.writeLong(finishTime);
    Text.writeString(out, user);
    // Text.writeString(out, schedulingInfo);
    out.writeLong(superstepCount);
    out.writeBoolean(recovery);
}