Usage examples for the java.io.DataInput.readLong() method, collected from open-source projects.
long readLong() throws IOException;
From source file:org.apache.pig.data.BinInterSedes.java
private DataBag readBag(DataInput in, byte type) throws IOException { DataBag bag = mBagFactory.newDefaultBag(); long size;/*ww w . j a v a2s. c o m*/ // determine size of bag switch (type) { case TINYBAG: size = in.readUnsignedByte(); break; case SMALLBAG: size = in.readUnsignedShort(); break; case BAG: size = in.readLong(); break; default: int errCode = 2219; String msg = "Unexpected data while reading bag " + "from binary file."; throw new ExecException(msg, errCode, PigException.BUG); } for (long i = 0; i < size; i++) { try { Object o = readDatum(in); bag.add((Tuple) o); } catch (ExecException ee) { throw ee; } } return bag; }
From source file:org.apache.pig.data.BinInterSedes.java
/**
 * Expects binInterSedes data types (NOT DataType types!).
 * <p>
 * Dispatches on the one-byte type marker that precedes each datum and decodes
 * the corresponding value. Fixed-size numeric types use specialized markers
 * (e.g. LONG_INBYTE) so that small values occupy fewer bytes on the wire;
 * containers (tuple/bag/map/bytearray/chararray) likewise come in
 * tiny/small/regular variants that differ only in how their length is encoded.
 *
 * @see org.apache.pig.data.InterSedes#readDatum(java.io.DataInput, byte)
 */
@Override
public Object readDatum(DataInput in, byte type) throws IOException, ExecException {
    switch (type) {
    // Tuples: TUPLE_0..TUPLE_9 encode the arity in the marker itself;
    // the helper decodes all variants.
    case TUPLE_0:
    case TUPLE_1:
    case TUPLE_2:
    case TUPLE_3:
    case TUPLE_4:
    case TUPLE_5:
    case TUPLE_6:
    case TUPLE_7:
    case TUPLE_8:
    case TUPLE_9:
    case TUPLE:
    case TINYTUPLE:
    case SMALLTUPLE:
        return SedesHelper.readGenericTuple(in, type);
    case BAG:
    case TINYBAG:
    case SMALLBAG:
        return readBag(in, type);
    case MAP:
    case TINYMAP:
    case SMALLMAP:
        return readMap(in, type);
    case INTERNALMAP:
        return readInternalMap(in);
    // Integers: 0 and 1 are encoded in the marker alone (no payload bytes);
    // otherwise the payload width matches the marker's suffix.
    case INTEGER_0:
        return Integer.valueOf(0);
    case INTEGER_1:
        return Integer.valueOf(1);
    case INTEGER_INBYTE:
        return Integer.valueOf(in.readByte());
    case INTEGER_INSHORT:
        return Integer.valueOf(in.readShort());
    case INTEGER:
        return Integer.valueOf(in.readInt());
    // Longs follow the same scheme, with an extra 4-byte variant.
    case LONG_0:
        return Long.valueOf(0);
    case LONG_1:
        return Long.valueOf(1);
    case LONG_INBYTE:
        return Long.valueOf(in.readByte());
    case LONG_INSHORT:
        return Long.valueOf(in.readShort());
    case LONG_ININT:
        return Long.valueOf(in.readInt());
    case LONG:
        return Long.valueOf(in.readLong());
    // DateTime: 8-byte instant followed by a 2-byte zone offset in minutes.
    case DATETIME:
        return new DateTime(in.readLong(), DateTimeZone.forOffsetMillis(in.readShort() * ONE_MINUTE));
    case FLOAT:
        return Float.valueOf(in.readFloat());
    case DOUBLE:
        return Double.valueOf(in.readDouble());
    case BIGINTEGER:
        return readBigInteger(in);
    case BIGDECIMAL:
        return readBigDecimal(in);
    // Booleans carry their value in the marker; no payload bytes are read.
    case BOOLEAN_TRUE:
        return Boolean.valueOf(true);
    case BOOLEAN_FALSE:
        return Boolean.valueOf(false);
    case BYTE:
        return Byte.valueOf(in.readByte());
    case TINYBYTEARRAY:
    case SMALLBYTEARRAY:
    case BYTEARRAY:
        return new DataByteArray(SedesHelper.readBytes(in, type));
    case CHARARRAY:
    case SMALLCHARARRAY:
        return SedesHelper.readChararray(in, type);
    case GENERIC_WRITABLECOMPARABLE:
        return readWritable(in);
    case SCHEMA_TUPLE_BYTE_INDEX:
    case SCHEMA_TUPLE_SHORT_INDEX:
    case SCHEMA_TUPLE:
        return readSchemaTuple(in, type);
    case NULL:
        return null;
    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}
From source file:org.apache.pig.data.DataBag.java
/** * Read a bag from disk.// w ww.j a v a2s . c o m * @param in DataInput to read data from. * @throws IOException (passes it on from underlying calls). */ static DataBag read(DataInput in) throws IOException { long size = in.readLong(); // Always use a default data bag, as if it was sorted or distinct // we're guaranteed it was written out that way already, and we // don't need to mess with it. DataBag ret = BagFactory.getInstance().newDefaultBag(); for (long i = 0; i < size; i++) { Tuple t = new Tuple(); t.readFields(in); ret.add(t); } return ret; }
From source file:org.apache.pig.data.DataReaderWriter.java
/**
 * Decodes one datum from the stream based on the DataType marker byte that
 * the caller has already consumed. Containers and strings delegate to
 * type-specific helpers; scalars are read directly with the matching
 * fixed-width DataInput call.
 *
 * @param in   stream positioned just past the datum's type byte
 * @param type the DataType constant identifying what follows on the wire
 * @return the decoded value (boxed scalar, container, or null for NULL)
 * @throws IOException   on underlying read failure
 * @throws ExecException propagated from container decoding
 */
public static Object readDatum(DataInput in, byte type) throws IOException, ExecException {
    switch (type) {
    case DataType.TUPLE:
        return bytesToTuple(in);
    case DataType.BAG:
        return bytesToBag(in);
    case DataType.MAP:
        return bytesToMap(in);
    case DataType.INTERNALMAP:
        return bytesToInternalMap(in);
    case DataType.INTEGER:
        return Integer.valueOf(in.readInt());
    case DataType.LONG:
        return Long.valueOf(in.readLong());
    case DataType.FLOAT:
        return Float.valueOf(in.readFloat());
    case DataType.DOUBLE:
        return Double.valueOf(in.readDouble());
    case DataType.BOOLEAN:
        return Boolean.valueOf(in.readBoolean());
    case DataType.BYTE:
        return Byte.valueOf(in.readByte());
    case DataType.BYTEARRAY: {
        // Byte arrays are length-prefixed with a 4-byte int, then raw bytes.
        int size = in.readInt();
        byte[] ba = new byte[size];
        in.readFully(ba);
        return new DataByteArray(ba);
    }
    case DataType.BIGCHARARRAY:
        return bytesToBigCharArray(in);
    case DataType.CHARARRAY:
        return bytesToCharArray(in);
    case DataType.GENERIC_WRITABLECOMPARABLE:
        return bytesToWritable(in);
    case DataType.NULL:
        return null;
    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}
From source file:org.apache.pig.data.DefaultAbstractBag.java
/**
 * Read a bag from disk.
 *
 * @param in DataInput to read data from.
 * @throws IOException (passes it on from underlying calls).
 */
@Override
public void readFields(DataInput in) throws IOException {
    // The tuple count precedes the serialized tuples on the wire.
    long size = in.readLong();
    for (long i = 0; i < size; i++) {
        // ExecException propagates unchanged; the former
        // catch (ExecException ee) { throw ee; } was a no-op and was removed.
        Object o = sedes.readDatum(in);
        add((Tuple) o);
    }
}
From source file:org.apache.pig.data.SchemaTuple.java
/**
 * Deserializes a DateTime: an 8-byte instant (millis since epoch) followed by
 * a 2-byte time-zone offset expressed in minutes.
 *
 * @param in stream to read from
 * @param v  ignored; present only to select this overload by type
 * @return the reconstructed DateTime in its original zone offset
 * @throws IOException on underlying read failure
 */
protected static DateTime read(DataInput in, DateTime v) throws IOException {
    long instantMillis = in.readLong();
    int offsetMinutes = in.readShort();
    return new DateTime(instantMillis, DateTimeZone.forOffsetMillis(offsetMinutes * ONE_MINUTE));
}
From source file:org.apache.rya.accumulo.mr.RyaStatementWritable.java
/**
 * Loads a RyaStatementWritable by reading data from an input stream.
 * Creates a new RyaStatement and assigns it to this RyaStatementWritable.
 * <p>
 * Wire order (must match the corresponding write): row, column family,
 * column qualifier, column visibility, value (each via the length-prefixed
 * {@code read} helper), then a boolean presence flag followed by an optional
 * 8-byte timestamp.
 *
 * @param dataInput A stream containing serialized statement data.
 * @throws IOException on read failure or if the triple bytes cannot be resolved.
 */
@Override
public void readFields(DataInput dataInput) throws IOException {
    byte[] row = read(dataInput);
    byte[] columnFamily = read(dataInput);
    byte[] columnQualifier = read(dataInput);
    byte[] columnVisibility = read(dataInput);
    byte[] value = read(dataInput);
    // Timestamp is optional: a boolean flag says whether a long follows.
    boolean b = dataInput.readBoolean();
    Long timestamp = null;
    if (b) {
        timestamp = dataInput.readLong();
    }
    try {
        // Only row/family/qualifier participate in triple resolution;
        // visibility, value, and timestamp are attached afterwards via setters.
        ryaStatement = ryaContext.deserializeTriple(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO,
                new TripleRow(row, columnFamily, columnQualifier));
        ryaStatement.setColumnVisibility(columnVisibility);
        ryaStatement.setValue(value);
        ryaStatement.setTimestamp(timestamp);
    } catch (TripleRowResolverException e) {
        // Wrap so callers of the Writable contract only see IOException.
        throw new IOException(e);
    }
}
From source file:org.apache.sysml.runtime.compress.CompressedMatrixBlock.java
@Override public void readFields(DataInput in) throws IOException { boolean compressed = in.readBoolean(); //deserialize uncompressed block if (!compressed) { super.readFields(in); return;/*from w w w . java 2 s. c o m*/ } //deserialize compressed block rlen = in.readInt(); clen = in.readInt(); nonZeros = in.readLong(); int ncolGroups = in.readInt(); _colGroups = new ArrayList<ColGroup>(ncolGroups); for (int i = 0; i < ncolGroups; i++) { CompressionType ctype = CompressionType.values()[in.readByte()]; ColGroup grp = null; //create instance of column group switch (ctype) { case UNCOMPRESSED: grp = new ColGroupUncompressed(); break; case OLE_BITMAP: grp = new ColGroupOLE(); break; case RLE_BITMAP: grp = new ColGroupRLE(); break; } //deserialize and add column group grp.readFields(in); _colGroups.add(grp); } }
From source file:org.apache.sysml.runtime.matrix.data.FrameBlock.java
/**
 * Deserializes this frame block: a header (row count, column count, a flag
 * for default metadata), then per column its value type, optional name and
 * metadata, and the column data array.
 *
 * @param in stream to read from
 * @throws IOException on read failure or an unsupported column value type
 */
@Override
public void readFields(DataInput in) throws IOException {
    // Read head (rows, cols). isDefaultMeta indicates the writer omitted
    // per-column names/ndistinct/mv values, so defaults are synthesized below.
    _numRows = in.readInt();
    int numCols = in.readInt();
    boolean isDefaultMeta = in.readBoolean();
    // Allocate schema/meta data arrays, reusing existing ones when the
    // column count matches to avoid churn on repeated deserialization.
    _schema = (_schema != null && _schema.length == numCols) ? _schema : new ValueType[numCols];
    _colnames = (_colnames != null && _colnames.length == numCols) ? _colnames : new String[numCols];
    _colmeta = (_colmeta != null && _colmeta.length == numCols) ? _colmeta : new ColumnMetadata[numCols];
    _coldata = (_coldata != null && _coldata.length == numCols) ? _coldata : new Array[numCols];
    // Read columns (value type, meta, data).
    for (int j = 0; j < numCols; j++) {
        ValueType vt = ValueType.values()[in.readByte()];
        String name = isDefaultMeta ? createColName(j) : in.readUTF();
        long ndistinct = isDefaultMeta ? 0 : in.readLong();
        String mvvalue = isDefaultMeta ? null : in.readUTF();
        Array arr = null;
        // Allocate the backing array with the concrete type matching vt.
        switch (vt) {
        case STRING:
            arr = new StringArray(new String[_numRows]);
            break;
        case BOOLEAN:
            arr = new BooleanArray(new boolean[_numRows]);
            break;
        case INT:
            arr = new LongArray(new long[_numRows]);
            break;
        case DOUBLE:
            arr = new DoubleArray(new double[_numRows]);
            break;
        default:
            throw new IOException("Unsupported value type: " + vt);
        }
        arr.readFields(in);
        _schema[j] = vt;
        _colnames[j] = name;
        // An empty missing-value string is normalized to null.
        _colmeta[j] = new ColumnMetadata(ndistinct, (mvvalue == null || mvvalue.isEmpty()) ? null : mvvalue);
        _coldata[j] = arr;
    }
}
From source file:org.apache.sysml.runtime.matrix.data.MatrixBlock.java
private long readNnzInfo(DataInput in, boolean ultrasparse) throws IOException { //note: if ultrasparse, int always sufficient because nnz<rlen // where rlen is limited to integer long lrlen = (long) rlen; long lclen = (long) clen; //read long if required, otherwise int (see writeNnzInfo, consistency required) if (lrlen * lclen > Integer.MAX_VALUE && !ultrasparse) { nonZeros = in.readLong(); } else {/*from w ww. j a va 2s . c o m*/ nonZeros = in.readInt(); } return nonZeros; }