List of usage examples for java.io.DataInput.readDouble()
double readDouble() throws IOException;
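The DataInput contract specifies that readDouble() consumes eight input bytes and returns the double value they encode (as if by applying Double.longBitsToDouble to the result of readLong()). Before the examples from real projects below, here is a minimal, self-contained round trip through DataOutputStream and DataInputStream; the class name and values are illustrative only, not taken from any of the sources listed here:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadDoubleDemo {
    public static void main(String[] args) throws IOException {
        // Write a double with DataOutput.writeDouble ...
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bos)) {
            out.writeDouble(3.141592653589793);
        }
        // ... then read it back with DataInput.readDouble (8 bytes, big-endian).
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
            double value = in.readDouble();
            System.out.println(value); // prints 3.141592653589793
        }
    }
}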
From source file:com.marklogic.tree.ExpandedTree.java
@Override
public void readFields(DataInput in) throws IOException {
    uriKey = in.readLong();
    uniqKey = in.readLong();
    linkKey = in.readLong();
    numKeys = in.readInt();
    if (numKeys > 0) {
        keys = new long[numKeys];
        for (int i = 0; i < numKeys; i++) {
            keys[i] = in.readLong();
        }
    }
    int atomDataLen = in.readInt();
    if (atomDataLen > 0) {
        atomData = new byte[atomDataLen];
        for (int i = 0; i < atomDataLen; i++) {
            atomData[i] = in.readByte();
        }
    }
    atomLimit = in.readInt();
    if (atomLimit > 0) {
        atomIndex = new int[atomLimit + 1];
        for (int i = 0; i < atomLimit + 1; i++) {
            atomIndex[i] = in.readInt();
        }
    }
    int nodeNameNameAtomLen = in.readInt();
    if (nodeNameNameAtomLen > 0) {
        nodeNameNameAtom = new int[nodeNameNameAtomLen];
        nodeNameNamespaceAtom = new int[nodeNameNameAtomLen];
        for (int i = 0; i < nodeNameNameAtomLen; i++) {
            nodeNameNameAtom[i] = in.readInt();
            nodeNameNamespaceAtom[i] = in.readInt();
        }
    }
    numNodeReps = in.readInt();
    if (numNodeReps > 0) {
        nodes = new NodeImpl[numNodeReps];
        nodeOrdinal = new long[numNodeReps];
        nodeKind = new byte[numNodeReps];
        nodeRepID = new int[numNodeReps];
        nodeParentNodeRepID = new int[numNodeReps];
        for (int i = 0; i < numNodeReps; i++) {
            nodeOrdinal[i] = in.readLong();
            nodeKind[i] = in.readByte();
            nodeRepID[i] = in.readInt();
            nodeParentNodeRepID[i] = in.readInt();
        }
    }
    int numElemNodeReps = in.readInt();
    if (numElemNodeReps > 0) {
        elemNodeNodeNameRepID = new int[numElemNodeReps];
        elemNodeAttrNodeRepID = new int[numElemNodeReps];
        elemNodeChildNodeRepID = new int[numElemNodeReps];
        elemNodeElemDeclRepID = new int[numElemNodeReps];
        elemNodeNumAttributes = new int[numElemNodeReps];
        elemNodeNumDefaultAttrs = new int[numElemNodeReps];
        elemNodeNumChildren = new int[numElemNodeReps];
        elemNodeFlags = new int[numElemNodeReps];
        for (int i = 0; i < numElemNodeReps; i++) {
            elemNodeNodeNameRepID[i] = in.readInt();
            elemNodeAttrNodeRepID[i] = in.readInt();
            elemNodeChildNodeRepID[i] = in.readInt();
            elemNodeElemDeclRepID[i] = in.readInt();
            elemNodeNumAttributes[i] = in.readInt();
            elemNodeNumDefaultAttrs[i] = in.readInt();
            elemNodeNumChildren[i] = in.readInt();
            elemNodeFlags[i] = in.readInt();
        }
    }
    int numAttrNodeReps = in.readInt();
    if (numAttrNodeReps > 0) {
        attrNodeNodeNameRepID = new int[numAttrNodeReps];
        attrNodeTextRepID = new int[numAttrNodeReps];
        attrNodeAttrDeclRepID = new int[numAttrNodeReps];
        for (int i = 0; i < numAttrNodeReps; i++) {
            attrNodeNodeNameRepID[i] = in.readInt();
            attrNodeTextRepID[i] = in.readInt();
            attrNodeAttrDeclRepID[i] = in.readInt();
        }
    }
    numLinkNodeReps = in.readInt();
    if (numLinkNodeReps > 0) {
        linkNodeKey = new long[numLinkNodeReps];
        linkNodeNodeCount = new long[numLinkNodeReps];
        linkNodeNodeNameRepID = new int[numLinkNodeReps];
        linkNodeNodeRepID = new int[numLinkNodeReps];
        for (int i = 0; i < numLinkNodeReps; i++) {
            linkNodeKey[i] = in.readLong();
            linkNodeNodeCount[i] = in.readLong();
            linkNodeNodeNameRepID[i] = in.readInt();
            linkNodeNodeRepID[i] = in.readInt();
        }
    }
    int numDocNodeReps = in.readInt();
    if (numDocNodeReps > 0) {
        docNodeTextRepID = new int[numDocNodeReps];
        docNodeChildNodeRepID = new int[numDocNodeReps];
        docNodeNumChildren = new int[numDocNodeReps];
        for (int i = 0; i < numDocNodeReps; i++) {
            docNodeTextRepID[i] = in.readInt();
            docNodeChildNodeRepID[i] = in.readInt();
            docNodeNumChildren[i] = in.readInt();
        }
    }
    int numPINodeReps = in.readInt();
    if (numPINodeReps > 0) {
        piNodeTargetAtom = new int[numPINodeReps];
        piNodeTextRepID = new int[numPINodeReps];
        for (int i = 0; i < numPINodeReps; i++) {
            piNodeTargetAtom[i] = in.readInt();
            piNodeTextRepID[i] = in.readInt();
        }
    }
    numNSNodeReps = in.readInt();
    if (numNSNodeReps > 0) {
        nsNodeOrdinal = new long[numNSNodeReps];
        nsNodePrevNSNodeRepID = new int[numNSNodeReps];
        nsNodePrefixAtom = new int[numNSNodeReps];
        nsNodeUriAtom = new int[numNSNodeReps];
        for (int i = 0; i < numNSNodeReps; i++) {
            nsNodeOrdinal[i] = in.readLong();
            nsNodePrevNSNodeRepID[i] = in.readInt();
            nsNodePrefixAtom[i] = in.readInt();
            nsNodeUriAtom[i] = in.readInt();
        }
    }
    // skip permission node since it's not exposed to the API
    uriTextRepID = in.readInt();
    colsTextRepID = in.readInt();
    numTextReps = in.readInt();
    if (numTextReps > 0) {
        textReps = new int[numTextReps];
        for (int i = 0; i < numTextReps; i++) {
            textReps[i] = in.readInt();
        }
    }
    int numArrayNodeReps = in.readInt();
    if (numArrayNodeReps > 0) {
        arrayNodeTextRepID = new int[numArrayNodeReps];
        arrayNodeChildNodeRepID = new int[numArrayNodeReps];
        arrayNodeNumChildren = new int[numArrayNodeReps];
        for (int i = 0; i < numArrayNodeReps; i++) {
            arrayNodeTextRepID[i] = in.readInt();
            arrayNodeChildNodeRepID[i] = in.readInt();
            arrayNodeNumChildren[i] = in.readInt();
        }
    }
    int numDoubles = in.readInt();
    if (numDoubles > 0) {
        doubles = new double[numDoubles];
        for (int i = 0; i < numDoubles; i++) {
            doubles[i] = in.readDouble();
        }
    }
}
From source file:org.apache.hadoop.hbase.io.HbaseObjectWritable.java
/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in
 * @param objectWritable
 * @param conf
 * @return the object
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static Object readObject(DataInput in, HbaseObjectWritable objectWritable, Configuration conf)
        throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else if (declaredClass.equals(Result[].class)) {
            instance = Result.readArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) { // an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) { // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else { // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) { // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            } finally {
                if (bis != null) bis.close();
                if (ois != null) ois.close();
            }
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}
From source file:org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java
/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in
 * @param objectWritable
 * @param conf
 * @return the object
 * @throws IOException
 */
@SuppressWarnings("unchecked")
static Object readObject(DataInput in, HbaseObjectWritableFor96Migration objectWritable, Configuration conf)
        throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) { // an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) { // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else if (Scan.class.isAssignableFrom(declaredClass)) {
        int length = in.readInt();
        byte[] scanBytes = new byte[length];
        in.readFully(scanBytes);
        ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
        instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
    } else { // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) { // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            } finally {
                if (bis != null) bis.close();
                if (ois != null) ois.close();
            }
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}
From source file:org.apache.hama.ml.ann.NeuralNetwork.java
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void readFields(DataInput input) throws IOException {
    // read model type
    this.modelType = WritableUtils.readString(input);
    // read learning rate
    this.learningRate = input.readDouble();
    // read model path
    this.modelPath = WritableUtils.readString(input);
    if (this.modelPath.equals("null")) {
        this.modelPath = null;
    }
    // read feature transformer
    int bytesLen = input.readInt();
    byte[] featureTransformerBytes = new byte[bytesLen];
    for (int i = 0; i < featureTransformerBytes.length; ++i) {
        featureTransformerBytes[i] = input.readByte();
    }
    Class<? extends FeatureTransformer> featureTransformerCls =
            (Class<? extends FeatureTransformer>) SerializationUtils.deserialize(featureTransformerBytes);
    Constructor[] constructors = featureTransformerCls.getDeclaredConstructors();
    Constructor constructor = constructors[0];
    try {
        this.featureTransformer = (FeatureTransformer) constructor.newInstance(new Object[] {});
    } catch (InstantiationException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (InvocationTargetException e) {
        e.printStackTrace();
    }
}
From source file:org.apache.hama.ml.perception.SmallMultiLayerPerceptron.java
@SuppressWarnings("rawtypes")
@Override
public void readFields(DataInput input) throws IOException {
    this.MLPType = WritableUtils.readString(input);
    this.learningRate = input.readDouble();
    this.regularization = input.readDouble();
    this.momentum = input.readDouble();
    this.numberOfLayers = input.readInt();
    this.squashingFunctionName = WritableUtils.readString(input);
    this.costFunctionName = WritableUtils.readString(input);
    this.squashingFunction = FunctionFactory.createDoubleFunction(this.squashingFunctionName);
    this.costFunction = FunctionFactory.createDoubleDoubleFunction(this.costFunctionName);
    // read the number of neurons for each layer
    this.layerSizeArray = new int[this.numberOfLayers];
    for (int i = 0; i < numberOfLayers; ++i) {
        this.layerSizeArray[i] = input.readInt();
    }
    this.weightMatrice = new DenseDoubleMatrix[this.numberOfLayers - 1];
    for (int i = 0; i < numberOfLayers - 1; ++i) {
        this.weightMatrice[i] = (DenseDoubleMatrix) MatrixWritable.read(input);
    }
    // read feature transformer
    int bytesLen = input.readInt();
    byte[] featureTransformerBytes = new byte[bytesLen];
    for (int i = 0; i < featureTransformerBytes.length; ++i) {
        featureTransformerBytes[i] = input.readByte();
    }
    Class featureTransformerCls = (Class) SerializationUtils.deserialize(featureTransformerBytes);
    Constructor constructor = featureTransformerCls.getConstructors()[0];
    try {
        this.featureTransformer = (FeatureTransformer) constructor.newInstance(new Object[] {});
    } catch (InstantiationException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (InvocationTargetException e) {
        e.printStackTrace();
    }
}
From source file:org.apache.hawq.pxf.service.io.GPDBWritable.java
@Override
public void readFields(DataInput in) throws IOException {
    /*
     * extract pkt len.
     *
     * GPSQL-1107:
     * The DataInput might already be empty (EOF), but we can't check it beforehand.
     * If that's the case, pktlen is updated to -1, to mark that the object is still empty.
     * (can be checked with isEmpty()).
     */
    pktlen = readPktLen(in);
    if (isEmpty()) {
        return;
    }
    /* extract the version and col cnt */
    int version = in.readShort();
    int curOffset = 4 + 2;
    int colCnt;
    /* !!! Check VERSION !!! */
    if (version != GPDBWritable.VERSION && version != GPDBWritable.PREV_VERSION) {
        throw new IOException("Current GPDBWritable version(" + GPDBWritable.VERSION
                + ") does not match input version(" + version + ")");
    }
    if (version == GPDBWritable.VERSION) {
        errorFlag = in.readByte();
        curOffset += 1;
    }
    colCnt = in.readShort();
    curOffset += 2;
    /* Extract Column Type */
    colType = new int[colCnt];
    DBType[] coldbtype = new DBType[colCnt];
    for (int i = 0; i < colCnt; i++) {
        int enumType = (in.readByte());
        curOffset += 1;
        if (enumType == DBType.BIGINT.ordinal()) {
            colType[i] = BIGINT.getOID();
            coldbtype[i] = DBType.BIGINT;
        } else if (enumType == DBType.BOOLEAN.ordinal()) {
            colType[i] = BOOLEAN.getOID();
            coldbtype[i] = DBType.BOOLEAN;
        } else if (enumType == DBType.FLOAT8.ordinal()) {
            colType[i] = FLOAT8.getOID();
            coldbtype[i] = DBType.FLOAT8;
        } else if (enumType == DBType.INTEGER.ordinal()) {
            colType[i] = INTEGER.getOID();
            coldbtype[i] = DBType.INTEGER;
        } else if (enumType == DBType.REAL.ordinal()) {
            colType[i] = REAL.getOID();
            coldbtype[i] = DBType.REAL;
        } else if (enumType == DBType.SMALLINT.ordinal()) {
            colType[i] = SMALLINT.getOID();
            coldbtype[i] = DBType.SMALLINT;
        } else if (enumType == DBType.BYTEA.ordinal()) {
            colType[i] = BYTEA.getOID();
            coldbtype[i] = DBType.BYTEA;
        } else if (enumType == DBType.TEXT.ordinal()) {
            colType[i] = TEXT.getOID();
            coldbtype[i] = DBType.TEXT;
        } else {
            throw new IOException("Unknown GPDBWritable.DBType ordinal value");
        }
    }
    /* Extract null bit array */
    byte[] nullbytes = new byte[getNullByteArraySize(colCnt)];
    in.readFully(nullbytes);
    curOffset += nullbytes.length;
    boolean[] colIsNull = byteArrayToBooleanArray(nullbytes, colCnt);
    /* extract column value */
    colValue = new Object[colCnt];
    for (int i = 0; i < colCnt; i++) {
        if (!colIsNull[i]) {
            /* Skip the alignment padding */
            int skipbytes = roundUpAlignment(curOffset, coldbtype[i].getAlignment()) - curOffset;
            for (int j = 0; j < skipbytes; j++) {
                in.readByte();
            }
            curOffset += skipbytes;
            /* For fixed length type, increment the offset according to type type length here.
             * For var length type (BYTEA, TEXT), we'll read 4 byte length header and the
             * actual payload.
             */
            int varcollen = -1;
            if (coldbtype[i].isVarLength()) {
                varcollen = in.readInt();
                curOffset += 4 + varcollen;
            } else {
                curOffset += coldbtype[i].getTypeLength();
            }
            switch (DataType.get(colType[i])) {
            case BIGINT: {
                colValue[i] = in.readLong();
                break;
            }
            case BOOLEAN: {
                colValue[i] = in.readBoolean();
                break;
            }
            case FLOAT8: {
                colValue[i] = in.readDouble();
                break;
            }
            case INTEGER: {
                colValue[i] = in.readInt();
                break;
            }
            case REAL: {
                colValue[i] = in.readFloat();
                break;
            }
            case SMALLINT: {
                colValue[i] = in.readShort();
                break;
            }
            /* For BYTEA column, it has a 4 byte var length header. */
            case BYTEA: {
                colValue[i] = new byte[varcollen];
                in.readFully((byte[]) colValue[i]);
                break;
            }
            /* For text formatted column, it has a 4 byte var length header
             * and it's always null terminated string.
             * So, we can remove the last "\0" when constructing the string.
             */
            case TEXT: {
                byte[] data = new byte[varcollen];
                in.readFully(data, 0, varcollen);
                colValue[i] = new String(data, 0, varcollen - 1, CHARSET);
                break;
            }
            default:
                throw new IOException("Unknown GPDBWritable ColType");
            }
        }
    }
    /* Skip the ending alignment padding */
    int skipbytes = roundUpAlignment(curOffset, 8) - curOffset;
    for (int j = 0; j < skipbytes; j++) {
        in.readByte();
    }
    curOffset += skipbytes;
    if (errorFlag != 0) {
        throw new IOException("Received error value " + errorFlag + " from format");
    }
}
From source file:org.apache.mahout.classifier.sgd.TPrior.java
@Override
public void readFields(DataInput in) throws IOException {
    df = in.readDouble();
}
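Most of the readFields methods above come from classes implementing org.apache.hadoop.io.Writable, where readFields(DataInput) must consume fields in exactly the order the matching write(DataOutput) produced them. Below is a minimal sketch of that pairing for a single double field; the class and field names are hypothetical and not taken from Mahout's TPrior or any other source listed here:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Hypothetical Writable holding one double; illustrates the writeDouble/readDouble pairing.
public class DoubleHolderWritable implements Writable {
    private double value;

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeDouble(value); // serialized as 8 bytes
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        value = in.readDouble(); // read back in the same order it was written
    }

    public double get() {
        return value;
    }

    public void set(double value) {
        this.value = value;
    }
}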
From source file:org.apache.marmotta.kiwi.io.KiWiIO.java
/**
 * Read a KiWiDoubleLiteral serialized with writeDoubleLiteral from a DataInput source
 *
 * @param input the source
 * @return the de-serialized KiWiDoubleLiteral
 * @throws IOException
 */
public static KiWiDoubleLiteral readDoubleLiteral(DataInput input) throws IOException {
    long id = input.readLong();
    if (id == -1) {
        return null;
    } else {
        double content = input.readDouble();
        KiWiUriResource dtype = readURI(input);
        Date created = new Date(input.readLong());
        KiWiDoubleLiteral r = new KiWiDoubleLiteral(content, dtype, created);
        r.setId(id);
        return r;
    }
}
From source file:org.apache.pig.data.BinInterSedes.java
/**
 * Expects binInterSedes data types (NOT DataType types!)
 * <p>
 *
 * @see org.apache.pig.data.InterSedes#readDatum(java.io.DataInput, byte)
 */
@Override
public Object readDatum(DataInput in, byte type) throws IOException, ExecException {
    switch (type) {
    case TUPLE_0:
    case TUPLE_1:
    case TUPLE_2:
    case TUPLE_3:
    case TUPLE_4:
    case TUPLE_5:
    case TUPLE_6:
    case TUPLE_7:
    case TUPLE_8:
    case TUPLE_9:
    case TUPLE:
    case TINYTUPLE:
    case SMALLTUPLE:
        return SedesHelper.readGenericTuple(in, type);
    case BAG:
    case TINYBAG:
    case SMALLBAG:
        return readBag(in, type);
    case MAP:
    case TINYMAP:
    case SMALLMAP:
        return readMap(in, type);
    case INTERNALMAP:
        return readInternalMap(in);
    case INTEGER_0:
        return Integer.valueOf(0);
    case INTEGER_1:
        return Integer.valueOf(1);
    case INTEGER_INBYTE:
        return Integer.valueOf(in.readByte());
    case INTEGER_INSHORT:
        return Integer.valueOf(in.readShort());
    case INTEGER:
        return Integer.valueOf(in.readInt());
    case LONG_0:
        return Long.valueOf(0);
    case LONG_1:
        return Long.valueOf(1);
    case LONG_INBYTE:
        return Long.valueOf(in.readByte());
    case LONG_INSHORT:
        return Long.valueOf(in.readShort());
    case LONG_ININT:
        return Long.valueOf(in.readInt());
    case LONG:
        return Long.valueOf(in.readLong());
    case DATETIME:
        return new DateTime(in.readLong(), DateTimeZone.forOffsetMillis(in.readShort() * ONE_MINUTE));
    case FLOAT:
        return Float.valueOf(in.readFloat());
    case DOUBLE:
        return Double.valueOf(in.readDouble());
    case BIGINTEGER:
        return readBigInteger(in);
    case BIGDECIMAL:
        return readBigDecimal(in);
    case BOOLEAN_TRUE:
        return Boolean.valueOf(true);
    case BOOLEAN_FALSE:
        return Boolean.valueOf(false);
    case BYTE:
        return Byte.valueOf(in.readByte());
    case TINYBYTEARRAY:
    case SMALLBYTEARRAY:
    case BYTEARRAY:
        return new DataByteArray(SedesHelper.readBytes(in, type));
    case CHARARRAY:
    case SMALLCHARARRAY:
        return SedesHelper.readChararray(in, type);
    case GENERIC_WRITABLECOMPARABLE:
        return readWritable(in);
    case SCHEMA_TUPLE_BYTE_INDEX:
    case SCHEMA_TUPLE_SHORT_INDEX:
    case SCHEMA_TUPLE:
        return readSchemaTuple(in, type);
    case NULL:
        return null;
    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}
From source file:org.apache.pig.data.DataReaderWriter.java
public static Object readDatum(DataInput in, byte type) throws IOException, ExecException {
    switch (type) {
    case DataType.TUPLE:
        return bytesToTuple(in);
    case DataType.BAG:
        return bytesToBag(in);
    case DataType.MAP:
        return bytesToMap(in);
    case DataType.INTERNALMAP:
        return bytesToInternalMap(in);
    case DataType.INTEGER:
        return Integer.valueOf(in.readInt());
    case DataType.LONG:
        return Long.valueOf(in.readLong());
    case DataType.FLOAT:
        return Float.valueOf(in.readFloat());
    case DataType.DOUBLE:
        return Double.valueOf(in.readDouble());
    case DataType.BOOLEAN:
        return Boolean.valueOf(in.readBoolean());
    case DataType.BYTE:
        return Byte.valueOf(in.readByte());
    case DataType.BYTEARRAY: {
        int size = in.readInt();
        byte[] ba = new byte[size];
        in.readFully(ba);
        return new DataByteArray(ba);
    }
    case DataType.BIGCHARARRAY:
        return bytesToBigCharArray(in);
    case DataType.CHARARRAY:
        return bytesToCharArray(in);
    case DataType.GENERIC_WRITABLECOMPARABLE:
        return bytesToWritable(in);
    case DataType.NULL:
        return null;
    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}
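A closing note on the wire format all of the above rely on: per the DataInput specification, readDouble() behaves as if it read a long with readLong() and converted it with Double.longBitsToDouble, so the two approaches in the sketch below decode identical values. This is a small self-contained check; the class name is illustrative only:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadDoubleBitsCheck {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bos)) {
            // Write the same double twice so it can be read back two different ways.
            out.writeDouble(2.718281828459045);
            out.writeDouble(2.718281828459045);
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
            double viaReadDouble = in.readDouble();
            // Equivalent decoding: eight big-endian bytes as a long, then longBitsToDouble.
            double viaReadLong = Double.longBitsToDouble(in.readLong());
            System.out.println(viaReadDouble == viaReadLong); // prints true
        }
    }
}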