List of usage examples for java.io.DataInput.readByte()
byte readByte() throws IOException;
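Before the real-world excerpts below, here is a minimal self-contained sketch of the readByte() contract (the class name and byte values are illustrative, not taken from any of the sources below): each call returns the next byte as a signed value, and EOFException signals that the input is exhausted.

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;

public class ReadByteDemo {
  public static void main(String[] args) throws IOException {
    // DataInputStream is the standard DataInput implementation; bytes are illustrative.
    DataInput in = new DataInputStream(new ByteArrayInputStream(new byte[] { 0x41, (byte) 0xFF }));
    System.out.println(in.readByte()); // 65
    System.out.println(in.readByte()); // -1 (readByte returns a signed byte, so 0xFF reads as -1)
    try {
      in.readByte(); // no bytes left
    } catch (EOFException expected) {
      System.out.println("end of stream");
    }
  }
}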
From source file: org.apache.geode.pdx.internal.PdxInstanceImpl.java

private static PdxInputStream createDis(DataInput in, int len) {
  PdxInputStream dis;
  if (in instanceof PdxInputStream) {
    dis = new PdxInputStream((ByteBufferInputStream) in, len);
    try {
      int bytesSkipped = in.skipBytes(len);
      int bytesRemaining = len - bytesSkipped;
      while (bytesRemaining > 0) {
        in.readByte();
        bytesRemaining--;
      }
    } catch (IOException ex) {
      throw new PdxSerializationException("Could not deserialize PDX", ex);
    }
  } else {
    byte[] bytes = new byte[len];
    try {
      in.readFully(bytes);
    } catch (IOException ex) {
      throw new PdxSerializationException("Could not deserialize PDX", ex);
    }
    dis = new PdxInputStream(bytes);
  }
  return dis;
}
From source file: org.apache.hadoop.hbase.hbql.filter.RecordFilterList.java

public void readFields(final DataInput in) throws IOException {
  Configuration conf = HBaseConfiguration.create();
  byte opByte = in.readByte();
  operator = Operator.values()[opByte];
  int size = in.readInt();
  if (size > 0) {
    filters = new ArrayList<Filter>(size);
    for (int i = 0; i < size; i++) {
      Filter filter = (Filter) HbaseObjectWritable.readObject(in, conf);
      filters.add(filter);
    }
  }
}
From source file: org.apache.hadoop.hbase.HRegionInfo.java

/**
 * @deprecated Use protobuf deserialization instead.
 * @see #parseFrom(byte[])
 */
@Deprecated
public void readFields(DataInput in) throws IOException {
  // Read the single version byte. We don't ask the super class to do it
  // because it freaks out if it's not the current class's version. This method
  // can deserialize version 0 and version 1 of HRI.
  byte version = in.readByte();
  if (version == 0) {
    // This is the old HRI that carried an HTD. Migrate it. The below
    // was copied from the old 0.90 HRI readFields.
    this.endKey = Bytes.readByteArray(in);
    this.offLine = in.readBoolean();
    this.regionId = in.readLong();
    this.regionName = Bytes.readByteArray(in);
    this.split = in.readBoolean();
    this.startKey = Bytes.readByteArray(in);
    try {
      HTableDescriptor htd = new HTableDescriptor();
      htd.readFields(in);
      this.tableName = htd.getTableName();
    } catch (EOFException eofe) {
      throw new IOException("HTD not found in input buffer", eofe);
    }
    this.hashCode = in.readInt();
  } else if (getVersion() == version) {
    this.endKey = Bytes.readByteArray(in);
    this.offLine = in.readBoolean();
    this.regionId = in.readLong();
    this.regionName = Bytes.readByteArray(in);
    this.split = in.readBoolean();
    this.startKey = Bytes.readByteArray(in);
    this.tableName = TableName.valueOf(Bytes.readByteArray(in));
    this.hashCode = in.readInt();
  } else {
    throw new IOException("Non-migratable/unknown version=" + getVersion());
  }
}
From source file: org.apache.hadoop.hbase.io.HbaseObjectWritable.java

/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in
 * @param objectWritable
 * @param conf
 * @return the object
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static Object readObject(DataInput in, HbaseObjectWritable objectWritable, Configuration conf)
    throws IOException {
  Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
  Object instance;
  if (declaredClass.isPrimitive()) { // primitive types
    if (declaredClass == Boolean.TYPE) { // boolean
      instance = Boolean.valueOf(in.readBoolean());
    } else if (declaredClass == Character.TYPE) { // char
      instance = Character.valueOf(in.readChar());
    } else if (declaredClass == Byte.TYPE) { // byte
      instance = Byte.valueOf(in.readByte());
    } else if (declaredClass == Short.TYPE) { // short
      instance = Short.valueOf(in.readShort());
    } else if (declaredClass == Integer.TYPE) { // int
      instance = Integer.valueOf(in.readInt());
    } else if (declaredClass == Long.TYPE) { // long
      instance = Long.valueOf(in.readLong());
    } else if (declaredClass == Float.TYPE) { // float
      instance = Float.valueOf(in.readFloat());
    } else if (declaredClass == Double.TYPE) { // double
      instance = Double.valueOf(in.readDouble());
    } else if (declaredClass == Void.TYPE) { // void
      instance = null;
    } else {
      throw new IllegalArgumentException("Not a primitive: " + declaredClass);
    }
  } else if (declaredClass.isArray()) { // array
    if (declaredClass.equals(byte[].class)) {
      instance = Bytes.readByteArray(in);
    } else if (declaredClass.equals(Result[].class)) {
      instance = Result.readArray(in);
    } else {
      int length = in.readInt();
      instance = Array.newInstance(declaredClass.getComponentType(), length);
      for (int i = 0; i < length; i++) {
        Array.set(instance, i, readObject(in, conf));
      }
    }
  } else if (declaredClass.equals(Array.class)) { // an array not declared in CLASS_TO_CODE
    Class<?> componentType = readClass(conf, in);
    int length = in.readInt();
    instance = Array.newInstance(componentType, length);
    for (int i = 0; i < length; i++) {
      Array.set(instance, i, readObject(in, conf));
    }
  } else if (List.class.isAssignableFrom(declaredClass)) { // List
    int length = in.readInt();
    instance = new ArrayList(length);
    for (int i = 0; i < length; i++) {
      ((ArrayList) instance).add(readObject(in, conf));
    }
  } else if (declaredClass == String.class) { // String
    instance = Text.readString(in);
  } else if (declaredClass.isEnum()) { // enum
    instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
  } else if (declaredClass == Message.class) {
    String className = Text.readString(in);
    try {
      declaredClass = getClassByName(conf, className);
      instance = tryInstantiateProtobuf(declaredClass, in);
    } catch (ClassNotFoundException e) {
      LOG.error("Can't find class " + className, e);
      throw new IOException("Can't find class " + className, e);
    }
  } else { // Writable or Serializable
    Class instanceClass = null;
    int b = (byte) WritableUtils.readVInt(in);
    if (b == NOT_ENCODED) {
      String className = Text.readString(in);
      try {
        instanceClass = getClassByName(conf, className);
      } catch (ClassNotFoundException e) {
        LOG.error("Can't find class " + className, e);
        throw new IOException("Can't find class " + className, e);
      }
    } else {
      instanceClass = CODE_TO_CLASS.get(b);
    }
    if (Writable.class.isAssignableFrom(instanceClass)) {
      Writable writable = WritableFactories.newInstance(instanceClass, conf);
      try {
        writable.readFields(in);
      } catch (Exception e) {
        LOG.error("Error in readFields", e);
        throw new IOException("Error in readFields", e);
      }
      instance = writable;
      if (instanceClass == NullInstance.class) { // null
        declaredClass = ((NullInstance) instance).declaredClass;
        instance = null;
      }
    } else {
      int length = in.readInt();
      byte[] objectBytes = new byte[length];
      in.readFully(objectBytes);
      ByteArrayInputStream bis = null;
      ObjectInputStream ois = null;
      try {
        bis = new ByteArrayInputStream(objectBytes);
        ois = new ObjectInputStream(bis);
        instance = ois.readObject();
      } catch (ClassNotFoundException e) {
        LOG.error("Class not found when attempting to deserialize object", e);
        throw new IOException("Class not found when attempting to " + "deserialize object", e);
      } finally {
        if (bis != null) bis.close();
        if (ois != null) ois.close();
      }
    }
  }
  if (objectWritable != null) { // store values
    objectWritable.declaredClass = declaredClass;
    objectWritable.instance = instance;
  }
  return instance;
}
From source file: org.apache.hadoop.hbase.regionserver.wal.HLogKey.java

@Override
public void readFields(DataInput in) throws IOException {
  Version version = Version.UNVERSIONED;
  // HLogKey was not versioned in the beginning.
  // In order to introduce it now, we make use of the fact
  // that encodedRegionName was written with Bytes.writeByteArray,
  // which encodes the array length as a vint which is >= 0.
  // Hence if the vint is >= 0 we have an old version and the vint
  // encodes the length of encodedRegionName.
  // If < 0 we just read the version and the next vint is the length.
  // @see Bytes#readByteArray(DataInput)
  this.scopes = null; // writable HLogKey does not contain scopes
  int len = WritableUtils.readVInt(in);
  byte[] tablenameBytes = null;
  if (len < 0) {
    // what we just read was the version
    version = Version.fromCode(len);
    // We only compress V2 of HLogKey.
    // If compression is on, the length is handled by the dictionary
    if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
      len = WritableUtils.readVInt(in);
    }
  }
  if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
    this.encodedRegionName = new byte[len];
    in.readFully(this.encodedRegionName);
    tablenameBytes = Bytes.readByteArray(in);
  } else {
    this.encodedRegionName = Compressor.readCompressed(in, compressionContext.regionDict);
    tablenameBytes = Compressor.readCompressed(in, compressionContext.tableDict);
  }
  this.logSeqNum = in.readLong();
  this.writeTime = in.readLong();
  this.clusterIds.clear();
  if (version.atLeast(Version.INITIAL)) {
    if (in.readBoolean()) {
      // read the older log
      // Definitely is the originating cluster
      clusterIds.add(new UUID(in.readLong(), in.readLong()));
    }
  } else {
    try {
      // dummy read (former byte cluster id)
      in.readByte();
    } catch (EOFException e) {
      // Means it's a very old key, just continue
    }
  }
  try {
    this.tablename = TableName.valueOf(tablenameBytes);
  } catch (IllegalArgumentException iae) {
    if (Bytes.toString(tablenameBytes).equals(TableName.OLD_META_STR)) {
      // It is a pre-namespace meta table edit, continue with new format.
      LOG.info("Got an old .META. edit, continuing with new format ");
      this.tablename = TableName.META_TABLE_NAME;
      this.encodedRegionName = HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
    } else if (Bytes.toString(tablenameBytes).equals(TableName.OLD_ROOT_STR)) {
      this.tablename = TableName.OLD_ROOT_TABLE_NAME;
      throw iae;
    } else {
      throw iae;
    }
  }
  // Do not need to read the clusters information as we are using protobufs from 0.95
}
From source file: org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java

/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in
 * @param objectWritable
 * @param conf
 * @return the object
 * @throws IOException
 */
@SuppressWarnings("unchecked")
static Object readObject(DataInput in, HbaseObjectWritableFor96Migration objectWritable, Configuration conf)
    throws IOException {
  Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
  Object instance;
  if (declaredClass.isPrimitive()) { // primitive types
    if (declaredClass == Boolean.TYPE) { // boolean
      instance = Boolean.valueOf(in.readBoolean());
    } else if (declaredClass == Character.TYPE) { // char
      instance = Character.valueOf(in.readChar());
    } else if (declaredClass == Byte.TYPE) { // byte
      instance = Byte.valueOf(in.readByte());
    } else if (declaredClass == Short.TYPE) { // short
      instance = Short.valueOf(in.readShort());
    } else if (declaredClass == Integer.TYPE) { // int
      instance = Integer.valueOf(in.readInt());
    } else if (declaredClass == Long.TYPE) { // long
      instance = Long.valueOf(in.readLong());
    } else if (declaredClass == Float.TYPE) { // float
      instance = Float.valueOf(in.readFloat());
    } else if (declaredClass == Double.TYPE) { // double
      instance = Double.valueOf(in.readDouble());
    } else if (declaredClass == Void.TYPE) { // void
      instance = null;
    } else {
      throw new IllegalArgumentException("Not a primitive: " + declaredClass);
    }
  } else if (declaredClass.isArray()) { // array
    if (declaredClass.equals(byte[].class)) {
      instance = Bytes.readByteArray(in);
    } else {
      int length = in.readInt();
      instance = Array.newInstance(declaredClass.getComponentType(), length);
      for (int i = 0; i < length; i++) {
        Array.set(instance, i, readObject(in, conf));
      }
    }
  } else if (declaredClass.equals(Array.class)) { // an array not declared in CLASS_TO_CODE
    Class<?> componentType = readClass(conf, in);
    int length = in.readInt();
    instance = Array.newInstance(componentType, length);
    for (int i = 0; i < length; i++) {
      Array.set(instance, i, readObject(in, conf));
    }
  } else if (List.class.isAssignableFrom(declaredClass)) { // List
    int length = in.readInt();
    instance = new ArrayList(length);
    for (int i = 0; i < length; i++) {
      ((ArrayList) instance).add(readObject(in, conf));
    }
  } else if (declaredClass == String.class) { // String
    instance = Text.readString(in);
  } else if (declaredClass.isEnum()) { // enum
    instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
  } else if (declaredClass == Message.class) {
    String className = Text.readString(in);
    try {
      declaredClass = getClassByName(conf, className);
      instance = tryInstantiateProtobuf(declaredClass, in);
    } catch (ClassNotFoundException e) {
      LOG.error("Can't find class " + className, e);
      throw new IOException("Can't find class " + className, e);
    }
  } else if (Scan.class.isAssignableFrom(declaredClass)) {
    int length = in.readInt();
    byte[] scanBytes = new byte[length];
    in.readFully(scanBytes);
    ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
    instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
  } else { // Writable or Serializable
    Class instanceClass = null;
    int b = (byte) WritableUtils.readVInt(in);
    if (b == NOT_ENCODED) {
      String className = Text.readString(in);
      try {
        instanceClass = getClassByName(conf, className);
      } catch (ClassNotFoundException e) {
        LOG.error("Can't find class " + className, e);
        throw new IOException("Can't find class " + className, e);
      }
    } else {
      instanceClass = CODE_TO_CLASS.get(b);
    }
    if (Writable.class.isAssignableFrom(instanceClass)) {
      Writable writable = WritableFactories.newInstance(instanceClass, conf);
      try {
        writable.readFields(in);
      } catch (Exception e) {
        LOG.error("Error in readFields", e);
        throw new IOException("Error in readFields", e);
      }
      instance = writable;
      if (instanceClass == NullInstance.class) { // null
        declaredClass = ((NullInstance) instance).declaredClass;
        instance = null;
      }
    } else {
      int length = in.readInt();
      byte[] objectBytes = new byte[length];
      in.readFully(objectBytes);
      ByteArrayInputStream bis = null;
      ObjectInputStream ois = null;
      try {
        bis = new ByteArrayInputStream(objectBytes);
        ois = new ObjectInputStream(bis);
        instance = ois.readObject();
      } catch (ClassNotFoundException e) {
        LOG.error("Class not found when attempting to deserialize object", e);
        throw new IOException("Class not found when attempting to " + "deserialize object", e);
      } finally {
        if (bis != null) bis.close();
        if (ois != null) ois.close();
      }
    }
  }
  if (objectWritable != null) { // store values
    objectWritable.declaredClass = declaredClass;
    objectWritable.instance = instance;
  }
  return instance;
}
From source file: org.apache.hadoop.hbase.security.access.Permission.java

@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  int length = (int) in.readByte();
  if (length > 0) {
    actions = new Action[length];
    for (int i = 0; i < length; i++) {
      byte b = in.readByte();
      Action a = ACTION_BY_CODE.get(b);
      if (a == null) {
        throw new IOException(
            "Unknown action code '" + Bytes.toStringBinary(new byte[] { b }) + "' in input");
      }
      this.actions[i] = a;
    }
  } else {
    actions = new Action[0];
  }
}
From source file: org.apache.hadoop.hive.accumulo.AccumuloHiveRow.java

@Override
public void readFields(DataInput dataInput) throws IOException {
  if (dataInput.readBoolean()) {
    rowId = dataInput.readUTF();
  }
  int size = dataInput.readInt();
  for (int i = 0; i < size; i++) {
    int cfLength = dataInput.readInt();
    byte[] cfData = new byte[cfLength];
    dataInput.readFully(cfData, 0, cfLength);
    Text cf = new Text(cfData);
    int cqLength = dataInput.readInt();
    byte[] cqData = new byte[cqLength];
    dataInput.readFully(cqData, 0, cqLength);
    Text cq = new Text(cqData);
    int valSize = dataInput.readInt();
    byte[] val = new byte[valSize];
    for (int j = 0; j < valSize; j++) {
      val[j] = dataInput.readByte();
    }
    tuples.add(new ColumnTuple(cf, cq, val));
  }
}
From source file: org.apache.hadoop.hive.ql.exec.Utilities.java

public static StreamStatus readColumn(DataInput in, OutputStream out) throws IOException {
  boolean foundCrChar = false;
  while (true) {
    int b;
    try {
      b = in.readByte();
    } catch (EOFException e) {
      return StreamStatus.EOF;
    }
    if (b == Utilities.newLineCode) {
      return StreamStatus.TERMINATED;
    }
    out.write(b);
  }
  // Unreachable
}
From source file: org.apache.hadoop.hive.ql.io.orc.OrcSplit.java

@Override
public void readFields(DataInput in) throws IOException {
  // deserialize path, offset, length using FileSplit
  super.readFields(in);
  byte flags = in.readByte();
  hasFooter = (FOOTER_FLAG & flags) != 0;
  isOriginal = (ORIGINAL_FLAG & flags) != 0;
  hasBase = (BASE_FLAG & flags) != 0;
  boolean hasFileId = (HAS_FILEID_FLAG & flags) != 0;
  deltas.clear();
  int numDeltas = in.readInt();
  for (int i = 0; i < numDeltas; i++) {
    AcidInputFormat.DeltaMetaData dmd = new AcidInputFormat.DeltaMetaData();
    dmd.readFields(in);
    deltas.add(dmd);
  }
  if (hasFooter) {
    // deserialize FileMetaInfo fields
    String compressionType = Text.readString(in);
    int bufferSize = WritableUtils.readVInt(in);
    int metadataSize = WritableUtils.readVInt(in);
    // deserialize FileMetaInfo field footer
    int footerBuffSize = WritableUtils.readVInt(in);
    ByteBuffer footerBuff = ByteBuffer.allocate(footerBuffSize);
    in.readFully(footerBuff.array(), 0, footerBuffSize);
    OrcFile.WriterVersion writerVersion = ReaderImpl.getWriterVersion(WritableUtils.readVInt(in));
    fileMetaInfo = new ReaderImpl.FileMetaInfo(compressionType, bufferSize, metadataSize, footerBuff,
        writerVersion);
  }
  if (hasFileId) {
    fileId = in.readLong();
  }
}