List of usage examples for java.io.DataInput.readBoolean()
boolean readBoolean() throws IOException;
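readBoolean() reads one input byte and returns true if that byte is nonzero, false if it is zero. Before the source-file examples below, here is a minimal self-contained sketch (class name and stream wiring are illustrative, not from any of the listed sources) pairing it with DataOutput.writeBoolean():

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadBooleanDemo {
    public static void main(String[] args) throws IOException {
        // Write a boolean as a single byte (1 for true, 0 for false).
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeBoolean(true);
        }
        // Read it back; DataInputStream implements DataInput.
        DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(in.readBoolean()); // prints: true
    }
}

The examples that follow all use the same pattern inside Writable.readFields(DataInput): booleans are read back in exactly the order the matching write() method wrote them.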
From source file:org.apache.gora.infinispan.query.InfinispanQuery.java
@Override
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    sortingField = WritableUtils.readString(in);
    isAscendant = in.readBoolean();
    String locationString = WritableUtils.readString(in);
    if (!locationString.equals(""))
        location = new InetSocketAddress(locationString.split(ADDR_DELIMITATOR)[0],
                Integer.valueOf(locationString.split(ADDR_DELIMITATOR)[1]));
}
From source file:org.apache.gora.query.impl.QueryBase.java
@Override
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
    String dataStoreClass = Text.readString(in);
    try {
        dataStore = (DataStoreBase<K, T>) ReflectionUtils
                .newInstance(ClassLoadingUtils.loadClass(dataStoreClass), conf);
        dataStore.readFields(in);
    } catch (ClassNotFoundException ex) {
        throw new IOException(ex);
    }
    boolean[] nullFields = IOUtils.readNullFieldsInfo(in);
    if (!nullFields[0])
        queryString = Text.readString(in);
    if (!nullFields[1])
        fields = IOUtils.readStringArray(in);
    if (!nullFields[2])
        startKey = IOUtils.deserialize(getConf(), in, null, dataStore.getKeyClass());
    if (!nullFields[3])
        endKey = IOUtils.deserialize(getConf(), in, null, dataStore.getKeyClass());
    if (!nullFields[4]) {
        String filterClass = Text.readString(in);
        try {
            filter = (Filter<K, T>) ReflectionUtils.newInstance(ClassLoadingUtils.loadClass(filterClass), conf);
            filter.readFields(in);
        } catch (ClassNotFoundException e) {
            throw new IOException(e);
        }
    }
    startTime = WritableUtils.readVLong(in);
    endTime = WritableUtils.readVLong(in);
    limit = WritableUtils.readVLong(in);
    localFilterEnabled = in.readBoolean();
}
From source file:org.apache.hadoop.hbase.ccindex.TimeRangeFilter.java
public void readFields(final DataInput in) throws IOException {
    this.columnFamily = Bytes.readByteArray(in);
    if (this.columnFamily.length == 0) {
        this.columnFamily = null;
    }
    this.columnQualifier = Bytes.readByteArray(in);
    if (this.columnQualifier.length == 0) {
        this.columnQualifier = null;
    }
    this.startTs = in.readLong();
    this.endTs = in.readLong();
    this.foundColumn = in.readBoolean();
    this.filterIfMissing = in.readBoolean();
    this.latestVersionOnly = in.readBoolean();
}
From source file:org.apache.hadoop.hbase.hbql.filter.PageFilter.java
public void readFields(final DataInput in) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    this.pageSize = in.readLong();
    this.verbose = in.readBoolean();
    this.filter = (Filter) HbaseObjectWritable.readObject(in, conf);
}
From source file:org.apache.hadoop.hbase.hbql.filter.RecordFilter.java
public void readFields(DataInput in) throws IOException {
    try {
        this.verbose = in.readBoolean();
        final byte[] b = Bytes.readByteArray(in);
        this.expressionTree = (ExpressionTree) IO.getSerialization().getScalarFromBytes(FieldType.ObjectType, b);
        this.getHRecord().setMappingContext(this.getExpressionTree().getMappingContext());
        this.getMapping().resetDefaultValues();
    } catch (HBqlException e) {
        e.printStackTrace();
        Utils.logException(LOG, e);
        throw new IOException(e.getCause());
    }
}
From source file:org.apache.hadoop.hbase.hbql.filter.SingleColumnValueFilter.java
public void readFields(final DataInput in) throws IOException {
    this.verbose = in.readBoolean();
    this.columnFamily = Bytes.readByteArray(in);
    if (this.columnFamily.length == 0) {
        this.columnFamily = null;
    }
    this.columnQualifier = Bytes.readByteArray(in);
    if (this.columnQualifier.length == 0) {
        this.columnQualifier = null;
    }
    this.compareOp = CompareFilter.CompareOp.valueOf(in.readUTF());
    this.comparator = (WritableByteArrayComparable) HbaseObjectWritable.readObject(in, null);
    this.foundColumn = in.readBoolean();
    this.matchedColumn = in.readBoolean();
    this.filterIfMissing = in.readBoolean();
    this.latestVersionOnly = in.readBoolean();
}
From source file:org.apache.hadoop.hbase.HRegionInfo.java
/**
 * @deprecated Use protobuf deserialization instead.
 * @see #parseFrom(byte[])
 */
@Deprecated
public void readFields(DataInput in) throws IOException {
    // Read the single version byte. We don't ask the super class to do it
    // because it freaks out if it's not the current class's version. This
    // method can deserialize version 0 and version 1 of HRI.
    byte version = in.readByte();
    if (version == 0) {
        // This is the old HRI that carried an HTD. Migrate it. The below
        // was copied from the old 0.90 HRI readFields.
        this.endKey = Bytes.readByteArray(in);
        this.offLine = in.readBoolean();
        this.regionId = in.readLong();
        this.regionName = Bytes.readByteArray(in);
        this.split = in.readBoolean();
        this.startKey = Bytes.readByteArray(in);
        try {
            HTableDescriptor htd = new HTableDescriptor();
            htd.readFields(in);
            this.tableName = htd.getTableName();
        } catch (EOFException eofe) {
            throw new IOException("HTD not found in input buffer", eofe);
        }
        this.hashCode = in.readInt();
    } else if (getVersion() == version) {
        this.endKey = Bytes.readByteArray(in);
        this.offLine = in.readBoolean();
        this.regionId = in.readLong();
        this.regionName = Bytes.readByteArray(in);
        this.split = in.readBoolean();
        this.startKey = Bytes.readByteArray(in);
        this.tableName = TableName.valueOf(Bytes.readByteArray(in));
        this.hashCode = in.readInt();
    } else {
        throw new IOException("Non-migratable/unknown version=" + getVersion());
    }
}
From source file:org.apache.hadoop.hbase.HTableDescriptor.java
/**
 * <em>INTERNAL</em> This method is a part of the {@link WritableComparable} interface
 * and is used for de-serialization of the HTableDescriptor over RPC.
 * @deprecated Writables are going away. Use pb {@link #parseFrom(byte[])} instead.
 */
@Deprecated
@Override
public void readFields(DataInput in) throws IOException {
    int version = in.readInt();
    if (version < 3)
        throw new IOException("versions < 3 are not supported (and never existed!?)");
    // version 3+
    name = TableName.valueOf(Bytes.readByteArray(in));
    setRootRegion(in.readBoolean());
    setMetaRegion(in.readBoolean());
    values.clear();
    configuration.clear();
    int numVals = in.readInt();
    for (int i = 0; i < numVals; i++) {
        ImmutableBytesWritable key = new ImmutableBytesWritable();
        ImmutableBytesWritable value = new ImmutableBytesWritable();
        key.readFields(in);
        value.readFields(in);
        setValue(key, value);
    }
    families.clear();
    int numFamilies = in.readInt();
    for (int i = 0; i < numFamilies; i++) {
        HColumnDescriptor c = new HColumnDescriptor();
        c.readFields(in);
        families.put(c.getName(), c);
    }
    if (version >= 7) {
        int numConfigs = in.readInt();
        for (int i = 0; i < numConfigs; i++) {
            ImmutableBytesWritable key = new ImmutableBytesWritable();
            ImmutableBytesWritable value = new ImmutableBytesWritable();
            key.readFields(in);
            value.readFields(in);
            configuration.put(Bytes.toString(key.get(), key.getOffset(), key.getLength()),
                    Bytes.toString(value.get(), value.getOffset(), value.getLength()));
        }
    }
}
From source file:org.apache.hadoop.hbase.io.HbaseObjectWritable.java
/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in
 * @param objectWritable
 * @param conf
 * @return the object
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static Object readObject(DataInput in, HbaseObjectWritable objectWritable, Configuration conf)
        throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else if (declaredClass.equals(Result[].class)) {
            instance = Result.readArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) { // an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) { // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else { // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) { // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to deserialize object", e);
            } finally {
                if (bis != null) bis.close();
                if (ois != null) ois.close();
            }
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}
From source file:org.apache.hadoop.hbase.regionserver.wal.HLogKey.java
@Override
public void readFields(DataInput in) throws IOException {
    Version version = Version.UNVERSIONED;
    // HLogKey was not versioned in the beginning.
    // In order to introduce it now, we make use of the fact
    // that encodedRegionName was written with Bytes.writeByteArray,
    // which encodes the array length as a vint which is >= 0.
    // Hence if the vint is >= 0 we have an old version and the vint
    // encodes the length of encodedRegionName.
    // If < 0 we just read the version and the next vint is the length.
    // @see Bytes#readByteArray(DataInput)
    this.scopes = null; // writable HLogKey does not contain scopes
    int len = WritableUtils.readVInt(in);
    byte[] tablenameBytes = null;
    if (len < 0) {
        // what we just read was the version
        version = Version.fromCode(len);
        // We only compress V2 of HLogKey.
        // If compression is on, the length is handled by the dictionary.
        if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
            len = WritableUtils.readVInt(in);
        }
    }
    if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
        this.encodedRegionName = new byte[len];
        in.readFully(this.encodedRegionName);
        tablenameBytes = Bytes.readByteArray(in);
    } else {
        this.encodedRegionName = Compressor.readCompressed(in, compressionContext.regionDict);
        tablenameBytes = Compressor.readCompressed(in, compressionContext.tableDict);
    }
    this.logSeqNum = in.readLong();
    this.writeTime = in.readLong();
    this.clusterIds.clear();
    if (version.atLeast(Version.INITIAL)) {
        if (in.readBoolean()) {
            // read the older log
            // Definitely is the originating cluster
            clusterIds.add(new UUID(in.readLong(), in.readLong()));
        }
    } else {
        try {
            // dummy read (former byte cluster id)
            in.readByte();
        } catch (EOFException e) {
            // Means it's a very old key, just continue
        }
    }
    try {
        this.tablename = TableName.valueOf(tablenameBytes);
    } catch (IllegalArgumentException iae) {
        if (Bytes.toString(tablenameBytes).equals(TableName.OLD_META_STR)) {
            // It is a pre-namespace meta table edit, continue with new format.
            LOG.info("Got an old .META. edit, continuing with new format ");
            this.tablename = TableName.META_TABLE_NAME;
            this.encodedRegionName = HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
        } else if (Bytes.toString(tablenameBytes).equals(TableName.OLD_ROOT_STR)) {
            this.tablename = TableName.OLD_ROOT_TABLE_NAME;
            throw iae;
        } else {
            throw iae;
        }
    }
    // Do not need to read the clusters information as we are using protobufs from 0.95
}