Example usage for java.io DataInput readLong

List of usage examples for java.io DataInput readLong

Introduction

On this page you can find usage examples for java.io DataInput readLong.

Prototype

long readLong() throws IOException;

Document

Reads eight input bytes and returns a long value.
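
readLong consumes the next eight bytes of input and assembles them, high byte first, into a signed 64-bit value. Below is a minimal round-trip sketch using DataOutputStream and DataInputStream; the class name ReadLongDemo and the sample value are illustrative, not taken from any of the sources listed further down.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadLongDemo {
    public static void main(String[] args) throws IOException {
        // Write one long; DataOutput encodes it as eight big-endian bytes.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeLong(1234567890123L);
        }

        // Read it back through the DataInput interface.
        DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        long value = in.readLong();
        System.out.println(value); // prints 1234567890123
    }
}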

Usage

From source file:org.apache.geode.internal.InternalDataSerializer.java

/** Read a list of Long objects. */
public static List<Long> readListOfLongs(DataInput in) throws IOException {
    int size = in.readInt();
    if (size < 0) {
        return null;
    } else {
        List<Long> result = new LinkedList<>();
        boolean longIDs = in.readBoolean();
        for (int i = 0; i < size; i++) {
            long l = longIDs ? in.readLong() : in.readInt();
            result.add(l);
        }
        return result;
    }
}
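
The reader above fixes the wire format: an int size (negative means null), a boolean telling whether the elements were written as longs or ints, then the elements themselves. A hedged sketch of a matching writer follows; writeListOfLongs is a hypothetical name, and the rule shown for choosing the boolean flag is an assumption rather than Geode's actual logic.

/** Hypothetical counterpart to readListOfLongs above (not copied from Geode). */
public static void writeListOfLongs(List<Long> list, DataOutput out) throws IOException {
    if (list == null) {
        out.writeInt(-1);
        return;
    }
    out.writeInt(list.size());
    // Assumption: fall back to the long encoding only when some value
    // cannot be represented as an int.
    boolean longIDs = false;
    for (long l : list) {
        if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) {
            longIDs = true;
            break;
        }
    }
    out.writeBoolean(longIDs);
    for (long l : list) {
        if (longIDs) {
            out.writeLong(l);
        } else {
            out.writeInt((int) l);
        }
    }
}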

From source file:org.apache.geode.internal.InternalDataSerializer.java

/**
 * Read a variable length long the old way (pre 7.0). Use this only in contexts where you might
 * need to communicate with pre 7.0 members or files.
 */
public static long readVLOld(DataInput in) throws IOException {
    byte code = in.readByte();
    long result;
    if (code < 0) {
        // mask off sign bit
        result = code & 0x7F;
        result <<= 8;
        result |= in.readByte() & 0xFF;
    } else if (code <= MAX_BYTE_VL) {
        result = code;
    } else if (code == INT_VL) {
        result = in.readInt();
    } else if (code == LONG_VL) {
        result = in.readLong();
    } else {
        throw new IllegalStateException("unexpected variable length code=" + code);
    }
    return result;
}
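
Reading readVLOld backwards gives the encoding: values up to MAX_BYTE_VL fit in a single byte, two-byte values set the sign bit of the first byte, and anything larger is prefixed with the INT_VL or LONG_VL marker before a full int or long. A hedged writer sketch follows; writeVLOld is a hypothetical name, and only the constants already referenced by the reader are assumed to exist.

/** Hypothetical writer mirroring readVLOld above; inferred from its branches. */
public static void writeVLOld(long value, DataOutput out) throws IOException {
    if (value >= 0 && value <= MAX_BYTE_VL) {
        // Single byte, sign bit clear.
        out.writeByte((int) value);
    } else if (value >= 0 && value <= 0x7FFF) {
        // Two bytes; the sign bit of the first byte flags this form.
        out.writeByte((int) ((value >> 8) | 0x80));
        out.writeByte((int) (value & 0xFF));
    } else if (value >= Integer.MIN_VALUE && value <= Integer.MAX_VALUE) {
        out.writeByte(INT_VL);
        out.writeInt((int) value);
    } else {
        out.writeByte(LONG_VL);
        out.writeLong(value);
    }
}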

From source file:org.apache.hadoop.hbase.ccindex.TimeRangeFilter.java

public void readFields(final DataInput in) throws IOException {
    this.columnFamily = Bytes.readByteArray(in);
    if (this.columnFamily.length == 0) {
        this.columnFamily = null;
    }
    this.columnQualifier = Bytes.readByteArray(in);
    if (this.columnQualifier.length == 0) {
        this.columnQualifier = null;
    }
    this.startTs = in.readLong();
    this.endTs = in.readLong();

    this.foundColumn = in.readBoolean();
    this.filterIfMissing = in.readBoolean();
    this.latestVersionOnly = in.readBoolean();
}

From source file:org.apache.hadoop.hbase.coprocessor.GroupByStatsValues.java

@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
    count = in.readLong();
    missing = in.readLong();

    ObjectWritable ow = new ObjectWritable();
    ow.readFields(in);
    min = (T) ow.get();
    ow = new ObjectWritable();
    ow.readFields(in);
    max = (T) ow.get();
    ow = new ObjectWritable();
    ow.readFields(in);
    sum = (S) ow.get();
    ow = new ObjectWritable();
    ow.readFields(in);
    sumOfSquares = (S) ow.get();

    String ciClassName = WritableUtils.readString(in);
    try {
        ci = (ColumnInterpreter<T, S>) Class.forName(ciClassName).newInstance();
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file:org.apache.hadoop.hbase.hbql.filter.PageFilter.java

public void readFields(final DataInput in) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    this.pageSize = in.readLong();
    this.verbose = in.readBoolean();
    this.filter = (Filter) HbaseObjectWritable.readObject(in, conf);
}

From source file:org.apache.hadoop.hbase.HRegionInfo.java

/**
 * @deprecated Use protobuf deserialization instead.
 * @see #parseFrom(byte[])
 */
@Deprecated
public void readFields(DataInput in) throws IOException {
    // Read the single version byte.  We don't ask the super class to do it
    // because it freaks out if it's not the current class's version.  This method
    // can deserialize version 0 and version 1 of HRI.
    byte version = in.readByte();
    if (version == 0) {
        // This is the old HRI that carried an HTD.  Migrate it.  The below
        // was copied from the old 0.90 HRI readFields.
        this.endKey = Bytes.readByteArray(in);
        this.offLine = in.readBoolean();
        this.regionId = in.readLong();
        this.regionName = Bytes.readByteArray(in);
        this.split = in.readBoolean();
        this.startKey = Bytes.readByteArray(in);
        try {
            HTableDescriptor htd = new HTableDescriptor();
            htd.readFields(in);
            this.tableName = htd.getTableName();
        } catch (EOFException eofe) {
            throw new IOException("HTD not found in input buffer", eofe);
        }
        this.hashCode = in.readInt();
    } else if (getVersion() == version) {
        this.endKey = Bytes.readByteArray(in);
        this.offLine = in.readBoolean();
        this.regionId = in.readLong();
        this.regionName = Bytes.readByteArray(in);
        this.split = in.readBoolean();
        this.startKey = Bytes.readByteArray(in);
        this.tableName = TableName.valueOf(Bytes.readByteArray(in));
        this.hashCode = in.readInt();
    } else {
        throw new IOException("Non-migratable/unknown version=" + getVersion());
    }
}

From source file:org.apache.hadoop.hbase.HServerInfo.java

public void readFields(DataInput in) throws IOException {
    this.serverAddress.readFields(in);
    this.startCode = in.readLong();
    this.load.readFields(in);
    in.readInt();
    this.hostname = in.readUTF();
    if (sendSequenceIds) {
        HbaseMapWritable<byte[], Long> sequenceIdsWritable = new HbaseMapWritable<byte[], Long>(
                flushedSequenceIdByRegion);
        sequenceIdsWritable.readFields(in);
    }
}

From source file:org.apache.hadoop.hbase.index.IndexSpecification.java

/**
 * @param in the DataInput to read from
 * @throws IOException
 */
public void readFields(DataInput in) throws IOException {
    this.name = Bytes.readByteArray(in);
    try {
        HTableDescriptor.isLegalTableName(this.name);
    } catch (IllegalArgumentException e) {
        String msg = "Received unexpected data while parsing the column qualifiers: "
                + Bytes.toString(this.name) + ".";
        Log.warn(msg + " Could be a non-indexed table.");
        throw new EOFException(msg);
    }
    int indexColsSize = in.readInt();
    indexColumns.clear();
    for (int i = 0; i < indexColsSize; i++) {
        ColumnQualifier cq = new ColumnQualifier();
        // Need to revisit this place. A value that looks valid here may come
        // through even though it is actually invalid.
        try {
            cq.readFields(in);
        } catch (IllegalArgumentException e) {
            throw new EOFException("Received unexpected data while parsing the column qualifiers.");
        }
        internalAdd(cq);
    }
    this.maxVersions = in.readInt();
    this.ttl = in.readLong();
}

From source file:org.apache.hadoop.hbase.io.BatchUpdate.java

public void readFields(final DataInput in) throws IOException {
    // Clear any existing operations; may be hangovers from previous use of
    // this instance.
    if (this.operations.size() != 0) {
        this.operations.clear();
    }
    this.row = Bytes.readByteArray(in);
    timestamp = in.readLong();
    this.size = in.readLong();
    int nOps = in.readInt();
    for (int i = 0; i < nOps; i++) {
        BatchOperation op = new BatchOperation();
        op.readFields(in);
        this.operations.add(op);
    }
    this.rowLock = in.readLong();
}

From source file:org.apache.hadoop.hbase.io.HbaseObjectWritable.java

/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in
 * @param objectWritable
 * @param conf
 * @return the object
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static Object readObject(DataInput in, HbaseObjectWritable objectWritable, Configuration conf)
        throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else if (declaredClass.equals(Result[].class)) {
            instance = Result.readArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) { //an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) { // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else { // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) { // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            } finally {
                if (bis != null)
                    bis.close();
                if (ois != null)
                    ois.close();
            }
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}
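
For context, a hedged sketch of how this helper might be invoked when deserializing a payload; the deserialize wrapper and its payload parameter are assumptions, while the readObject call and its null-tolerant objectWritable argument come from the code above.

// Hypothetical call site (not from the source above): the payload is assumed
// to hold bytes produced by the matching HbaseObjectWritable.writeObject.
// Passing null skips storing the declared class and instance back into a holder.
static Object deserialize(byte[] payload, Configuration conf) throws IOException {
    DataInput in = new DataInputStream(new ByteArrayInputStream(payload));
    return HbaseObjectWritable.readObject(in, null, conf);
}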