Example usage for java.io DataInput readLong

List of usage examples for java.io DataInput readLong

Introduction

This page lists example usages of java.io.DataInput.readLong().

Prototype

long readLong() throws IOException;

Document

Reads eight input bytes and returns a long value.
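
readLong consumes exactly eight bytes (big-endian) and assembles them into a signed 64-bit value; it is the read-side counterpart of DataOutput.writeLong and throws EOFException if fewer than eight bytes remain. The following minimal sketch (not taken from the source files listed below) round-trips a long through a byte array:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadLongExample {
    public static void main(String[] args) throws IOException {
        // Write a long with DataOutput.writeLong (eight bytes, big-endian).
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bos)) {
            out.writeLong(1234567890123456789L);
        }

        // Read it back with DataInput.readLong.
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
            long value = in.readLong(); // throws EOFException if fewer than eight bytes remain
            System.out.println(value);  // 1234567890123456789
        }
    }
}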

Usage

From source file:org.apache.hadoop.hbase.regionserver.wal.HLogKey.java

@Override
public void readFields(DataInput in) throws IOException {
    Version version = Version.UNVERSIONED;
    // HLogKey was not versioned in the beginning.
    // In order to introduce it now, we make use of the fact
    // that encodedRegionName was written with Bytes.writeByteArray,
    // which encodes the array length as a vint which is >= 0.
    // Hence if the vint is >= 0 we have an old version and the vint
    // encodes the length of encodedRegionName.
    // If < 0 we just read the version and the next vint is the length.
    // @see Bytes#readByteArray(DataInput)
    this.scopes = null; // writable HLogKey does not contain scopes
    int len = WritableUtils.readVInt(in);
    byte[] tablenameBytes = null;
    if (len < 0) {
        // what we just read was the version
        version = Version.fromCode(len);
        // We only compress V2 of HLogkey.
        // If compression is on, the length is handled by the dictionary
        if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
            len = WritableUtils.readVInt(in);
        }
    }
    if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
        this.encodedRegionName = new byte[len];
        in.readFully(this.encodedRegionName);
        tablenameBytes = Bytes.readByteArray(in);
    } else {
        this.encodedRegionName = Compressor.readCompressed(in, compressionContext.regionDict);
        tablenameBytes = Compressor.readCompressed(in, compressionContext.tableDict);
    }

    this.logSeqNum = in.readLong();
    this.writeTime = in.readLong();

    this.clusterIds.clear();
    if (version.atLeast(Version.INITIAL)) {
        if (in.readBoolean()) {
            // read the older log
            // Definitely is the originating cluster
            clusterIds.add(new UUID(in.readLong(), in.readLong()));
        }
    } else {
        try {
            // dummy read (former byte cluster id)
            in.readByte();
        } catch (EOFException e) {
            // Means it's a very old key, just continue
        }
    }
    try {
        this.tablename = TableName.valueOf(tablenameBytes);
    } catch (IllegalArgumentException iae) {
        if (Bytes.toString(tablenameBytes).equals(TableName.OLD_META_STR)) {
            // It is a pre-namespace meta table edit, continue with new format.
            LOG.info("Got an old .META. edit, continuing with new format ");
            this.tablename = TableName.META_TABLE_NAME;
            this.encodedRegionName = HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
        } else if (Bytes.toString(tablenameBytes).equals(TableName.OLD_ROOT_STR)) {
            this.tablename = TableName.OLD_ROOT_TABLE_NAME;
            throw iae;
        } else
            throw iae;
    }
    // Do not need to read the clusters information as we are using protobufs from 0.95
}
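
In the HLogKey example above, the originating cluster id is deserialized as two consecutive readLong calls, most-significant bits first. A minimal, hypothetical sketch of that convention in isolation (the class and method names below are illustrative, not HBase API) could look like this:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.UUID;

// Hypothetical helper illustrating the two-longs-per-UUID convention used above.
final class UuidIo {
    static void write(DataOutput out, UUID id) throws IOException {
        out.writeLong(id.getMostSignificantBits());
        out.writeLong(id.getLeastSignificantBits());
    }

    static UUID read(DataInput in) throws IOException {
        // Order must match write(): most-significant bits first.
        return new UUID(in.readLong(), in.readLong());
    }
}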

From source file:org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java

/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in the stream to read from
 * @param objectWritable if non-null, receives the declared class and instance that were read
 * @param conf configuration used to resolve classes and instantiate Writables
 * @return the deserialized object
 * @throws IOException if the object cannot be read
 */
@SuppressWarnings("unchecked")
static Object readObject(DataInput in, HbaseObjectWritableFor96Migration objectWritable, Configuration conf)
        throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) { //an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) { // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else if (Scan.class.isAssignableFrom(declaredClass)) {
        int length = in.readInt();
        byte[] scanBytes = new byte[length];
        in.readFully(scanBytes);
        ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
        instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
    } else { // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) { // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            } finally {
                if (bis != null)
                    bis.close();
                if (ois != null)
                    ois.close();
            }
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}

From source file:org.apache.hadoop.hdfs.protocol.Block.java

final void readHelper(DataInput in) throws IOException {
    this.blockId = in.readLong();
    this.numBytes = in.readLong();
    this.generationStamp = in.readLong();
    if (numBytes < 0) {
        throw new IOException("Unexpected block size: " + numBytes);
    }
}

From source file:org.apache.hadoop.hdfs.protocol.Block.java

public void readId(DataInput in) throws IOException {
    this.blockId = in.readLong();
    this.generationStamp = in.readLong();
}

From source file:org.apache.hadoop.hdfs.protocol.LocatedBlocks.java

public void readFields(DataInput in) throws IOException {
    this.fileLength = in.readLong();
    underConstruction = in.readBoolean();
    // read located blocks
    int nrBlocks = in.readInt();
    this.blocks = new ArrayList<LocatedBlock>(nrBlocks);
    for (int idx = 0; idx < nrBlocks; idx++) {
        LocatedBlock blk = new LocatedBlock();
        blk.readFields(in);
        this.blocks.add(blk);
    }
}

From source file:org.apache.hadoop.hdfs.server.datanode.BlockCrcInfoWritable.java

@Override
public void readFields(DataInput in) throws IOException {
    this.blockId = in.readLong();
    this.blockGenStamp = in.readLong();
    this.blockCrc = in.readInt();
}

From source file:org.apache.hadoop.hdfs.server.namenode.FSImageSerialization.java

static INodeFileUnderConstruction readINodeUnderConstruction(DataInput in, FSNamesystem fsNamesys,
        int imgVersion) throws IOException {
    byte[] name = readBytes(in);
    long inodeId = LayoutVersion.supports(Feature.ADD_INODE_ID, imgVersion) ? in.readLong()
            : fsNamesys.allocateNewInodeId();
    short blockReplication = in.readShort();
    long modificationTime = in.readLong();
    long preferredBlockSize = in.readLong();

    int numBlocks = in.readInt();
    BlockInfo[] blocks = new BlockInfo[numBlocks];
    Block blk = new Block();
    int i = 0;
    for (; i < numBlocks - 1; i++) {
        blk.readFields(in);
        blocks[i] = new BlockInfo(blk, blockReplication);
    }
    // last block is UNDER_CONSTRUCTION
    if (numBlocks > 0) {
        blk.readFields(in);
        blocks[i] = new BlockInfoUnderConstruction(blk, blockReplication, BlockUCState.UNDER_CONSTRUCTION,
                null);
    }
    PermissionStatus perm = PermissionStatus.read(in);
    String clientName = readString(in);
    String clientMachine = readString(in);

    // We previously stored locations for the last block, now we
    // just record that there are none
    int numLocs = in.readInt();
    assert numLocs == 0 : "Unexpected block locations";

    return new INodeFileUnderConstruction(inodeId, name, blockReplication, modificationTime, preferredBlockSize,
            blocks, perm, clientName, clientMachine, null);
}

From source file:org.apache.hadoop.hdfs.server.namenode.FSImageSerialization.java

public static Block[] readCompactBlockArray(DataInput in, int logVersion) throws IOException {
    int num = WritableUtils.readVInt(in);
    if (num < 0) {
        throw new IOException("Invalid block array length: " + num);
    }
    Block prev = null;
    Block[] ret = new Block[num];
    for (int i = 0; i < num; i++) {
        long id = in.readLong();
        long sz = WritableUtils.readVLong(in) + ((prev != null) ? prev.getNumBytes() : 0);
        long gs = WritableUtils.readVLong(in) + ((prev != null) ? prev.getGenerationStamp() : 0);
        ret[i] = new Block(id, sz, gs);
        prev = ret[i];
    }
    return ret;
}
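
readCompactBlockArray stores each block id as a fixed eight-byte long while delta-encoding size and generation stamp against the previous block as vlongs. A hedged sketch of a matching write side is shown below; it assumes the org.apache.hadoop.hdfs.protocol.Block and org.apache.hadoop.io.WritableUtils APIs and is an illustration, not code copied from Hadoop:

import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.io.WritableUtils;

// Illustrative write-side counterpart of readCompactBlockArray above (a sketch,
// not Hadoop source): the block id is written as a full long, while size and
// generation stamp are delta-encoded against the previous block.
class CompactBlockArrayWriterSketch {
    static void write(Block[] blocks, DataOutput out) throws IOException {
        WritableUtils.writeVInt(out, blocks.length);
        Block prev = null;
        for (Block b : blocks) {
            out.writeLong(b.getBlockId());
            WritableUtils.writeVLong(out, b.getNumBytes() - ((prev != null) ? prev.getNumBytes() : 0));
            WritableUtils.writeVLong(out, b.getGenerationStamp() - ((prev != null) ? prev.getGenerationStamp() : 0));
            prev = b;
        }
    }
}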

From source file:org.apache.hadoop.hive.jdbc.storagehandler.JdbcDBInputSplit.java

public void readFields(DataInput input) throws IOException {
    this.start = input.readLong();
    this.end = input.readLong();

}

From source file:org.apache.hadoop.hive.ql.io.orc.OrcSplit.java

@Override
public void readFields(DataInput in) throws IOException {
    //deserialize path, offset, length using FileSplit
    super.readFields(in);

    byte flags = in.readByte();
    hasFooter = (FOOTER_FLAG & flags) != 0;
    isOriginal = (ORIGINAL_FLAG & flags) != 0;
    hasBase = (BASE_FLAG & flags) != 0;
    boolean hasFileId = (HAS_FILEID_FLAG & flags) != 0;

    deltas.clear();
    int numDeltas = in.readInt();
    for (int i = 0; i < numDeltas; i++) {
        AcidInputFormat.DeltaMetaData dmd = new AcidInputFormat.DeltaMetaData();
        dmd.readFields(in);
        deltas.add(dmd);
    }
    if (hasFooter) {
        // deserialize FileMetaInfo fields
        String compressionType = Text.readString(in);
        int bufferSize = WritableUtils.readVInt(in);
        int metadataSize = WritableUtils.readVInt(in);

        // deserialize FileMetaInfo field footer
        int footerBuffSize = WritableUtils.readVInt(in);
        ByteBuffer footerBuff = ByteBuffer.allocate(footerBuffSize);
        in.readFully(footerBuff.array(), 0, footerBuffSize);
        OrcFile.WriterVersion writerVersion = ReaderImpl.getWriterVersion(WritableUtils.readVInt(in));

        fileMetaInfo = new ReaderImpl.FileMetaInfo(compressionType, bufferSize, metadataSize, footerBuff,
                writerVersion);
    }
    if (hasFileId) {
        fileId = in.readLong();
    }
}