Example usage for java.io DataInput readBoolean

List of usage examples for java.io DataInput readBoolean

Introduction

On this page you can find usage examples for java.io DataInput readBoolean.

Prototype

boolean readBoolean() throws IOException;


Document

Reads one input byte and returns true if that byte is nonzero, false if that byte is zero.
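
A minimal round trip makes the contract concrete: writeBoolean emits a single byte, and readBoolean maps any nonzero byte back to true. The class name below is illustrative and not taken from any of the projects shown later.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadBooleanDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        // DataOutput.writeBoolean writes exactly one byte: 1 for true, 0 for false.
        try (DataOutputStream out = new DataOutputStream(buf)) {
            out.writeBoolean(true);
            out.writeBoolean(false);
        }
        // DataInput.readBoolean reads that byte back and returns true if it is nonzero.
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()))) {
            System.out.println(in.readBoolean()); // true
            System.out.println(in.readBoolean()); // false
        }
    }
}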

Usage

From source file:org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java

/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in
 * @param objectWritable
 * @param conf
 * @return the object
 * @throws IOException
 */
@SuppressWarnings("unchecked")
static Object readObject(DataInput in, HbaseObjectWritableFor96Migration objectWritable, Configuration conf)
        throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) { //an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) { // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else if (Scan.class.isAssignableFrom(declaredClass)) {
        int length = in.readInt();
        byte[] scanBytes = new byte[length];
        in.readFully(scanBytes);
        ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
        instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
    } else { // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) { // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            } finally {
                if (bis != null)
                    bis.close();
                if (ois != null)
                    ois.close();
            }
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}

From source file:org.apache.hadoop.hbase.security.access.TablePermission.java

@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    byte[] tableBytes = Bytes.readByteArray(in);
    table = TableName.valueOf(tableBytes);
    if (in.readBoolean()) {
        family = Bytes.readByteArray(in);
    }
    if (in.readBoolean()) {
        qualifier = Bytes.readByteArray(in);
    }
    if (in.readBoolean()) {
        namespace = Bytes.toString(Bytes.readByteArray(in));
    }
}
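
The three readBoolean calls above act as presence flags: the writer emits one boolean before each optional field so the reader knows whether the field follows. A minimal, self-contained sketch of the same pattern using plain java.io (the names here are illustrative and not taken from TablePermission):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class PresenceFlagDemo {
    public static void main(String[] args) throws IOException {
        String family = "cf";    // optional field, present in this run
        String qualifier = null; // optional field, absent in this run

        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buf)) {
            // One boolean per optional field, followed by the field only if present.
            out.writeBoolean(family != null);
            if (family != null) {
                out.writeUTF(family);
            }
            out.writeBoolean(qualifier != null);
            if (qualifier != null) {
                out.writeUTF(qualifier);
            }
        }

        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()))) {
            // Mirror the write order: check the flag, then read the field only if it was written.
            String readFamily = in.readBoolean() ? in.readUTF() : null;
            String readQualifier = in.readBoolean() ? in.readUTF() : null;
            System.out.println(readFamily + " / " + readQualifier); // prints: cf / null
        }
    }
}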

From source file:org.apache.hadoop.hdfs.protocol.LocatedBlocks.java

public void readFields(DataInput in) throws IOException {
    this.fileLength = in.readLong();
    underConstruction = in.readBoolean();
    // read located blocks
    int nrBlocks = in.readInt();
    this.blocks = new ArrayList<LocatedBlock>(nrBlocks);
    for (int idx = 0; idx < nrBlocks; idx++) {
        LocatedBlock blk = new LocatedBlock();
        blk.readFields(in);
        this.blocks.add(blk);
    }
}

From source file:org.apache.hadoop.hive.accumulo.AccumuloHiveRow.java

@Override
public void readFields(DataInput dataInput) throws IOException {
    if (dataInput.readBoolean()) {
        rowId = dataInput.readUTF();
    }
    int size = dataInput.readInt();
    for (int i = 0; i < size; i++) {
        int cfLength = dataInput.readInt();
        byte[] cfData = new byte[cfLength];
        dataInput.readFully(cfData, 0, cfLength);
        Text cf = new Text(cfData);
        int cqLength = dataInput.readInt();
        byte[] cqData = new byte[cqLength];
        dataInput.readFully(cqData, 0, cqLength);
        Text cq = new Text(cqData);
        int valSize = dataInput.readInt();
        byte[] val = new byte[valSize];
        for (int j = 0; j < valSize; j++) {
            val[j] = dataInput.readByte();
        }
        tuples.add(new ColumnTuple(cf, cq, val));
    }
}

From source file:org.apache.hadoop.hive.llap.security.LlapTokenIdentifier.java

@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    clusterId = in.readUTF();
    Preconditions.checkNotNull(clusterId);
    appId = in.readUTF();
    isSigningRequired = in.readBoolean();
    appId = appId == null ? "" : appId;
}

From source file:org.apache.hadoop.ipc.chinamobile.ConnectionHeader.java

@Override
public void readFields(DataInput in) throws IOException {
    protocol = Text.readString(in);
    if (protocol.isEmpty()) {
        protocol = null;
    }

    boolean ugiPresent = in.readBoolean();
    if (ugiPresent) {
        ugi.readFields(in);
    } else {
        ugi = null;
    }
}

From source file:org.apache.hadoop.ipc.ConnectionHeader.java

@Override
public void readFields(DataInput in) throws IOException {
    protocol = Text.readString(in);
    if (protocol.isEmpty()) {
        protocol = null;
    }

    boolean ugiUsernamePresent = in.readBoolean();
    if (ugiUsernamePresent) {
        String username = in.readUTF();
        boolean realUserNamePresent = in.readBoolean();
        if (realUserNamePresent) {
            String realUserName = in.readUTF();
            UserGroupInformation realUserUgi = UserGroupInformation.createRemoteUser(realUserName);
            ugi = UserGroupInformation.createProxyUser(username, realUserUgi);
        } else {
            ugi = UserGroupInformation.createRemoteUser(username);
        }
    } else {
        ugi = null;
    }
}

From source file:org.apache.hadoop.mapred.LaunchTaskAction.java

public void readFields(DataInput in) throws IOException {
    boolean isMapTask = in.readBoolean();
    if (isMapTask) {
        task = new MapTask();
    } else {
        task = new ReduceTask();
    }

    task.readFields(in);
}
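
Here readBoolean acts as a type tag rather than a presence flag: it selects which concrete Task subclass to instantiate before delegating to its readFields. A hedged sketch of the matching write method, assuming the classic Hadoop mapred Task exposes isMapTask(); this may differ from the actual LaunchTaskAction source:

public void write(DataOutput out) throws IOException {
    // Type tag consumed by readBoolean() in readFields above.
    out.writeBoolean(task.isMapTask());
    task.write(out);
}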

From source file:org.apache.hadoop.mapred.Task.java

public void readFields(DataInput in) throws IOException {
    jobFile = Text.readString(in);
    taskId = TaskAttemptID.read(in);
    partition = in.readInt();
    numSlotsRequired = in.readInt();
    taskStatus.readFields(in);
    skipRanges.readFields(in);
    currentRecIndexIterator = skipRanges.skipRangeIterator();
    currentRecStartIndex = currentRecIndexIterator.next();
    skipping = in.readBoolean();
    jobCleanup = in.readBoolean();
    if (jobCleanup) {
        jobRunStateForCleanup = WritableUtils.readEnum(in, JobStatus.State.class);
    }
    jobSetup = in.readBoolean();
    writeSkipRecs = in.readBoolean();
    taskCleanup = in.readBoolean();
    if (taskCleanup) {
        setPhase(TaskStatus.Phase.CLEANUP);
    }
    user = Text.readString(in);
}

From source file:org.apache.hadoop.mapred.TaskStatus.java

public void readFields(DataInput in) throws IOException {
    this.taskid.readFields(in);
    this.progress = in.readFloat();
    this.numSlots = in.readInt();
    this.runState = WritableUtils.readEnum(in, State.class);
    this.diagnosticInfo = Text.readString(in);
    this.stateString = Text.readString(in);
    this.phase = WritableUtils.readEnum(in, Phase.class);
    this.startTime = in.readLong();
    this.finishTime = in.readLong();
    counters = new Counters();
    this.includeCounters = in.readBoolean();
    this.outputSize = in.readLong();
    if (includeCounters) {
        counters.readFields(in);
    }
    nextRecordRange.readFields(in);
}