Example usage for java.io DataInputStream read

List of usage examples for java.io DataInputStream read

Introduction

On this page you can find example usages of java.io.DataInputStream.read(byte[], int, int).

Prototype

public final int read(byte b[], int off, int len) throws IOException 

Source Link

Document

Reads up to len bytes of data from the contained input stream into an array of bytes.

Usage

From source file:de.ingrid.communication.authentication.BasicSchemeConnector.java

/**
 * Reads the HTTP proxy's reply and scans it for one of the accepted status
 * messages (ACCEPT_MESSAGE_HTTP_1_0 / ACCEPT_MESSAGE_HTTP_1_1), case-insensitively.
 *
 * @param dataInput   stream connected to the proxy
 * @param errorBuffer accumulates everything read, so callers can report the
 *                    full proxy reply on failure
 * @return true if an accept message was seen, false if end of stream was
 *         reached without one
 * @throws IOException if reading from the proxy fails
 */
private boolean readMessageFromHttpProxy(DataInputStream dataInput, StringBuffer errorBuffer)
        throws IOException {
    byte[] buffer = new byte[1024];
    int bytesRead;
    while ((bytesRead = dataInput.read(buffer, 0, buffer.length)) != -1) {
        // Only convert the bytes actually read. The original converted the whole
        // 1024-byte buffer, which appended trailing zero/stale bytes to errorBuffer
        // whenever a read returned fewer than 1024 bytes.
        // NOTE(review): uses the platform default charset, as before — confirm the
        // proxy replies in a compatible encoding.
        String chunk = new String(buffer, 0, bytesRead);
        errorBuffer.append(chunk);
        String lowered = chunk.toLowerCase();
        // NOTE(review): as in the original, an accept message split across two
        // reads is not detected — confirm replies always fit one read.
        if (lowered.indexOf(ACCEPT_MESSAGE_HTTP_1_0.toLowerCase()) > -1
                || lowered.indexOf(ACCEPT_MESSAGE_HTTP_1_1.toLowerCase()) > -1) {
            return true;
        }
    }
    return false;
}

From source file:com.facebook.infrastructure.db.ReadResponse.java

/**
 * Deserializes a ReadResponse from the stream.
 *
 * Wire order: UTF table name, int digest length, digest bytes, boolean
 * isDigest flag, then — only for non-digest responses — a serialized Row.
 *
 * @param dis stream positioned at the start of a serialized ReadResponse
 * @return the reconstructed ReadResponse
 * @throws IOException if the stream ends early or a field cannot be read
 */
public ReadResponse deserialize(DataInputStream dis) throws IOException {
    String table = dis.readUTF();
    int digestSize = dis.readInt();
    byte[] digest = new byte[digestSize];
    // readFully guarantees all digestSize bytes arrive (or throws EOFException);
    // the original used read(), whose return value was ignored and which may
    // legally return fewer bytes, silently leaving the digest tail zeroed.
    dis.readFully(digest, 0, digestSize);
    boolean isDigest = dis.readBoolean();

    ReadResponse rmsg;
    if (isDigest) {
        rmsg = new ReadResponse(table, digest);
    } else {
        // Only non-digest responses carry a serialized row after the flag.
        Row row = Row.serializer().deserialize(dis);
        rmsg = new ReadResponse(table, row);
    }
    rmsg.setIsDigestQuery(isDigest);
    return rmsg;
}

From source file:org.apache.hadoop.io.crypto.tool.CryptoApiTool.java

/**
 * Copies all remaining bytes from {@code input} to the compression stream,
 * then flushes it. Neither stream is closed here; the caller owns both.
 *
 * @param input  source stream
 * @param output destination compression stream
 * @throws IOException if either stream fails
 */
private void writeStream(DataInputStream input, CompressionOutputStream output) throws IOException {
    final byte[] buffer = new byte[128 * 1024];
    int read;
    // Copy in full-buffer chunks. The original requested an odd 67 * 1024 bytes
    // per read (with a commented-out 64 * 1024 variant), wasting half the buffer;
    // requesting buffer.length is equivalent in output and simpler.
    // read() returns -1 at end of stream and never 0 for a non-zero length.
    while ((read = input.read(buffer, 0, buffer.length)) != -1) {
        output.write(buffer, 0, read);
    }
    output.flush();
}

From source file:gobblin.metrics.reporter.util.SchemaRegistryVersionWriter.java

@Override
public Schema readSchemaVersioningInformation(DataInputStream inputStream) throws IOException {
    if (inputStream.readByte() != KafkaAvroSchemaRegistry.MAGIC_BYTE) {
        throw new IOException("MAGIC_BYTE not found in Avro message.");
    }/*from   w  w w.ja va 2 s  .c  o  m*/

    byte[] byteKey = new byte[schemaIdLengthBytes];
    int bytesRead = inputStream.read(byteKey, 0, schemaIdLengthBytes);
    if (bytesRead != schemaIdLengthBytes) {
        throw new IOException(
                String.format("Could not read enough bytes for schema id. Expected: %d, found: %d.",
                        schemaIdLengthBytes, bytesRead));
    }
    String hexKey = Hex.encodeHexString(byteKey);

    try {
        return this.registry.getSchemaByKey(hexKey);
    } catch (SchemaRegistryException sre) {
        throw new IOException("Failed to retrieve schema for key " + hexKey, sre);
    }
}

From source file:org.apache.hadoop.hdfs.server.namenode.metrics.TestNameNodeMetrics.java

/**
 * Opens the file and reads a few bytes so that getNumBlockLocations is
 * incremented; the data read is discarded.
 *
 * @param fileSys filesystem to open the file on
 * @param name    path of the file to touch
 * @throws IOException if the file cannot be opened or read
 */
private void readFile(FileSystem fileSys, Path name) throws IOException {
    // try-with-resources closes the stream even if the read throws;
    // the original leaked the stream on a read failure.
    try (DataInputStream stm = fileSys.open(name)) {
        byte[] buffer = new byte[4];
        // Return value deliberately ignored — any read attempt is enough to
        // bump the metric, and readFully would fail on files shorter than 4 bytes.
        stm.read(buffer, 0, 4);
    }
}

From source file:org.apache.helix.tools.ClusterSetup.java

/**
 * Reads an entire file into a byte array.
 *
 * @param filePath path of the file to read
 * @return the file contents
 * @throws IOException if the file cannot be opened or read
 */
private static byte[] readFile(String filePath) throws IOException {
    File file = new File(filePath);

    // NOTE(review): the int cast truncates the length of files over 2 GB —
    // presumably fine for the small config files this reads, but confirm.
    int size = (int) file.length();
    byte[] bytes = new byte[size];
    // try-with-resources guarantees the stream is closed even when a read
    // throws; the original never closed it on the exception path (leak).
    try (DataInputStream dis = new DataInputStream(new FileInputStream(file))) {
        int read = 0;
        int numRead;
        // A single read() may return fewer bytes than requested, so keep
        // reading until the buffer is full or the stream ends.
        while (read < bytes.length && (numRead = dis.read(bytes, read, bytes.length - read)) >= 0) {
            read += numRead;
        }
    }
    return bytes;
}

From source file:edu.cornell.med.icb.goby.compression.HybridChunkCodec2.java

@Override
public Message decode(final byte[] bytes) throws IOException {
    final DataInputStream completeChunkData = new DataInputStream(new ByteArrayInputStream(bytes));
    final int compressedSize = completeChunkData.readInt();
    final int storedChecksum = completeChunkData.readInt();

    final byte[] compressedBytes = new byte[compressedSize];
    final int read = completeChunkData.read(compressedBytes, 0, compressedSize);
    assert read == compressedSize : "read size must match recorded size.";
    crc32.reset();//from w w  w  .j a  v  a2 s . c o m

    crc32.update(compressedBytes);
    final int computedChecksum = (int) crc32.getValue();
    if (computedChecksum != storedChecksum) {
        throw new InvalidChecksumException();
    }
    final int bytesLeft = bytes.length - 4 - compressedSize - 4;
    final byte[] leftOver = new byte[bytesLeft];
    // 8 is the number of bytes to encode the length of the compressed chunk, plus
    // the number of bytes to encode the checksum.
    System.arraycopy(bytes, 8 + compressedSize, leftOver, 0, bytesLeft);
    final Message reducedProtoBuff = bzip2Codec.decode(leftOver);
    if (reducedProtoBuff == null) {
        return null;
    }
    return handler.decompressCollection(reducedProtoBuff, compressedBytes);
}

From source file:edu.cornell.med.icb.goby.compression.HybridChunkCodec1.java

@Override
public Message decode(final byte[] bytes) throws IOException {
    final DataInputStream completeChunkData = new DataInputStream(new FastByteArrayInputStream(bytes));
    final int compressedSize = completeChunkData.readInt();
    final int storedChecksum = completeChunkData.readInt();

    final byte[] compressedBytes = new byte[compressedSize];
    final int read = completeChunkData.read(compressedBytes, 0, compressedSize);
    assert read == compressedSize : "read size must match recorded size.";
    crc32.reset();// w ww  . j  a va2s .c o m

    crc32.update(compressedBytes);
    final int computedChecksum = (int) crc32.getValue();
    if (computedChecksum != storedChecksum) {
        throw new InvalidChecksumException();
    }
    final int bytesLeft = bytes.length - 4 - compressedSize - 4;
    final byte[] leftOver = new byte[bytesLeft];
    // 8 is the number of bytes to encode the length of the compressed chunk, plus
    // the number of bytes to encode the checksum.
    System.arraycopy(bytes, 8 + compressedSize, leftOver, 0, bytesLeft);
    final Message reducedProtoBuff = gzipCodec.decode(leftOver);
    if (reducedProtoBuff == null) {
        return null;
    }
    return handler.decompressCollection(reducedProtoBuff, compressedBytes);
}

From source file:org.apache.helix.manager.zk.ZKHelixAdmin.java

/**
 * Reads an entire file into a byte array.
 *
 * @param filePath path of the file to read
 * @return the file contents
 * @throws IOException if the file cannot be opened or read
 */
private static byte[] readFile(String filePath) throws IOException {
    File file = new File(filePath);

    // NOTE(review): the int cast truncates the length of files over 2 GB —
    // presumably fine for the small config files this reads, but confirm.
    int size = (int) file.length();
    byte[] bytes = new byte[size];
    // try-with-resources replaces the original's manual null-check/finally
    // block with the idiomatic, equivalent cleanup.
    try (DataInputStream dis = new DataInputStream(new FileInputStream(file))) {
        int read = 0;
        int numRead;
        // A single read() may return fewer bytes than requested, so keep
        // reading until the buffer is full or the stream ends.
        while (read < bytes.length && (numRead = dis.read(bytes, read, bytes.length - read)) >= 0) {
            read += numRead;
        }
        return bytes;
    }
}

From source file:com.linkedin.helix.tools.ClusterSetup.java

/**
 * Reads an entire file into a byte array.
 *
 * @param filePath path of the file to read
 * @return the file contents
 * @throws IOException if the file cannot be opened or read
 */
private static byte[] readFile(String filePath) throws IOException {
    File file = new File(filePath);

    // NOTE(review): the int cast truncates the length of files over 2 GB —
    // presumably fine for the small config files this reads, but confirm.
    int size = (int) file.length();
    byte[] bytes = new byte[size];
    // try-with-resources guarantees the stream is closed even when a read
    // throws; the original never closed it on the exception path (leak).
    try (DataInputStream dis = new DataInputStream(new FileInputStream(file))) {
        int read = 0;
        int numRead;
        // A single read() may return fewer bytes than requested, so keep
        // reading until the buffer is full or the stream ends.
        while (read < bytes.length && (numRead = dis.read(bytes, read, bytes.length - read)) >= 0) {
            read += numRead;
        }
    }
    return bytes;
}