Example usage for java.io DataInputStream readFully

Introduction

On this page you can find example usage of java.io.DataInputStream.readFully(byte[]), collected from the source files listed below.

Prototype

public final void readFully(byte b[]) throws IOException 

Document

See the general contract of the readFully method of DataInput: the call blocks until b.length bytes have been read, the end of the stream is detected (in which case an EOFException is thrown), or an I/O error occurs.
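
A minimal, self-contained sketch (not taken from any of the projects below) showing the typical pattern: allocate a buffer of the exact size you expect, then let readFully block until the buffer is completely filled or an EOFException is thrown.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class ReadFullyDemo {
    public static void main(String[] args) throws IOException {
        byte[] source = "hello readFully".getBytes(StandardCharsets.UTF_8);
        byte[] buffer = new byte[source.length];
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(source))) {
            // Blocks until buffer.length bytes have been read,
            // or throws EOFException if the stream ends first.
            in.readFully(buffer);
        }
        System.out.println(new String(buffer, StandardCharsets.UTF_8));
    }
}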

Usage

From source file:org.agnitas.util.AgnUtils.java

/**
 * Reads a file.
 */
public static String readFile(String path) {
    String value = null;

    try {
        File aFile = new File(path);
        byte[] b = new byte[(int) aFile.length()];
        DataInputStream in = new DataInputStream(new FileInputStream(aFile));
        in.readFully(b);
        value = new String(b);
        in.close();
    } catch (Exception e) {
        value = null;
    }

    return value;
}
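
Note that the snippet above decodes the bytes with the platform default charset and does not close the stream if an exception occurs before close(). A possible variant (an editorial sketch, not part of the AgnUtils source) using try-with-resources and an explicit charset:

public static String readFile(String path) {
    File aFile = new File(path);
    byte[] b = new byte[(int) aFile.length()];
    // try-with-resources closes the stream even when readFully fails
    try (DataInputStream in = new DataInputStream(new FileInputStream(aFile))) {
        in.readFully(b);
        return new String(b, java.nio.charset.StandardCharsets.UTF_8);
    } catch (IOException e) {
        return null;
    }
}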

From source file:bobs.is.compress.sevenzip.SevenZFile.java

private DataInputStream readEncodedHeader(final DataInputStream header, final Archive archive,
        final byte[] password) throws IOException {
    readStreamsInfo(header, archive);

    // FIXME: merge with buildDecodingStream()/buildDecoderStack() at some stage?
    final Folder folder = archive.folders[0];
    final int firstPackStreamIndex = 0;
    final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos + 0;

    file.seek(folderOffset);
    InputStream inputStreamStack = new BoundedRandomAccessFileInputStream(file,
            archive.packSizes[firstPackStreamIndex]);
    for (final Coder coder : folder.getOrderedCoders()) {
        if (coder.numInStreams != 1 || coder.numOutStreams != 1) {
            throw new IOException("Multi input/output stream coders are not yet supported");
        }
        inputStreamStack = Coders.addDecoder(fileName, inputStreamStack, folder.getUnpackSizeForCoder(coder),
                coder, password);
    }
    if (folder.hasCrc) {
        inputStreamStack = new CRC32VerifyingInputStream(inputStreamStack, folder.getUnpackSize(), folder.crc);
    }
    final byte[] nextHeader = new byte[(int) folder.getUnpackSize()];
    final DataInputStream nextHeaderInputStream = new DataInputStream(inputStreamStack);
    try {
        nextHeaderInputStream.readFully(nextHeader);
    } finally {
        nextHeaderInputStream.close();
    }
    return new DataInputStream(new ByteArrayInputStream(nextHeader));
}

From source file:org.getspout.spout.packet.PacketAddonData.java

@SuppressWarnings("unchecked")
public void readData(DataInputStream input) throws IOException {
    String id = PacketUtil.readString(input);

    boolean sandboxed = SpoutClient.isSandboxed();
    SpoutClient.enableSandbox();

    try {
        Class<? extends AddonPacket> packetClass = AddonPacket.getPacketFromId(id);
        Constructor<? extends AddonPacket> constructor = null;
        Constructor<? extends AddonPacket>[] constructors = (Constructor<? extends AddonPacket>[]) packetClass
                .getConstructors();
        for (Constructor<? extends AddonPacket> c : constructors) {
            if (c.getGenericParameterTypes().length == 0) {
                constructor = c;
                break;
            }
        }
        packet = constructor.newInstance();
    } catch (Exception e) {
        e.printStackTrace();
    }

    if (!sandboxed) {
        SpoutClient.disableSandbox();
    }

    int size = input.readInt();
    compressed = input.readBoolean();
    data = new byte[size];
    input.readFully(data);
}

From source file:org.apache.jackrabbit.core.persistence.util.Serializer.java

/**
 * Deserializes a <code>PropertyState</code> object from the given binary
 * <code>stream</code>. Binary values are retrieved from the specified
 * <code>BLOBStore</code>.
 *
 * @param state     <code>state</code> to deserialize
 * @param stream    the stream where the <code>state</code> should be
 *                  deserialized from
 * @param blobStore handler for BLOB data
 * @throws Exception if an error occurs during the deserialization
 * @see #serialize(PropertyState, OutputStream, BLOBStore)
 */
public static void deserialize(PropertyState state, InputStream stream, BLOBStore blobStore) throws Exception {
    DataInputStream in = new DataInputStream(stream);

    // type
    int type = in.readInt();
    state.setType(type);
    // multiValued
    boolean multiValued = in.readBoolean();
    state.setMultiValued(multiValued);
    // definitionId
    in.readUTF();
    // modCount
    short modCount = in.readShort();
    state.setModCount(modCount);
    // values
    int count = in.readInt(); // count
    InternalValue[] values = new InternalValue[count];
    for (int i = 0; i < count; i++) {
        InternalValue val;
        if (type == PropertyType.BINARY) {
            String s = in.readUTF(); // value (i.e. blobId)
            // special handling required for binary value:
            // the value stores the id of the BLOB data
            // in the BLOB store
            if (blobStore instanceof ResourceBasedBLOBStore) {
                // optimization: if the BLOB store is resource-based
                // retrieve the resource directly rather than having
                // to read the BLOB from an input stream
                FileSystemResource fsRes = ((ResourceBasedBLOBStore) blobStore).getResource(s);
                val = InternalValue.create(fsRes);
            } else {
                InputStream is = blobStore.get(s);
                try {
                    val = InternalValue.create(is);
                } finally {
                    try {
                        is.close();
                    } catch (IOException e) {
                        // ignore
                    }
                }
            }
        } else {
            /**
             * because writeUTF(String) has a size limit of 65k,
             * Strings are serialized as <length><byte[]>
             */
            //s = in.readUTF();   // value
            int len = in.readInt(); // length of byte[]
            byte[] bytes = new byte[len];
            in.readFully(bytes); // byte[]
            String s = new String(bytes, ENCODING);
            val = InternalValue.valueOf(s, type);
        }
        values[i] = val;
    }
    state.setValues(values);
}

From source file:net.jradius.packet.attribute.AttributeFactory.java

/**
 * Creates a new RadiusAttribute
 * @param vendor The VendorID of the attribute (if one)
 * @param type The Attribute Type
 * @param value The Attribute Value
 * @param op The Attribute Operator
 * @return Returns the newly created RadiusAttribute
 */
public static RadiusAttribute newAttribute(long vendor, long type, byte[] value, int op, boolean pool) {
    RadiusAttribute attr = null;

    try {
        if (vendor > 1 || type == 26) {
            boolean onWire = (vendor < 1);
            DataInputStream input = null;

            if (onWire) {
                /*
                 *  We are parsing an off-the-wire packet
                 */
                ByteArrayInputStream bais = new ByteArrayInputStream(value);
                input = new DataInputStream(bais);

                vendor = RadiusFormat.readUnsignedInt(input);
                type = RadiusFormat.readUnsignedByte(input);
            }

            Long key = new Long(vendor << 16 | type);

            if (pool) {
                attr = borrow(key);
            }

            if (attr == null) {
                attr = vsa(vendor, type);
            }

            if (onWire) {
                VSAttribute vsa = (VSAttribute) attr;
                int vsaLength = 0;
                int vsaHeaderLen = 2;
                switch (vsa.getLengthLength()) {
                case 1:
                    vsaLength = RadiusFormat.readUnsignedByte(input);
                    break;
                case 2:
                    vsaLength = RadiusFormat.readUnsignedShort(input);
                    vsaHeaderLen++;
                    break;
                case 4:
                    vsaLength = (int) RadiusFormat.readUnsignedInt(input);
                    vsaHeaderLen += 3;
                    break;
                }
                if (vsa.hasContinuationByte) {
                    vsa.continuation = (short) RadiusFormat.readUnsignedByte(input);
                    vsaHeaderLen++;
                }
                byte[] newValue = new byte[vsaLength - vsaHeaderLen];
                input.readFully(newValue);
                input.close();
                value = newValue;
            }
        } else {
            if (pool) {
                attr = borrow(type);
            }

            if (attr == null) {
                attr = attr(type);
            }
        }

        if (value != null)
            attr.setValue(value);
        else
            attr.setValue(new byte[] {});
        if (op > -1)
            attr.setAttributeOp(op);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return attr;
}

From source file:SoundManagerTest.java

/**
 * Loads a Sound from an AudioInputStream.
 */
public Sound getSound(AudioInputStream audioStream) {
    if (audioStream == null) {
        return null;
    }

    // get the number of bytes to read
    int length = (int) (audioStream.getFrameLength() * audioStream.getFormat().getFrameSize());

    // read the entire stream
    byte[] samples = new byte[length];
    DataInputStream is = new DataInputStream(audioStream);
    try {
        is.readFully(samples);
        is.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }

    // return the samples
    return new Sound(samples);
}

From source file:br.org.indt.ndg.servlets.PostResults.java

private String Decompress(HttpServletRequest request) {
    DataInputStream dis = null;
    DataInputStream objIn = null;
    ByteArrayOutputStream baos = null;
    String result = null;

    try {
        dis = new DataInputStream(request.getInputStream());
        baos = new ByteArrayOutputStream();

        int length, uncomplength = 0;
        int data = 0;

        uncomplength = dis.readInt();
        length = dis.readInt();

        for (int i = 0; i < length; i++) {
            data = dis.read();
            baos.write((byte) data);
        }

        ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
        ZInputStream zIn = new ZInputStream(bais);
        objIn = new DataInputStream(zIn);

        byte[] bytes = new byte[uncomplength];
        objIn.readFully(bytes);

        result = new String(bytes, ENCODING);

        log.info("Compressed length: " + length + " bytes");
        log.info("Decompressed length: " + result.getBytes().length + " bytes");

        zIn.close();
        dis.close();
        baos.close();
        objIn.close();
    } catch (EOFException e) {
        servletError = true;
        log.error(e);
    } catch (IOException e) {
        servletError = true;
        log.error(e);
    } catch (Exception e) {
        servletError = true;
        log.error(e);
    }

    return result;
}

From source file:Filter3dTest.java

/**
 * Gets the samples from an AudioInputStream as an array of bytes.
 */
private byte[] getSamples(AudioInputStream audioStream) {
    // get the number of bytes to read
    int length = (int) (audioStream.getFrameLength() * format.getFrameSize());

    // read the entire stream
    byte[] samples = new byte[length];
    DataInputStream is = new DataInputStream(audioStream);
    try {
        is.readFully(samples);
    } catch (IOException ex) {
        ex.printStackTrace();
    }

    // return the samples
    return samples;
}

From source file:org.apache.hadoop.security.SaslRpcClient.java

/**
 * Do client side SASL authentication with server via the given InputStream
 * and OutputStream
 * 
 * @param inS
 *          InputStream to use
 * @param outS
 *          OutputStream to use
 * @return true if connection is set up, or false if needs to switch 
 *             to simple Auth.
 * @throws IOException
 */
public boolean saslConnect(InputStream inS, OutputStream outS) throws IOException {
    DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS));
    DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(outS));

    try {
        byte[] saslToken = new byte[0];
        if (saslClient.hasInitialResponse())
            saslToken = saslClient.evaluateChallenge(saslToken);
        if (saslToken != null) {
            outStream.writeInt(saslToken.length);
            outStream.write(saslToken, 0, saslToken.length);
            outStream.flush();
            if (LOG.isDebugEnabled())
                LOG.debug("Have sent token of size " + saslToken.length + " from initSASLContext.");
        }
        if (!saslClient.isComplete()) {
            readStatus(inStream);
            int len = inStream.readInt();
            if (len == SaslRpcServer.SWITCH_TO_SIMPLE_AUTH) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Server asks us to fall back to simple auth.");
                saslClient.dispose();
                return false;
            }
            saslToken = new byte[len];
            if (LOG.isDebugEnabled())
                LOG.debug("Will read input token of size " + saslToken.length
                        + " for processing by initSASLContext");
            inStream.readFully(saslToken);
        }

        while (!saslClient.isComplete()) {
            saslToken = saslClient.evaluateChallenge(saslToken);
            if (saslToken != null) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Will send token of size " + saslToken.length + " from initSASLContext.");
                outStream.writeInt(saslToken.length);
                outStream.write(saslToken, 0, saslToken.length);
                outStream.flush();
            }
            if (!saslClient.isComplete()) {
                readStatus(inStream);
                saslToken = new byte[inStream.readInt()];
                if (LOG.isDebugEnabled())
                    LOG.debug("Will read input token of size " + saslToken.length
                            + " for processing by initSASLContext");
                inStream.readFully(saslToken);
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client context established. Negotiated QoP: "
                    + saslClient.getNegotiatedProperty(Sasl.QOP));
        }
        return true;
    } catch (IOException e) {
        try {
            saslClient.dispose();
        } catch (SaslException ignored) {
            // ignore further exceptions during cleanup
        }
        throw e;
    }
}

From source file:org.carbondata.processing.util.LevelSortIndexWriterThread.java

private MemberSortModel[] getLevelData() throws IOException {
    DataInputStream fileChannel = null;
    long currPositionIndex = 0;
    long size = 0;
    ByteBuffer buffer = null;

    // CHECKSTYLE:OFF
    boolean enableEncoding = Boolean
            .valueOf(CarbonProperties.getInstance().getProperty(CarbonCommonConstants.ENABLE_BASE64_ENCODING,
                    CarbonCommonConstants.ENABLE_BASE64_ENCODING_DEFAULT));
    // CHECKSTYLE:ON
    try {
        fileChannel = FileFactory.getDataInputStream(levelFilePath, FileFactory.getFileType(levelFilePath));
        CarbonFile memberFile = FileFactory.getCarbonFile(levelFilePath,
                FileFactory.getFileType(levelFilePath));
        size = memberFile.getSize() - 4;
        long skipSize = size;
        long actualSkipSize = 0;
        while (actualSkipSize != size) {
            actualSkipSize += fileChannel.skip(skipSize);
            skipSize = skipSize - actualSkipSize;
        }
        maxSurrogate = fileChannel.readInt();
    } catch (IOException e) {
        LOGGER.error(e, "problem while reading the level file");
        throw e;
    } finally {
        CarbonUtil.closeStreams(fileChannel);
    }

    try {
        fileChannel = FileFactory.getDataInputStream(levelFilePath, FileFactory.getFileType(levelFilePath));
        // CHECKSTYLE:OFF
        buffer = ByteBuffer.allocate((int) size);
        // CHECKSTYLE:ON
        fileChannel.readFully(buffer.array());
        buffer.rewind();
    } catch (IOException e) {
        LOGGER.error(e, "problem while reading the level file");
        throw e;
    } finally {
        CarbonUtil.closeStreams(fileChannel);
    }
    minSurrogate = buffer.getInt();
    MemberSortModel[] surogateKeyArrays = new MemberSortModel[maxSurrogate - minSurrogate + 1];
    int surrogateKeyIndex = minSurrogate;
    currPositionIndex += 4;
    int current = 0;

    while (currPositionIndex < size) {
        int len = buffer.getInt();
        // CHECKSTYLE:OFF
        // CHECKSTYLE:ON
        currPositionIndex += 4;
        byte[] rowBytes = new byte[len];
        buffer.get(rowBytes);
        currPositionIndex += len;
        String memberName = null;// CHECKSTYLE:OFF
        if (!memberDataType.equals(DataType.STRING)) {
            if (enableEncoding) {
                memberName = new String(Base64.decodeBase64(rowBytes), Charset.defaultCharset());
            } else {
                memberName = new String(rowBytes, Charset.defaultCharset());
            }
            surogateKeyArrays[current] = new MemberSortModel(surrogateKeyIndex, memberName, null,
                    memberDataType);
        } else {
            if (enableEncoding) {
                rowBytes = Base64.decodeBase64(rowBytes);
            }
            surogateKeyArrays[current] = new MemberSortModel(surrogateKeyIndex, null, rowBytes, memberDataType);
        }
        surrogateKeyIndex++;
        current++;
    }
    return surogateKeyArrays;
}