Example usage for java.io DataInputStream read

List of usage examples for java.io DataInputStream read

Introduction

On this page you can find example usages of java.io.DataInputStream.read().

Prototype

public int read() throws IOException 

Source Link

Document

Reads the next byte of data from this input stream.

Usage

From source file:org.apache.hadoop.io.TestBufferedByteInputOutput.java

/**
 * Verifies that reads and available() fail once the buffered stream is closed.
 */
@Test
public void testCloseInput() throws IOException {
    LOG.info("Running test close input");
    setUp(1000);

    // The source holds 1000 bytes but the internal buffer only 100, so the
    // reader thread fills the buffer and blocks; 100 bytes remain readable.
    ByteArrayInputStream source = new ByteArrayInputStream(input);
    DataInputStream stream = BufferedByteInputStream.wrapInputStream(source, 100, 10);

    // Spin until the background thread has filled the internal buffer.
    while (stream.available() < 100) {
        sleep(10);
    }

    // After close() the internal buffer accepts no further writes.
    stream.close();

    try {
        stream.read(); // triggers an internal fill(), which must fail on a closed stream
        fail("Read should fail because we are closed");
    } catch (Exception e) {
        LOG.info("Expected exception " + e.getMessage());
    }

    // close() must be idempotent
    stream.close();

    try {
        stream.read(new byte[10], 0, 10);
        fail("Read should fail because we are closed");
    } catch (Exception e) {
        LOG.info("Expected exception " + e.getMessage());
    }

    try {
        stream.available();
        fail("Available should fail because we are closed");
    } catch (Exception e) {
        LOG.info("Expected exception " + e.getMessage());
    }
}

From source file:netinf.node.resolution.bocaching.impl.BOCacheImpl.java

/**
 * Downloads the bit-level object behind one of the DataObject's locators,
 * verifies the download against the object's SHA-1 hash attribute, and moves
 * it into the cache directory on success.
 *
 * @param dataObject the DataObject whose content should be cached
 * @return true if the object was cached now or was already cached; false if
 *         it has no hash or no locator yielded a valid download
 */
@Override
public boolean cache(DataObject dataObject) {
    String hash = DatamodelUtils.getHash(dataObject);
    String directory = server.getDirectory(); // .replace('\\', '/')

    if (!directory.endsWith(File.separator)) {
        directory += File.separator;
    }
    if (hash == null) {
        LOG.info("DataObject has no Hash and will not be cached");
        return false;
    }

    if (!contains(dataObject)) {
        LOG.log(DemoLevel.DEMO, "(BOCache ) Cache file...");
        List<Attribute> locators = dataObject
                .getAttributesForPurpose(DefinedAttributePurpose.LOCATOR_ATTRIBUTE.toString());
        // try locators in order until one delivers content with a matching hash
        for (Attribute attr : locators) {
            DataInputStream fis = null;
            String url = attr.getValue(String.class);
            try {
                String destination = directory + hash + ".tmp";
                transferDispatcher.getStreamAndSave(url, destination, true);

                // start reading
                fis = new DataInputStream(new FileInputStream(destination));

                // Skip the manually added content-type prefix. readFully
                // (unlike an unchecked read() loop) raises EOFException on a
                // truncated download instead of silently hashing garbage.
                int skipSize = fis.readInt();
                fis.readFully(new byte[skipSize]);

                // hash the remainder of the file (the actual payload)
                byte[] hashBytes = Hashing.hashSHA1(fis);
                IOUtils.closeQuietly(fis);
                if (hash.equalsIgnoreCase(Utils.hexStringFromBytes(hashBytes))) {
                    LOG.info("Hash of downloaded file is valid: " + url);
                    LOG.log(DemoLevel.DEMO, "(NODE ) Hash of downloaded file is valid. Will be cached.");
                    // drop the ".tmp" suffix to publish the file in the cache
                    File old = new File(destination);
                    File newFile = new File(destination.substring(0, destination.lastIndexOf('.')));
                    old.renameTo(newFile);
                    addLocator(dataObject);
                    cached.add(newFile.getName());

                    return true;
                } else {
                    LOG.log(DemoLevel.DEMO, "(NODE ) Hash of downloaded file is invalid. Trying next locator");
                    LOG.warn("Hash of downloaded file is not valid: " + url);
                    LOG.warn("Trying next locator");
                }

            } catch (FileNotFoundException ex) {
                LOG.warn("Error downloading:" + url);
            } catch (IOException e) {
                LOG.warn("Error hashing:" + url);
            } catch (Exception e) {
                LOG.warn("Error hashing, but file was OK: " + url);
                // e.printStackTrace();
            } finally {
                IOUtils.closeQuietly(fis);
                rebuildCache();
            }
        }
        LOG.warn("Could not find reliable source to cache: " + dataObject);
        return false;
    } else {
        LOG.log(DemoLevel.DEMO, "(NODE ) DataObject has already been cached. Adding locator.");
        addLocator(dataObject);
        return true;
    }
}

From source file:org.apache.fop.render.pdf.ImageRawPNGAdapter.java

/** {@inheritDoc} */
public void setup(PDFDocument doc) {
    super.setup(doc);
    // Determine how many interleaved bytes each pixel occupies in the
    // inflated PNG scanline data.
    ColorModel cm = ((ImageRawPNG) this.image).getColorModel();
    if (cm instanceof IndexColorModel) {
        // palette image: one index byte per pixel
        numberOfInterleavedComponents = 1;
    } else {
        // this can be 1 (gray), 2 (gray + alpha), 3 (rgb) or 4 (rgb + alpha)
        // numberOfInterleavedComponents = (cm.hasAlpha() ? 1 : 0) + cm.getNumColorComponents();
        numberOfInterleavedComponents = cm.getNumComponents();
    }

    // set up image compression for non-alpha channel
    FlateFilter flate;
    try {
        flate = new FlateFilter();
        flate.setApplied(true);
        flate.setPredictor(FlateFilter.PREDICTION_PNG_OPT);
        if (numberOfInterleavedComponents < 3) {
            // means palette (1) or gray (1) or gray + alpha (2)
            flate.setColors(1);
        } else {
            // means rgb (3) or rgb + alpha (4)
            flate.setColors(3);
        }
        flate.setColumns(image.getSize().getWidthPx());
        flate.setBitsPerComponent(this.getBitsPerComponent());
    } catch (PDFFilterException e) {
        throw new RuntimeException("FlateFilter configuration error", e);
    }
    this.pdfFilter = flate;
    this.disallowMultipleFilters();

    // Handle transparency channel if applicable; note that for palette images the transparency is
    // not TRANSLUCENT
    if (cm.hasAlpha() && cm.getTransparency() == ColorModel.TRANSLUCENT) {
        doc.getProfile().verifyTransparencyAllowed(image.getInfo().getOriginalURI());
        // TODO: Implement code to combine image with background color if transparency is not allowed
        // here we need to inflate the PNG pixel data, which includes alpha, separate the alpha channel
        // and then deflate it back again
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DeflaterOutputStream dos = new DeflaterOutputStream(baos, new Deflater());
        InputStream in = ((ImageRawStream) image).createInputStream();
        try {
            InflaterInputStream infStream = new InflaterInputStream(in, new Inflater());
            DataInputStream dataStream = new DataInputStream(infStream);
            // offset is the byte offset of the alpha component
            int offset = numberOfInterleavedComponents - 1; // 1 for GA, 3 for RGBA
            int numColumns = image.getSize().getWidthPx();
            int bytesPerRow = numberOfInterleavedComponents * numColumns;
            int filter;
            // read line by line; the first byte holds the filter
            // (each inflated scanline is: 1 filter byte + bytesPerRow pixel bytes)
            while ((filter = dataStream.read()) != -1) {
                byte[] bytes = new byte[bytesPerRow];
                dataStream.readFully(bytes, 0, bytesPerRow);
                // keep the row's filter byte, then emit only the alpha byte
                // of each pixel into the soft-mask stream
                dos.write((byte) filter);
                for (int j = 0; j < numColumns; j++) {
                    dos.write(bytes, offset, 1);
                    offset += numberOfInterleavedComponents;
                }
                // rewind to the alpha offset of the first pixel of the next row
                offset = numberOfInterleavedComponents - 1;
            }
            dos.close();
        } catch (IOException e) {
            throw new RuntimeException("Error processing transparency channel:", e);
        } finally {
            IOUtils.closeQuietly(in);
        }
        // set up alpha channel compression
        FlateFilter transFlate;
        try {
            transFlate = new FlateFilter();
            transFlate.setApplied(true);
            transFlate.setPredictor(FlateFilter.PREDICTION_PNG_OPT);
            transFlate.setColors(1);
            transFlate.setColumns(image.getSize().getWidthPx());
            transFlate.setBitsPerComponent(this.getBitsPerComponent());
        } catch (PDFFilterException e) {
            throw new RuntimeException("FlateFilter configuration error", e);
        }
        // register the extracted alpha channel as a grayscale soft mask image
        BitmapImage alphaMask = new BitmapImage("Mask:" + this.getKey(), image.getSize().getWidthPx(),
                image.getSize().getHeightPx(), baos.toByteArray(), null);
        alphaMask.setPDFFilter(transFlate);
        alphaMask.disallowMultipleFilters();
        alphaMask.setColorSpace(new PDFDeviceColorSpace(PDFDeviceColorSpace.DEVICE_GRAY));
        softMask = doc.addImage(null, alphaMask).makeReference();
    }
}

From source file:org.apache.fop.render.ps.ImageEncoderPNG.java

/** {@inheritDoc} */
public void writeTo(OutputStream out) throws IOException {
    // TODO: refactor this code with equivalent PDF code
    InputStream in = ((ImageRawStream) image).createInputStream();
    try {
        if (numberOfInterleavedComponents == 1 || numberOfInterleavedComponents == 3) {
            // means we have Gray, RGB, or Palette
            // no alpha present, so the raw stream can be copied as-is
            IOUtils.copy(in, out);
        } else {
            // means we have Gray + alpha or RGB + alpha
            int numBytes = numberOfInterleavedComponents - 1; // 1 for Gray, 3 for RGB
            int numColumns = image.getSize().getWidthPx();
            InflaterInputStream infStream = new InflaterInputStream(in, new Inflater());
            DataInputStream dataStream = new DataInputStream(infStream);
            int offset = 0;
            int bytesPerRow = numberOfInterleavedComponents * numColumns;
            int filter;
            // here we need to inflate the PNG pixel data, which includes alpha, separate the alpha
            // channel and then deflate the RGB channels back again
            // TODO: not using the baos below and using the original out instead (as happens in PDF)
            // would be preferable but that does not work with the rest of the postscript code; this
            // needs to be revisited
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DeflaterOutputStream dos = new DeflaterOutputStream(/* out */baos, new Deflater());
            // each inflated scanline is: 1 filter byte + bytesPerRow pixel bytes
            while ((filter = dataStream.read()) != -1) {
                byte[] bytes = new byte[bytesPerRow];
                dataStream.readFully(bytes, 0, bytesPerRow);
                // keep the filter byte, then copy only the color bytes of each
                // pixel (dropping the trailing alpha byte)
                dos.write((byte) filter);
                for (int j = 0; j < numColumns; j++) {
                    dos.write(bytes, offset, numBytes);
                    offset += numberOfInterleavedComponents;
                }
                // rewind to the first pixel of the next row
                offset = 0;
            }
            dos.close();
            IOUtils.copy(new ByteArrayInputStream(baos.toByteArray()), out);
        }
    } finally {
        IOUtils.closeQuietly(in);
    }
}

From source file:netinf.node.chunking.ChunkedBO.java

/**
 * Splits the file at {@code filePath} into fixed-size chunks and records a
 * SHA-1 hash per chunk in {@code chunkList}.
 *
 * @param filePath path of the file to chunk; must name an existing file
 * @throws FileNotFoundException if the path is null or the file does not exist
 */
public ChunkedBO(String filePath) throws FileNotFoundException {
    if (filePath == null) {
        throw new FileNotFoundException("Given file does not exist");
    }

    chunkList = new ArrayList<Chunk>();

    // generate Chunks
    File file = new File(filePath);
    DataInputStream readStream = null;
    if (file.exists()) {
        try {
            readStream = new DataInputStream(new FileInputStream(file));
            String hash = null;
            int chunkCount = 0;
            long fileSize = file.length();

            // Full-size chunks. readFully throws EOFException if the file is
            // shorter than expected, instead of the previous unchecked read()
            // loop that silently hashed -1 bytes (0xFF) after EOF.
            byte[] buffer = new byte[chunkSizeInBytes];
            for (chunkCount = 0; chunkCount < fileSize / chunkSizeInBytes; chunkCount++) {
                readStream.readFully(buffer);
                hash = Utils.hexStringFromBytes(Hashing.hashSHA1(new ByteArrayInputStream(buffer)));
                chunkList.add(new Chunk(hash, chunkCount));
            }

            // Trailing partial chunk — only exists when the file size is not
            // an exact multiple of the chunk size. (The previous condition
            // also produced a spurious empty chunk for exact multiples.)
            if (fileSize % chunkSizeInBytes != 0) {
                ByteArrayOutputStream outStream = new ByteArrayOutputStream(chunkSizeInBytes);

                // write the rest of the file
                int b;
                while ((b = readStream.read()) != -1) {
                    outStream.write(b);
                }

                byte[] tempBuf = outStream.toByteArray();
                hash = Utils.hexStringFromBytes(Hashing.hashSHA1(new ByteArrayInputStream(tempBuf)));
                chunkList.add(new Chunk(hash, chunkCount));
            }

            // set total number
            totalNoOfChunks = chunkList.size();

        } catch (IOException e) {
            LOG.warn("(ChunkedBO ) error while creating chunks: " + e.getMessage());
        } finally {
            IOUtils.closeQuietly(readStream);
        }
    } else {
        throw new FileNotFoundException("(ChunkedBO ) File not found: " + filePath);
    }
}

From source file:org.opencastproject.capture.impl.SchedulerImplTest.java

/**
 * Reads the resource behind {@code target} into a String.
 *
 * <p>NOTE(review): bytes are widened to chars one at a time, i.e. content is
 * effectively decoded as ISO-8859-1 regardless of the resource's actual
 * encoding — fine for ASCII fixtures, confirm before reusing elsewhere.
 *
 * @param target URL of the file to read
 * @return the file contents, or whatever was read before an IOException
 */
private String readFile(URL target) {
    StringBuilder sb = new StringBuilder();
    DataInputStream in = null;
    try {
        in = new DataInputStream(target.openStream());
        int c;
        while ((c = in.read()) != -1) {
            sb.append((char) c);
        }
    } catch (IOException e) {
        System.out.println("IOException reading file " + target);
    } finally {
        // closeQuietly is null-safe, so no explicit guard is needed
        IOUtils.closeQuietly(in);
    }
    return sb.toString();
}

From source file:br.org.indt.ndg.servlets.PostResults.java

/**
 * Reads a zlib-compressed payload from the request body and returns the
 * decompressed text.
 *
 * <p>Wire format: int uncompressed length, int compressed length, then the
 * compressed bytes.
 *
 * @param request servlet request carrying the compressed body
 * @return the decompressed text, or null on error (servletError is set)
 */
private String Decompress(HttpServletRequest request) {
    DataInputStream dis = null;
    DataInputStream objIn = null;
    String result = null;

    try {
        dis = new DataInputStream(request.getInputStream());

        int uncomplength = dis.readInt();
        int length = dis.readInt();

        // readFully fails fast with EOFException on a truncated body; the
        // previous read() loop silently stored -1 (0xFF) bytes after EOF
        byte[] compressed = new byte[length];
        dis.readFully(compressed);

        ByteArrayInputStream bais = new ByteArrayInputStream(compressed);
        ZInputStream zIn = new ZInputStream(bais);
        objIn = new DataInputStream(zIn);

        byte[] bytes = new byte[uncomplength];
        objIn.readFully(bytes);

        result = new String(bytes, ENCODING);

        log.info("Compressed length: " + length + " bytes");
        log.info("Decompressed length: " + result.getBytes().length + " bytes");
    } catch (EOFException e) {
        servletError = true;
        log.error(e);
    } catch (IOException e) {
        servletError = true;
        log.error(e);
    } catch (Exception e) {
        servletError = true;
        log.error(e);
    } finally {
        // close in finally so the streams are released even when an
        // exception interrupts the happy path (previously leaked)
        if (objIn != null) {
            try {
                objIn.close(); // also closes the wrapped ZInputStream
            } catch (IOException ignored) {
                // best-effort close
            }
        }
        if (dis != null) {
            try {
                dis.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
    }

    return result;
}

From source file:org.opensc.pkcs15.token.impl.CardOSToken.java

/**
 * Transmits the given SELECT FILE APDU and parses the FCI (File Control
 * Information) TLV response into a DF, which also becomes the current file.
 *
 * @param cmd the prepared SELECT FILE command APDU
 * @param targetPath the path of the DF being selected
 * @return the selected DF
 * @throws IOException on malformed FCI data or a file-ID mismatch
 */
private DF selectDFInternal(CommandAPDU cmd, TokenPath targetPath) throws IOException {

    try {
        ResponseAPDU resp = this.channel.transmit(cmd);

        DataInputStream dis = getSelectFileData(resp);

        // defaults: everything accessible unless the FCI says otherwise
        long bodySize = 0;
        int acLifeCycle = TokenFileAcl.AC_ALWAYS;
        int acUpdate = TokenFileAcl.AC_ALWAYS;
        int acAppend = TokenFileAcl.AC_ALWAYS;
        int acDeactivate = TokenFileAcl.AC_ALWAYS;
        int acActivate = TokenFileAcl.AC_ALWAYS;
        int acDelete = TokenFileAcl.AC_ALWAYS;
        int acAdmin = TokenFileAcl.AC_ALWAYS;
        int acCreate = TokenFileAcl.AC_ALWAYS;

        int tag;

        // TLV loop: one tag byte, one length byte, then n value bytes;
        // read() returns -1 at end of the FCI data
        while ((tag = dis.read()) >= 0) {
            int n = dis.read();
            if (n < 0)
                break;

            switch (tag) {
            case 0x81:
                // 0x81: DF body size, 2 bytes
                if (n != 2)
                    throw new IOException("Invalid length [" + n + "] of FCI tag 0x81.");
                bodySize = dis.readUnsignedShort();
                break;

            case 0x83:
                // 0x83: file ID — must match the ID we asked to select
                if (n != 2)
                    throw new IOException("Invalid length [" + n + "] of FCI tag 0x83.");
                int tpath = dis.readUnsignedShort();
                if (tpath != targetPath.getTailID())
                    throw new IOException("File ID [" + PathHelper.formatID(tpath)
                            + "] reported by SELECT FILE differs from requested ID ["
                            + PathHelper.formatID(targetPath.getTailID()) + "].");
                break;

            case 0x86:
                // 0x86: access conditions, one byte per operation in fixed
                // order; tolerate shorter values by keeping the defaults
                if (n >= 1)
                    acLifeCycle = dis.read();
                if (n >= 2)
                    acUpdate = dis.read();
                if (n >= 3)
                    acAppend = dis.read();
                if (n >= 4)
                    acDeactivate = dis.read();
                if (n >= 5)
                    acActivate = dis.read();
                if (n >= 6)
                    acDelete = dis.read();
                if (n >= 7)
                    acAdmin = dis.read();
                if (n >= 8)
                    acCreate = dis.read();

                if (n != 8)
                    log.warn("Invalid length [" + n + "] of FCI tag 0x86 for DF.");

                // consume any surplus bytes so the TLV stream stays aligned
                if (n > 8)
                    dis.skipBytes(n - 8);
                break;

            default:
                // unknown tag: consume its value and log it
                byte[] tmp = new byte[n];
                dis.readFully(tmp);
                log.warn("skipping FCI tag [0x" + Integer.toHexString(tag) + "], data [" + Util.asHex(tmp)
                        + "].");
            }
        }

        DF df = new DF(targetPath, bodySize, acLifeCycle, acUpdate, acAppend, acDeactivate, acActivate,
                acDelete, acAdmin, acCreate);

        this.currentFile = df;
        return df;

    } catch (CardException e) {
        throw new PKCS15Exception("Error sending select MF", e);
    }
}

From source file:org.opensc.pkcs15.token.impl.CardOSToken.java

/**
 * Selects an EF (elementary file) under the current DF by its 16-bit ID and
 * parses the FCI response into an EF, which also becomes the current file.
 *
 * @param path the 16-bit file ID of the EF to select
 * @return the selected EF
 * @throws IOException if no DF is selected, the FCI is malformed, or the
 *         reported file ID does not match the requested one
 */
@Override
public EF selectEF(int path) throws IOException {

    if (this.currentFile == null)
        throw new IOException("No current DF selected.");

    // SELECT FILE, P1=0x02, P2=0x00, no data -> select EF
    CommandAPDU cmd = new CommandAPDU(0x00, 0xA4, 0x02, 0x00, PathHelper.idToPath(path), DEFAULT_LE);

    try {
        ResponseAPDU resp = this.channel.transmit(cmd);

        DataInputStream dis = getSelectFileData(resp);

        // defaults: everything accessible unless the FCI says otherwise
        long fileSize = 0;
        int acRead = TokenFileAcl.AC_ALWAYS;
        int acUpdate = TokenFileAcl.AC_ALWAYS;
        int acAppend = TokenFileAcl.AC_ALWAYS;
        int acDeactivate = TokenFileAcl.AC_ALWAYS;
        int acActivate = TokenFileAcl.AC_ALWAYS;
        int acDelete = TokenFileAcl.AC_ALWAYS;
        int acAdmin = TokenFileAcl.AC_ALWAYS;
        int acIncrease = TokenFileAcl.AC_ALWAYS;
        int acDecrease = TokenFileAcl.AC_ALWAYS;

        int tag;

        // TLV loop: one tag byte, one length byte, then n value bytes;
        // read() returns -1 at end of the FCI data
        while ((tag = dis.read()) >= 0) {
            int n = dis.read();
            if (n < 0)
                break;

            switch (tag) {
            case 0x80:
                // 0x80: EF file size, 2 bytes
                if (n != 2)
                    throw new IOException("Invalid length [" + n + "] of FCI tag 0x80.");
                fileSize = dis.readUnsignedShort();
                break;

            case 0x83:
                // 0x83: file ID — must match the ID we asked to select
                if (n != 2)
                    throw new IOException("Invalid length [" + n + "] of FCI tag 0x83.");
                int tpath = dis.readUnsignedShort();
                if (tpath != path)
                    throw new IOException("File ID [" + PathHelper.formatID(tpath)
                            + "] reported by SELECT FILE differs from requested ID ["
                            + PathHelper.formatID(path) + "].");
                break;

            case 0x86:
                // 0x86: access conditions, one byte per operation in fixed
                // order; tolerate shorter values by keeping the defaults
                if (n >= 1)
                    acRead = dis.read();
                if (n >= 2)
                    acUpdate = dis.read();
                if (n >= 3)
                    acAppend = dis.read();
                if (n >= 4)
                    acDeactivate = dis.read();
                if (n >= 5)
                    acActivate = dis.read();
                if (n >= 6)
                    acDelete = dis.read();
                if (n >= 7)
                    acAdmin = dis.read();
                if (n >= 8)
                    acIncrease = dis.read();
                if (n >= 9)
                    acDecrease = dis.read();

                if (n != 9)
                    log.warn("Invalid length [" + n + "] of FCI tag 0x86 for EF.");

                // consume any surplus bytes so the TLV stream stays aligned
                if (n > 9)
                    dis.skipBytes(n - 9);
                break;

            default:
                // unknown tag: consume its value and log it
                byte[] tmp = new byte[n];
                dis.readFully(tmp);
                log.warn("skipping FCI tag [0x" + Integer.toHexString(tag) + "], data [" + Util.asHex(tmp)
                        + "].");
            }
        }

        EF ef = new EF(new TokenPath(this.currentFile.getPath(), path), fileSize, acRead, acUpdate, acAppend,
                acDeactivate, acActivate, acDelete, acAdmin, acIncrease, acDecrease);

        this.currentFile = ef;
        return ef;

    } catch (CardException e) {
        throw new PKCS15Exception("Error sending select MF", e);
    }
}

From source file:org.opensc.pkcs15.token.impl.CardOSToken.java

/**
 * Selects a child file (EF or DF) under the current DF by its 16-bit ID,
 * using the FCI response tags to decide which kind it is: tag 0x80 (file
 * size) implies an EF, tag 0x81 (body size) implies a DF.
 *
 * @param path the 16-bit file ID to select
 * @return the selected file (EF or DF), which also becomes the current file
 * @throws IOException if no DF is selected, the FCI is malformed, the
 *         reported file ID mismatches, or neither 0x80 nor 0x81 is present
 */
@Override
public TokenFile select(int path) throws IOException {

    if (this.currentFile == null)
        throw new IOException("No current DF selected.");

    // SELECT FILE, P1=0x00, P2=0x00, ID -> select EF or DF
    CommandAPDU cmd = new CommandAPDU(0x00, 0xA4, 0x00, 0x00, PathHelper.idToPath(path), DEFAULT_LE);

    try {
        ResponseAPDU resp = this.channel.transmit(cmd);

        DataInputStream dis = getSelectFileData(resp);

        // -1 sentinels: whichever of fileSize/bodySize the FCI sets decides
        // whether the selected file is an EF or a DF
        long bodySize = -1;
        long fileSize = -1;
        int acRead = TokenFileAcl.AC_ALWAYS;
        int acUpdate = TokenFileAcl.AC_ALWAYS;
        int acAppend = TokenFileAcl.AC_ALWAYS;
        int acDeactivate = TokenFileAcl.AC_ALWAYS;
        int acActivate = TokenFileAcl.AC_ALWAYS;
        int acDelete = TokenFileAcl.AC_ALWAYS;
        int acAdmin = TokenFileAcl.AC_ALWAYS;
        int acIncrease = TokenFileAcl.AC_ALWAYS;
        int acDecrease = TokenFileAcl.AC_ALWAYS;

        int tag;

        // TLV loop: one tag byte, one length byte, then n value bytes;
        // read() returns -1 at end of the FCI data
        while ((tag = dis.read()) >= 0) {
            int n = dis.read();
            if (n < 0)
                break;

            switch (tag) {
            case 0x80:
                // 0x80: EF file size, 2 bytes
                if (n != 2)
                    throw new IOException("Invalid length [" + n + "] of FCI tag 0x80.");
                fileSize = dis.readUnsignedShort();
                break;

            case 0x83:
                // 0x83: file ID — must match the ID we asked to select
                if (n != 2)
                    throw new IOException("Invalid length [" + n + "] of FCI tag 0x83.");
                int tpath = dis.readUnsignedShort();
                if (tpath != path)
                    throw new IOException("File ID [" + PathHelper.formatID(tpath)
                            + "] reported by SELECT FILE differs from requested ID ["
                            + PathHelper.formatID(path) + "].");
                break;

            case 0x81:
                // 0x81: DF body size, 2 bytes
                if (n != 2)
                    throw new IOException("Invalid length [" + n + "] of FCI tag 0x81.");
                bodySize = dis.readUnsignedShort();
                break;

            case 0x86:
                // 0x86: access conditions, one byte per operation in fixed
                // order; tolerate shorter values by keeping the defaults
                if (n >= 1)
                    acRead = dis.read();
                if (n >= 2)
                    acUpdate = dis.read();
                if (n >= 3)
                    acAppend = dis.read();
                if (n >= 4)
                    acDeactivate = dis.read();
                if (n >= 5)
                    acActivate = dis.read();
                if (n >= 6)
                    acDelete = dis.read();
                if (n >= 7)
                    acAdmin = dis.read();
                if (n >= 8)
                    acIncrease = dis.read();
                if (n >= 9)
                    acDecrease = dis.read();

                // 8 bytes (DF) and 9 bytes (EF) are both acceptable here
                if (n != 9 && n != 8)
                    log.warn("Invalid length [" + n + "] of FCI tag 0x86 for EF.");

                // consume any surplus bytes so the TLV stream stays aligned
                if (n > 9)
                    dis.skipBytes(n - 9);
                break;

            default:
                // unknown tag: consume its value and log it
                byte[] tmp = new byte[n];
                dis.readFully(tmp);
                log.warn("skipping FCI tag [0x" + Integer.toHexString(tag) + "], data [" + Util.asHex(tmp)
                        + "].");
            }
        }

        if (fileSize >= 0)
            this.currentFile = new EF(new TokenPath(this.currentFile.getPath(), path), fileSize, acRead,
                    acUpdate, acAppend, acDeactivate, acActivate, acDelete, acAdmin, acIncrease, acDecrease);
        else if (bodySize >= 0)
            // NOTE(review): the last DF argument here is acIncrease, whereas
            // selectDFInternal passes acCreate in the same slot — confirm
            // this mapping is intended for the generic select path.
            this.currentFile = new DF(new TokenPath(this.currentFile.getPath(), path), bodySize, acRead,
                    acUpdate, acAppend, acDeactivate, acActivate, acDelete, acAdmin, acIncrease);
        else
            throw new IOException("No 0x80 or 0x81 tag specified in order to distinguish between DF an EF.");

        return this.currentFile;

    } catch (CardException e) {
        throw new PKCS15Exception("Error sending SELECT FILE", e);
    }
}