Example usage for java.nio ByteBuffer capacity

List of usage examples for java.nio ByteBuffer capacity

Introduction

In this page you can find the example usage for java.nio ByteBuffer capacity.

Prototype

public final int capacity() 

Source Link

Document

Returns the capacity of this buffer.

Usage

From source file:org.apache.hadoop.hdfs.tools.offlineEditsViewer.TestOfflineEditsViewer.java

/**
 * Compare two edits-log files for equality, ignoring trailing filler at the
 * end; for an edits log the trailing filler bytes do not make any difference.
 *
 * @param filenameSmall first file to compare (doesn't have to be smaller)
 * @param filenameLarge second file to compare (doesn't have to be larger)
 * @return true if both files are byte-equal up to the shorter file's length
 *         and every remaining byte of the longer file is the OP_INVALID
 *         filler opcode; false otherwise
 * @throws IOException if either file cannot be loaded
 */
private boolean filesEqualIgnoreTrailingZeros(String filenameSmall, String filenameLarge) throws IOException {

    ByteBuffer small = ByteBuffer.wrap(DFSTestUtil.loadFile(filenameSmall));
    ByteBuffer large = ByteBuffer.wrap(DFSTestUtil.loadFile(filenameLarge));
    // OEV outputs with the latest layout version, so tweak the old file's
    // contents to have latest version so checkedin binary files don't
    // require frequent updates
    small.put(3, (byte) NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);

    // The parameter names are only suggestive: swap buffers (and names, for
    // symmetry) so that "small" really is the shorter of the two.
    if (small.capacity() > large.capacity()) {
        ByteBuffer tmpByteBuffer = small;
        small = large;
        large = tmpByteBuffer;
        String tmpFilename = filenameSmall;
        filenameSmall = filenameLarge;
        filenameLarge = tmpFilename;
    }

    // compare from 0 to capacity of small;
    // the rest of large is checked separately below
    small.position(0);
    small.limit(small.capacity());
    large.position(0);
    large.limit(small.capacity());

    // ByteBuffer.equals() compares only the bytes between position and limit
    if (!small.equals(large)) {
        return false;
    }

    // everything after small's length must be the OP_INVALID filler opcode
    int i = large.limit();
    large.clear(); // resets limit to capacity so the loop can scan the tail
    for (; i < large.capacity(); i++) {
        if (large.get(i) != FSEditLogOpCodes.OP_INVALID.getOpCode()) {
            return false;
        }
    }

    return true;
}

From source file:com.twinsoft.convertigo.beans.steps.WriteXMLStep.java

/**
 * Serializes the element nodes of {@code nodeList} and splices them into the
 * XML file at {@code filePath}, just before the root element's closing tag.
 * When the file should not be appended to, it is first recreated with an XML
 * declaration and an empty root element.
 *
 * @param filePath path of the target XML file (resolved via getAbsoluteFilePath)
 * @param nodeList nodes to append; non-element nodes are skipped
 * @throws EngineException if nodeList is null, the file is malformed XML, or
 *         an I/O error occurs
 */
protected void writeFile(String filePath, NodeList nodeList) throws EngineException {
    if (nodeList == null) {
        throw new EngineException("Unable to write to xml file: element is Null");
    }

    String fullPathName = getAbsoluteFilePath(filePath);
    // Serialize all writers of the same file through a per-file mutex.
    synchronized (Engine.theApp.filePropertyManager.getMutex(fullPathName)) {
        try {
            String encoding = getEncoding();
            encoding = encoding.length() > 0 && Charset.isSupported(encoding) ? encoding : "UTF-8";
            if (!isReallyAppend(fullPathName)) {
                // Fresh file: write an XML declaration plus an empty,
                // self-closing root tag that the splice logic below expands.
                String tTag = defaultRootTagname.length() > 0 ? StringUtils.normalize(defaultRootTagname)
                        : "document";
                FileUtils.write(new File(fullPathName),
                        "<?xml version=\"1.0\" encoding=\"" + encoding + "\"?>\n<" + tTag + "/>", encoding);
            }

            StringBuffer content = new StringBuffer();

            /* build the content to insert; only element children are appended */
            for (int i = 0; i < nodeList.getLength(); i++) {
                if (nodeList.item(i).getNodeType() == Node.ELEMENT_NODE) {
                    content.append(XMLUtils.prettyPrintElement((Element) nodeList.item(i), true, true));
                }
            }

            /* detect the file's declared encoding from its XML declaration */
            RandomAccessFile randomAccessFile = null;
            try {
                randomAccessFile = new RandomAccessFile(fullPathName, "rw");
                FileChannel fc = randomAccessFile.getChannel();
                // 60 bytes is enough to cover a typical <?xml ... ?> declaration
                ByteBuffer buf = ByteBuffer.allocate(60);
                int nb = fc.read(buf);
                String sbuf = new String(buf.array(), 0, nb, "ASCII");
                String enc = sbuf.replaceFirst("^.*encoding=\"", "").replaceFirst("\"[\\d\\D]*$", "");

                // fall back to the configured encoding if the declared one is unknown
                if (!Charset.isSupported(enc)) {
                    enc = encoding;
                }

                buf.clear();

                /* retrieve last header tag: read the trailing buf.capacity() bytes */
                long pos = fc.size() - buf.capacity();
                if (pos < 0) {
                    pos = 0;
                }

                nb = fc.read(buf, pos);

                // NOTE(review): reference equality on Charset instances;
                // Charset.equals() would be the conventional comparison.
                boolean isUTF8 = Charset.forName(enc) == Charset.forName("UTF-8");

                if (isUTF8) {
                    // Advance the start offset until decoding begins on a valid
                    // UTF-8 character boundary.
                    // NOTE(review): startsWith("") is always true, so this loop
                    // breaks on the first iteration; the original predicate
                    // (likely a replacement-character check) appears to have
                    // been lost in transcription -- verify against upstream.
                    for (int i = 0; i < buf.capacity(); i++) {
                        sbuf = new String(buf.array(), i, nb - i, enc);
                        if (!sbuf.startsWith("")) {
                            pos += i;
                            break;
                        }
                    }
                } else {
                    sbuf = new String(buf.array(), 0, nb, enc);
                }

                int lastTagIndex = sbuf.lastIndexOf("</");
                if (lastTagIndex == -1) {
                    // No explicit closing tag: the root may be self-closing (<root/>)
                    int iend = sbuf.lastIndexOf("/>");
                    if (iend != -1) {
                        lastTagIndex = sbuf.lastIndexOf("<", iend);
                        String tagname = sbuf.substring(lastTagIndex + 1, iend);
                        // Expand <root/> into <root>\n...content...</root>
                        content = new StringBuffer(
                                "<" + tagname + ">\n" + content.toString() + "</" + tagname + ">");
                    } else {
                        throw new EngineException("Malformed XML file");
                    }
                } else {
                    // Re-emit the original closing tag after the new content
                    content.append(sbuf.substring(lastTagIndex));

                    if (isUTF8) {
                        // Convert the character index back into a byte offset
                        // (multi-byte characters make them differ under UTF-8)
                        String before = sbuf.substring(0, lastTagIndex);
                        lastTagIndex = before.getBytes(enc).length;
                    }
                }
                // Overwrite the file starting just before the old closing tag
                fc.write(ByteBuffer.wrap(content.toString().getBytes(enc)), pos + lastTagIndex);
            } finally {
                if (randomAccessFile != null) {
                    randomAccessFile.close();
                }
            }
        } catch (IOException e) {
            throw new EngineException("Unable to write to xml file", e);
        } finally {
            Engine.theApp.filePropertyManager.releaseMutex(fullPathName);
        }
    }
}

From source file:fuse4j.hadoopfs.FuseHdfsClient.java

/**
 * write()/*w w w.j a  va 2s.  c o  m*/
 */
public int write(String path, Object fh, boolean isWritepage, ByteBuffer buf, long offset)
        throws FuseException {
    log.info("write(): " + path + " offset: " + offset + " len: " + buf.capacity() + "\n");

    HdfsFileContext ctxt = pinFileContext(path);

    if (ctxt == null) {
        // file not opened
        return FuseException.EPERM;
    }

    if (!ctxt.openedForWrite) {
        unpinFileContext(path);

        // file not opened for write")
        return FuseException.EPERM;
    }

    boolean status = hdfs.write(ctxt.hdfsFile, buf, offset);

    unpinFileContext(path);

    if (!status) {
        // write failed
        return FuseException.EACCES;
    }

    return 0;
}

From source file:jext2.DataInode.java

/**
 * Write data in buffer to disk. This works best when whole blocks which
 * are a multiple of blocksize in size are written. Partial blocks are
 * written by first reading the block, merging the new data into that
 * buffer, and then writing the merged buffer back to disk.
 *
 * @param buf    data to write; the whole capacity of the buffer is written
 * @param offset byte offset in the file at which writing starts
 * @return number of bytes written (equals buf.capacity() on success)
 * @throws NoSpaceLeftOnDevice
 * @throws FileTooLarge
 */
public int writeData(ByteBuffer buf, long offset) throws JExt2Exception, NoSpaceLeftOnDevice, FileTooLarge {
    /*
     * Note on sparse file support:
     * getBlocksAllocate does not care if there are holes. Just write as much
     * blocks as the buffer requires at the desired location an set inode.size
     * accordingly.
     */

    int blocksize = superblock.getBlocksize();
    long start = offset / blocksize;
    // NOTE(review): this rounds the block count up generously; combined with
    // the startOff adjustment below the loop may iterate one block past the
    // data (writing zero bytes there) -- confirm getBlocksAllocate tolerates it.
    long end = (buf.capacity() + blocksize) / blocksize + start;
    int startOff = (int) (offset % blocksize); // offset within the first block

    // A misaligned start can push the data into one extra block.
    if (startOff > 0)
        end += 1;

    buf.rewind();

    while (start < end) {
        // Allocate (or look up) the single block for this iteration.
        LinkedList<Long> blockNrs = accessData().getBlocksAllocate(start, 1);
        int bytesLeft = buf.capacity() - buf.position();

        if (bytesLeft < blocksize || startOff > 0) { /* write partial block */
            // Read-modify-write: fetch the existing block, overlay the new
            // bytes starting at startOff, and write the whole block back.
            ByteBuffer onDisk = blockAccess.read(blockNrs.getFirst());

            onDisk.position(startOff);

            assert onDisk.limit() == blocksize;

            // Cap the transfer at whichever is smaller: remaining input or
            // remaining room in the on-disk block.
            buf.limit(buf.position() + Math.min(bytesLeft, onDisk.remaining()));

            onDisk.put(buf);

            onDisk.position(startOff);
            blockAccess.writeFromBufferUnsynchronized((blockNrs.getFirst() & 0xffffffff) * blocksize, onDisk);
        } else { /* write whole block */
            buf.limit(buf.position() + blocksize);

            blockAccess.writeFromBufferUnsynchronized((blockNrs.getFirst() & 0xffffffff) * blocksize, buf);
        }

        start += 1;
        startOff = 0; // only the first block can start mid-block
        accessData().unlockHierarchyChanges();

    }
    int written = buf.position();
    assert written == buf.capacity();

    /* increase inode.size if we grew the file */
    if (offset + written > getSize()) { /* file grew */
        setStatusChangeTime(new Date());
        setSize(offset + written);
    }

    return written;
}

From source file:io.github.dsheirer.source.tuner.hackrf.HackRFTunerController.java

/**
 * Reads a primitive value from the device via a USB control transfer and
 * returns it as an int, byte-swapping multi-byte values.
 *
 * @param length width of the primitive in bytes: 1 (byte), 2 (short) or 4 (int)
 * @return the value read, widened to int
 * @throws IllegalArgumentException if length is not 1, 2 or 4
 * @throws UsbException if the transfer fails or returns an unexpected size
 */
public int read(Request request, int value, int index, int length) throws UsbException {
    final boolean primitiveWidth = length == 1 || length == 2 || length == 4;
    if (!primitiveWidth) {
        throw new IllegalArgumentException(
                "invalid length [" + length + "] must be: byte=1, short=2, int=4 to read a primitive");
    }

    final ByteBuffer buffer = readArray(request, value, index, length);
    final byte[] data = new byte[buffer.capacity()];
    buffer.get(data);

    // The device byte order is the opposite of Java's, hence the swapped reads.
    if (data.length == 1) {
        return data[0];
    }
    if (data.length == 2) {
        return EndianUtils.readSwappedShort(data, 0);
    }
    if (data.length == 4) {
        return EndianUtils.readSwappedInteger(data, 0);
    }
    throw new UsbException(
            "read() primitive returned an " + "unrecognized byte array " + Arrays.toString(data));
}

From source file:fuse4j.hadoopfs.FuseHdfsClient.java

/**
 * read()/*from ww w. j  a  v  a 2  s  .c om*/
 */
public int read(String path, Object fh, ByteBuffer buf, long offset) throws FuseException {

    //return Errno.EBADF;
    log.info("read(): " + path + " offset: " + offset + " len: " + buf.capacity() + "\n");

    HdfsFileContext ctxt = pinFileContext(path);

    if (ctxt == null) {
        //file not opened").initErrno(
        return FuseException.EPERM;
    }

    if (ctxt.openedForWrite) {
        unpinFileContext(path);
        //file not opened for read")
        return FuseException.EPERM;
    }

    boolean status = hdfs.read(ctxt.hdfsFile, buf, offset);

    unpinFileContext(path);

    if (!status) {
        // read failed
        return FuseException.EACCES;
    }

    return 0;
}

From source file:ome.services.RawPixelsBean.java

/**
 * Copies the contents of the given buffer into a new byte array.
 *
 * @param buffer source buffer; its full capacity is copied starting at the
 *               current position
 * @return a new array of buffer.capacity() bytes
 * @throws NullPointerException if buffer is null
 */
private byte[] bufferAsByteArrayWithExceptionIfNull(ByteBuffer buffer) {
    // Fail fast with a descriptive message: the method name promises an
    // explicit exception on null, but the original relied on the anonymous
    // NPE raised by the capacity() call below.
    if (buffer == null) {
        throw new NullPointerException("buffer must not be null");
    }
    byte[] b = new byte[buffer.capacity()];
    buffer.get(b, 0, buffer.capacity());
    return b;
}

From source file:edu.uci.ics.crawler4j.crawler.fetcher.PageFetcher.java

/**
 * Reads the page body from {@code in} into {@code p}, stopping once the
 * buffer would overflow. Binary pages are stored as raw bytes; text pages are
 * decoded, sniffing the charset from a meta http-equiv tag when {@code
 * encoding} is null.
 *
 * @param p         page object to populate (binary data or HTML string)
 * @param in        stream to read the body from
 * @param totalsize expected content length; <= 0 means unknown
 * @param isBinary  true to store raw bytes instead of decoded text
 * @param encoding  charset name, or null to auto-detect
 * @return true if the page was loaded; false on truncation, read error or
 *         empty text content
 */
private boolean loadPage(final Page p, final InputStream in, final int totalsize, final boolean isBinary,
        String encoding) {
    ByteBuffer bBuf;

    // Size the buffer from the declared length (plus slack) when known,
    // otherwise fall back to the configured maximum download size.
    if (totalsize > 0) {
        bBuf = ByteBuffer.allocate(totalsize + 1024);
    } else {
        bBuf = ByteBuffer.allocate(maxDownloadSize);
    }
    final byte[] b = new byte[1024];
    int len;
    double finished = 0;
    try {
        while ((len = in.read(b)) != -1) {
            // Conservative capacity check: uses b.length rather than len, so
            // it may stop up to 1023 bytes early but can never overflow.
            if (finished + b.length > bBuf.capacity()) {
                break;
            }
            bBuf.put(b, 0, len);
            finished += len;
        }
    } catch (final BufferOverflowException boe) {
        System.out.println("Page size exceeds maximum allowed.");
        return false;
    } catch (final Exception e) {
        System.err.println(e.getMessage());
        return false;
    }

    bBuf.flip(); // switch from writing into the buffer to reading out of it
    if (isBinary) {
        byte[] tmp = new byte[bBuf.limit()];
        bBuf.get(tmp);
        p.setBinaryData(tmp);
    } else {
        String html = "";
        if (encoding == null) {
            // Decode as ASCII first just to locate a charset declaration in a
            // <meta http-equiv="content-type"> tag, then restore the position
            // so the real decode below starts from the same bytes.
            int pos = bBuf.position();
            html = Charset.forName("US-ASCII").decode(bBuf).toString();
            bBuf.position(pos);
            pos = html.toLowerCase().indexOf("<meta http-equiv=\"content-type\" content=\"");
            if (pos >= 0) {
                // 41 = length of the meta prefix searched for above
                int end = html.indexOf("\"", pos + 41);
                if (end >= 0) {
                    String content = html.substring(pos, end);
                    if (content.contains("charset=")) {
                        encoding = content.substring(content.indexOf("charset=") + 8);
                    }
                }
            }
        }
        if (encoding == null || !Charset.isSupported(encoding))
            encoding = "UTF-8";

        // NOTE(review): when the charset ends up as UTF-8 the ASCII-decoded
        // string from the sniffing pass above is reused as the page HTML
        // (and is empty if an encoding was passed in) -- verify this is the
        // intended behavior rather than an oversight.
        if (!encoding.equals("UTF-8")) {
            html = Charset.forName(encoding).decode(bBuf).toString();
        }

        if (html.length() == 0) {
            return false;
        }
        p.setHTML(html);
    }
    return true;
}

From source file:org.cryptomator.crypto.aes256.Aes256Cryptor.java

/**
 * Reads and decrypts the stored plaintext length from the header of an
 * encrypted file.
 *
 * @param encryptedFile channel positioned anywhere; this method seeks itself
 * @return the decrypted content length, or null when the size field cannot
 *         be read completely (e.g. truncated file / EOF)
 * @throws IOException on channel errors
 */
@Override
public Long decryptedContentLength(SeekableByteChannel encryptedFile) throws IOException {
    // skip 128bit IV + 256 bit MAC:
    encryptedFile.position(48);

    // read the encrypted size field (exactly one AES block):
    final ByteBuffer encryptedFileSizeBuffer = ByteBuffer.allocate(AES_BLOCK_LENGTH);
    final int numFileSizeBytesRead = encryptedFile.read(encryptedFileSizeBuffer);

    // return "unknown" value, if EOF
    // NOTE(review): a single read() may legally return fewer bytes than
    // requested without being at EOF; confirm the channels used here always
    // deliver the full block in one call, or loop until filled.
    if (numFileSizeBytesRead != encryptedFileSizeBuffer.capacity()) {
        return null;
    }

    // decrypt the size block with the primary master key:
    try {
        final Cipher sizeCipher = aesEcbCipher(primaryMasterKey, Cipher.DECRYPT_MODE);
        final byte[] decryptedFileSize = sizeCipher.doFinal(encryptedFileSizeBuffer.array());
        final ByteBuffer fileSizeBuffer = ByteBuffer.wrap(decryptedFileSize);
        return fileSizeBuffer.getLong();
    } catch (IllegalBlockSizeException | BadPaddingException e) {
        // programming/key error, not an I/O condition
        throw new IllegalStateException(e);
    }
}

From source file:org.commoncrawl.service.queryserver.index.PositionBasedSequenceFileIndex.java

/**
 * Loads the entire file at {@code streamPath} into a heap ByteBuffer.
 *
 * @param streamPath path of the file to load; must exist and not be a directory
 * @return a buffer whose backing array holds the full file contents
 * @throws IOException if the path is invalid, the stream ends prematurely,
 *         or a read fails
 */
private ByteBuffer loadStreamIntoMemory(Path streamPath) throws IOException {
    if (!_fileSystem.exists(streamPath) || _fileSystem.getFileStatus(streamPath).isDir()) {
        throw new IOException("Stream Path:" + streamPath + " Points to Invalid File");
    } else {
        DataInputStream inputStream = null;
        ByteBuffer bufferOut = null;
        try {
            // Size the buffer to the file's length up front.
            bufferOut = ByteBuffer.allocate((int) _fileSystem.getFileStatus(streamPath).getLen());
            inputStream = _fileSystem.open(streamPath);
            int offset = 0;
            while (offset < bufferOut.capacity()) {
                int bytesToRead = Math.min(16384, bufferOut.capacity() - offset);
                // BUG FIX: the original ignored read()'s return value and
                // advanced by bytesToRead unconditionally; a short read would
                // silently leave zero-filled gaps and skip file bytes. Advance
                // by the number of bytes actually read instead.
                int bytesRead = inputStream.read(bufferOut.array(), offset, bytesToRead);
                if (bytesRead < 0) {
                    throw new IOException("Unexpected EOF while loading Stream:" + streamPath);
                }
                offset += bytesRead;
            }
        } finally {
            if (inputStream != null) {
                inputStream.close();
            }
        }

        return bufferOut;
    }
}