Example usage for java.nio ByteBuffer remaining

List of usage examples for java.nio ByteBuffer remaining

Introduction

On this page you can find example usages of java.nio.ByteBuffer#remaining().

Prototype

public final int remaining() 

Document

Returns the number of remaining elements in this buffer, that is, limit - position.
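
A minimal sketch of that invariant (standard library only): remaining() tracks the gap between position and limit through writes and a flip.

ByteBuffer buf = ByteBuffer.allocate(16); // position = 0, limit = 16
buf.putInt(42);                           // position advances to 4
assert buf.remaining() == 12;             // limit - position = 16 - 4

buf.flip();                               // limit = 4, position = 0
assert buf.remaining() == 4;              // bytes now available to read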

Usage

From source file: com.tomagoyaky.jdwp.IOUtils.java

/**
 * Reads the requested number of bytes or fail if there are not enough left.
 * <p>
 * This allows for the possibility that {@link ReadableByteChannel#read(ByteBuffer)} may
 * not read as many bytes as requested (most likely because of reaching EOF).
 *
 * @param input the byte channel to read
 * @param buffer byte buffer destination
 * @throws IOException  if there is a problem reading the file
 * @throws EOFException if the number of bytes read was incorrect
 * @since 2.2
 */
public static void readFully(final ReadableByteChannel input, final ByteBuffer buffer) throws IOException {
    final int expected = buffer.remaining();
    final int actual = read(input, buffer);
    if (actual != expected) {
        throw new EOFException("Length to read: " + expected + " actual: " + actual);
    }
}
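
A short usage sketch for the helper above (imports elided; the path and 8-byte header are hypothetical):

static long readHeader(Path path) throws IOException {
    try (FileChannel ch = FileChannel.open(path)) { // opens read-only by default
        ByteBuffer header = ByteBuffer.allocate(8);
        readFully(ch, header); // throws EOFException unless all 8 bytes arrive
        header.flip();         // switch the buffer from filling to reading
        return header.getLong();
    }
}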

From source file: com.tomagoyaky.jdwp.IOUtils.java

/**
 * Reads bytes from a ReadableByteChannel.
 * <p>
 * This implementation guarantees that it will read as many bytes
 * as possible before giving up; this may not always be the case for
 * implementations of {@link ReadableByteChannel}.
 *
 * @param input the byte channel to read
 * @param buffer byte buffer destination
 * @return the actual length read; may be less than requested if EOF was reached
 * @throws IOException if a read error occurs
 * @since 2.2
 */
public static int read(final ReadableByteChannel input, final ByteBuffer buffer) throws IOException {
    final int length = buffer.remaining();
    while (buffer.remaining() > 0) {
        final int count = input.read(buffer);
        if (EOF == count) { // end of stream: read() returned -1
            break;
        }
    }
    return length - buffer.remaining();
}
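
Note the design: because ReadableByteChannel#read(ByteBuffer) advances the buffer's position on every successful call, the loop needs no explicit byte counter; the total actually read falls out at the end as the initial remaining() minus the final remaining().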

From source file: org.apache.bookkeeper.bookie.Bookie.java

/**
 * Add an entry to a ledger as specified by handle.
 */
private void addEntryInternal(LedgerDescriptor handle, ByteBuffer entry, WriteCallback cb, Object ctx)
        throws IOException, BookieException {
    long ledgerId = handle.getLedgerId();
    entry.rewind();
    long entryId = handle.addEntry(entry);

    entry.rewind();
    writeBytes.add(entry.remaining());

    LOG.trace("Adding {}@{}", entryId, ledgerId);
    journal.logAddEntry(entry, cb, ctx);
}
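
Both rewind() calls reset the position to 0 without touching the limit, so entry.remaining() here reports the full entry size: the first rewind prepares the buffer for handle.addEntry(entry), which consumes it, and the second makes it readable again for the size accounting and for journal.logAddEntry.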

From source file: hivemall.fm.FactorizationMachineUDTF.java

protected void recordTrain(@Nonnull final Feature[] x, final double y) throws HiveException {
    if (_iterations <= 1) {
        return;
    }

    ByteBuffer inputBuf = _inputBuf;
    NioStatefullSegment dst = _fileIO;
    if (inputBuf == null) {
        final File file;
        try {
            file = File.createTempFile("hivemall_fm", ".sgmt");
            file.deleteOnExit();
            if (!file.canWrite()) {
                throw new UDFArgumentException("Cannot write a temporary file: " + file.getAbsolutePath());
            }
            LOG.info("Record training examples to a file: " + file.getAbsolutePath());
        } catch (IOException ioe) {
            throw new UDFArgumentException(ioe);
        } catch (Throwable e) {
            throw new UDFArgumentException(e);
        }

        this._inputBuf = inputBuf = ByteBuffer.allocateDirect(1024 * 1024); // 1 MiB
        this._fileIO = dst = new NioStatefullSegment(file, false);
    }

    int xBytes = Feature.requiredBytes(x);
    int recordBytes = (Integer.SIZE + Double.SIZE) / 8 + xBytes;
    int requiredBytes = (Integer.SIZE / 8) + recordBytes;
    int remain = inputBuf.remaining();
    if (remain < requiredBytes) {
        writeBuffer(inputBuf, dst);
    }

    inputBuf.putInt(recordBytes);
    inputBuf.putInt(x.length);
    for (Feature f : x) {
        f.writeTo(inputBuf);
    }
    inputBuf.putDouble(y);
}
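
The remaining() comparison above is the usual guard for a bounded staging buffer: free space is checked before a multi-put record, and the buffer is flushed when the record would not fit. A condensed sketch of the same pattern (imports elided), with a hypothetical WritableByteChannel as the flush target:

static void appendRecord(ByteBuffer buf, WritableByteChannel sink, byte[] record) throws IOException {
    int required = Integer.BYTES + record.length; // length prefix + payload
    if (buf.remaining() < required) {             // record would overflow the buffer
        buf.flip();                               // switch to draining
        while (buf.hasRemaining()) {
            sink.write(buf);                      // flush the staged records
        }
        buf.clear();                              // back to filling
    }
    buf.putInt(record.length);
    buf.put(record);
}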

From source file: org.apache.hadoop.hbase.ipc.AsyncRpcChannelImpl.java

/**
 * Write request to channel.
 * @param call to write
 */
private void writeRequest(final AsyncCall call) {
    try {
        final RPCProtos.RequestHeader.Builder requestHeaderBuilder = RPCProtos.RequestHeader.newBuilder();
        requestHeaderBuilder.setCallId(call.id).setMethodName(call.method.getName())
                .setRequestParam(call.param != null);

        if (Trace.isTracing()) {
            Span s = Trace.currentSpan();
            requestHeaderBuilder.setTraceInfo(
                    TracingProtos.RPCTInfo.newBuilder().setParentId(s.getSpanId()).setTraceId(s.getTraceId()));
        }

        ByteBuffer cellBlock = client.buildCellBlock(call.cellScanner());
        if (cellBlock != null) {
            final RPCProtos.CellBlockMeta.Builder cellBlockBuilder = RPCProtos.CellBlockMeta.newBuilder();
            cellBlockBuilder.setLength(cellBlock.limit());
            requestHeaderBuilder.setCellBlockMeta(cellBlockBuilder.build());
        }
        // Only pass priority if there is one. Let zero be the same as no priority.
        if (call.getPriority() != PayloadCarryingRpcController.PRIORITY_UNSET) {
            requestHeaderBuilder.setPriority(call.getPriority());
        }
        requestHeaderBuilder
                .setTimeout(call.rpcTimeout > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) call.rpcTimeout);

        RPCProtos.RequestHeader rh = requestHeaderBuilder.build();

        int totalSize = IPCUtil.getTotalSizeWhenWrittenDelimited(rh, call.param);
        if (cellBlock != null) {
            totalSize += cellBlock.remaining();
        }

        ByteBuf b = channel.alloc().directBuffer(4 + totalSize);
        try (ByteBufOutputStream out = new ByteBufOutputStream(b)) {
            call.callStats.setRequestSizeBytes(IPCUtil.write(out, rh, call.param, cellBlock));
        }

        channel.writeAndFlush(b).addListener(new CallWriteListener(this, call.id));
    } catch (IOException e) {
        close(e);
    }
}
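
Note the two size reads on the cell block: setLength uses cellBlock.limit() while totalSize adds cellBlock.remaining(). Since remaining() is limit - position, the two agree exactly when the buffer's position is 0, i.e. when the freshly built cell block arrives unread.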

From source file: org.dcache.chimera.JdbcFs.java

@Override
public FsInode inodeFromBytes(byte[] handle) throws ChimeraFsException {
    FsInode inode;

    if (handle.length < MIN_HANDLE_LEN) {
        throw new FileNotFoundHimeraFsException("File handle too short");
    }

    ByteBuffer b = ByteBuffer.wrap(handle);
    int fsid = b.get();
    int type = b.get();
    int len = b.get(); // eat the file id size.
    long ino = b.getLong();
    int opaqueLen = b.get();
    if (opaqueLen > b.remaining()) {
        throw new FileNotFoundHimeraFsException("Bad Opaque len");
    }

    byte[] opaque = new byte[opaqueLen];
    b.get(opaque);

    FsInodeType inodeType = FsInodeType.valueOf(type);

    switch (inodeType) {
    case INODE:
        int level = Integer.parseInt(new String(opaque));
        inode = new FsInode(this, ino, level);
        break;

    case ID:
        inode = new FsInode_ID(this, ino);
        break;

    case TAGS:
        inode = new FsInode_TAGS(this, ino);
        break;

    case TAG:
        String tag = new String(opaque);
        inode = new FsInode_TAG(this, ino, tag);
        break;

    case NAMEOF:
        inode = new FsInode_NAMEOF(this, ino);
        break;
    case PARENT:
        inode = new FsInode_PARENT(this, ino);
        break;

    case PATHOF:
        inode = new FsInode_PATHOF(this, ino);
        break;

    case CONST:
        inode = new FsInode_CONST(this, ino);
        break;

    case PSET:
        inode = new FsInode_PSET(this, ino, getArgs(opaque));
        break;

    case PCUR:
        inode = new FsInode_PCUR(this, ino);
        break;

    case PLOC:
        inode = new FsInode_PLOC(this, ino);
        break;

    case PCRC:
        inode = new FsInode_PCRC(this, ino);
        break;

    default:
        throw new FileNotFoundHimeraFsException("Unsupported file handle type: " + inodeType);
    }
    return inode;
}
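
The opaqueLen > b.remaining() test is the idiomatic truncation guard: a relative bulk get(byte[]) throws BufferUnderflowException if fewer bytes remain than the array needs, so checking remaining() first converts a raw buffer error into a domain-specific exception. A minimal sketch of the same guard (names are illustrative):

static byte[] readLengthPrefixed(ByteBuffer b) {
    int len = b.get() & 0xFF; // unsigned length byte
    if (len > b.remaining()) {
        throw new IllegalArgumentException(
                "Truncated input: need " + len + " bytes, " + b.remaining() + " remain");
    }
    byte[] out = new byte[len];
    b.get(out); // safe: cannot underflow after the check
    return out;
}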

From source file: hivemall.recommend.SlimUDTF.java

private void recordTrainingInput(final int itemI, @Nonnull final Int2ObjectMap<Int2FloatMap> knnItems,
        final int numKNNItems) throws HiveException {
    ByteBuffer buf = this._inputBuf;
    NioStatefulSegment dst = this._fileIO;

    if (buf == null) {
        // lazily initialized on the task node (initialize() is also invoked at query compilation)
        final File file;
        try {
            file = File.createTempFile("hivemall_slim", ".sgmt"); // to save KNN data
            file.deleteOnExit();
            if (!file.canWrite()) {
                throw new UDFArgumentException("Cannot write a temporary file: " + file.getAbsolutePath());
            }
        } catch (IOException ioe) {
            throw new UDFArgumentException(ioe);
        }

        this._inputBuf = buf = ByteBuffer.allocateDirect(8 * 1024 * 1024); // 8 MiB
        this._fileIO = dst = new NioStatefulSegment(file, false);
    }

    int recordBytes = SizeOf.INT + SizeOf.INT + SizeOf.INT * 2 * knnItems.size()
            + (SizeOf.INT + SizeOf.FLOAT) * numKNNItems;
    int requiredBytes = SizeOf.INT + recordBytes; // need to allocate space for "recordBytes" itself

    int remain = buf.remaining();
    if (remain < requiredBytes) {
        writeBuffer(buf, dst);
    }

    buf.putInt(recordBytes);
    buf.putInt(itemI);
    buf.putInt(knnItems.size());

    for (Int2ObjectMap.Entry<Int2FloatMap> e1 : Fastutil.fastIterable(knnItems)) {
        int user = e1.getIntKey();
        buf.putInt(user);

        Int2FloatMap ru = e1.getValue();
        buf.putInt(ru.size());
        for (Int2FloatMap.Entry e2 : Fastutil.fastIterable(ru)) {
            buf.putInt(e2.getIntKey());
            buf.putFloat(e2.getFloatValue());
        }
    }
}

From source file: hivemall.topicmodel.ProbabilisticTopicModelBaseUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer buf = this.inputBuf;
    final NioStatefulSegment dst = this.fileIO;
    assert (buf != null);
    assert (dst != null);
    final long numTrainingExamples = model.getDocCount();

    long numTrain = numTrainingExamples / miniBatchSize;
    if (numTrainingExamples % miniBatchSize != 0L) {
        numTrain++;
    }

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.topicmodel.ProbabilisticTopicModel$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) { // run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();

            int iter = 2;
            float perplexity = cumPerplexity / numTrain;
            float perplexityPrev;
            for (; iter <= iterations; iter++) {
                perplexityPrev = perplexity;
                cumPerplexity = 0.f;

                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    int wcLength = buf.getInt();
                    final String[] wordCounts = new String[wcLength];
                    for (int j = 0; j < wcLength; j++) {
                        wordCounts[j] = NIOUtils.getString(buf);
                    }
                    update(wordCounts);
                }
                buf.rewind();

                // mean perplexity over `numTrain` mini-batches
                perplexity = cumPerplexity / numTrain;
                logger.info("Mean perplexity over mini-batches: " + perplexity);
                if (Math.abs(perplexityPrev - perplexity) < eps) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * Math.min(iter, iterations))
                    + " training updates in total) ");
        } else { // read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            float perplexity = cumPerplexity / numTrain;
            float perplexityPrev;
            for (; iter <= iterations; iter++) {
                perplexityPrev = perplexity;
                cumPerplexity = 0.f;

                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // fill the buffer with training examples read from the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;
                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        int wcLength = buf.getInt();
                        final String[] wordCounts = new String[wcLength];
                        for (int j = 0; j < wcLength; j++) {
                            wordCounts[j] = NIOUtils.getString(buf);
                        }
                        update(wordCounts);

                        remain -= recordBytes;
                    }
                    buf.compact();
                }

                // mean perplexity over `numTrain` mini-batches
                perplexity = cumPerplexity / numTrain;
                logger.info("Mean perplexity over mini-batches: " + perplexity);
                if (Math.abs(perplexityPrev - perplexity) < eps) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * Math.min(iter, iterations))
                    + " training updates in total)");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
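
The inner read loop above is the classic fill/flip/compact decode cycle for length-prefixed records: fill the buffer from the channel, flip() to drain, consume whole records while remaining() permits, rewind over a partial record, then compact() so its head bytes survive into the next fill. A condensed sketch, assuming a ReadableByteChannel named channel and a hypothetical consumeRecord decoder:

ByteBuffer buf = ByteBuffer.allocateDirect(64 * 1024);
while (channel.read(buf) > 0) {              // fill: append bytes from the channel
    buf.flip();                              // drain: expose what has been read
    while (buf.remaining() >= Integer.BYTES) {
        buf.mark();                          // remember where this record starts
        int recordBytes = buf.getInt();
        if (buf.remaining() < recordBytes) { // only part of the record is buffered
            buf.reset();                     // rewind to the record start
            break;
        }
        consumeRecord(buf, recordBytes);     // hypothetical per-record decoder
    }
    buf.compact();                           // carry the partial tail into the next fill
}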

From source file: com.hortonworks.hbase.replication.bridge.HBaseServer.java

/**
 * This is a wrapper around {@link java.nio.channels.WritableByteChannel#write(java.nio.ByteBuffer)}.
 * If the amount of data is large, it writes to the channel in smaller chunks.
 * This avoids the JDK creating many direct buffers as the buffer size
 * increases, and also minimizes the extra copies in the NIO layer that would
 * result from the multiple write operations required to write a large
 * buffer.
 *
 * @param channel writable byte channel to write to
 * @param buffer buffer to write
 * @return number of bytes written
 * @throws java.io.IOException e
 * @see java.nio.channels.WritableByteChannel#write(java.nio.ByteBuffer)
 */
protected int channelWrite(WritableByteChannel channel, ByteBuffer buffer) throws IOException {

    int count = (buffer.remaining() <= NIO_BUFFER_LIMIT) ? channel.write(buffer)
            : channelIO(null, channel, buffer);
    if (count > 0) {
        rpcMetrics.sentBytes.inc(count);
    }
    return count;
}
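
Here remaining() selects between a single write and the chunked channelIO path: when the pending byte count exceeds NIO_BUFFER_LIMIT, writing in smaller slices keeps the JDK from materializing a temporary direct buffer as large as the whole payload when copying from a heap buffer.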

From source file: com.hortonworks.hbase.replication.bridge.HBaseServer.java

/**
 * This is a wrapper around {@link java.nio.channels.ReadableByteChannel#read(java.nio.ByteBuffer)}.
 * If the amount of data is large, it reads from the channel in smaller chunks.
 * This avoids the JDK creating many direct buffers as the size of the
 * ByteBuffer increases. There should not be any performance degradation.
 *
 * @param channel readable byte channel to read from
 * @param buffer buffer to read into
 * @return number of bytes read
 * @throws java.io.IOException e
 * @see java.nio.channels.ReadableByteChannel#read(java.nio.ByteBuffer)
 */
protected int channelRead(ReadableByteChannel channel, ByteBuffer buffer) throws IOException {

    int count = (buffer.remaining() <= NIO_BUFFER_LIMIT) ? channel.read(buffer)
            : channelIO(channel, null, buffer);
    if (count > 0) {
        rpcMetrics.receivedBytes.inc(count);
    }
    return count;
}