Example usage for java.nio.ByteBuffer.remaining()

Introduction

This page lists usage examples for java.nio.ByteBuffer.remaining().

Prototype

public final int remaining() 

Document

Returns the number of elements remaining in this buffer, that is, limit - position.
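
A minimal standalone sketch (values are illustrative) showing how remaining() tracks the gap between position and limit across put, flip, and get:

import java.nio.ByteBuffer;

public class RemainingDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        System.out.println(buf.remaining()); // 16: position=0, limit=capacity=16

        buf.putInt(42);                      // position advances to 4
        System.out.println(buf.remaining()); // 12: space left for writing

        buf.flip();                          // limit=4, position=0
        System.out.println(buf.remaining()); // 4: bytes available for reading

        buf.getInt();                        // position advances to 4
        System.out.println(buf.remaining()); // 0: fully consumed
    }
}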

Usage

From source file:org.apache.hadoop.hbase.ipc.AsyncRpcChannel.java

/**
 * Write request to channel
 *
 * @param call the call to write
 */
private void writeRequest(final AsyncCall call) {
    try {
        final RPCProtos.RequestHeader.Builder requestHeaderBuilder = RPCProtos.RequestHeader.newBuilder();
        requestHeaderBuilder.setCallId(call.id).setMethodName(call.method.getName())
                .setRequestParam(call.param != null);

        if (Trace.isTracing()) {
            Span s = Trace.currentSpan();
            requestHeaderBuilder.setTraceInfo(
                    TracingProtos.RPCTInfo.newBuilder().setParentId(s.getSpanId()).setTraceId(s.getTraceId()));
        }

        ByteBuffer cellBlock = client.buildCellBlock(call.controller.cellScanner());
        if (cellBlock != null) {
            final RPCProtos.CellBlockMeta.Builder cellBlockBuilder = RPCProtos.CellBlockMeta.newBuilder();
            cellBlockBuilder.setLength(cellBlock.limit());
            requestHeaderBuilder.setCellBlockMeta(cellBlockBuilder.build());
        }
        // Only pass priority if there is one.  Let zero be the same as no priority.
        if (call.controller.getPriority() != 0) {
            requestHeaderBuilder.setPriority(call.controller.getPriority());
        }

        RPCProtos.RequestHeader rh = requestHeaderBuilder.build();

        int totalSize = IPCUtil.getTotalSizeWhenWrittenDelimited(rh, call.param);
        if (cellBlock != null) {
            totalSize += cellBlock.remaining();
        }

        ByteBuf b = channel.alloc().directBuffer(4 + totalSize);
        try (ByteBufOutputStream out = new ByteBufOutputStream(b)) {
            IPCUtil.write(out, rh, call.param, cellBlock);
        }

        channel.writeAndFlush(b).addListener(new CallWriteListener(this, call.id));
    } catch (IOException e) {
        close(e);
    }
}

From source file:hivemall.mf.BPRMatrixFactorizationUDTF.java

private final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.BPRMatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int u = inputBuf.getInt();
                    int i = inputBuf.getInt();
                    int j = inputBuf.getInt();
                    // invoke train
                    count++;
                    train(u, i, j);
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // read a chunk of training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int u = inputBuf.getInt();
                        int i = inputBuf.getInt();
                        int j = inputBuf.getInt();
                        // invoke train
                        count++;
                        train(u, i, j);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
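
The Hivemall examples in this listing share a framing idiom built on remaining(): after refilling the buffer from a temporary file, flip() it, consume only complete records, and compact() so a partial record at the tail survives into the next read. Below is a condensed, hypothetical sketch of that loop, assuming the fixed 12-byte records (three ints) used above; the channel argument and the train call are placeholders:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

public class ReplayLoopSketch {
    private static final int RECORD_BYTES = 3 * Integer.BYTES; // u, i, j

    static void replay(ReadableByteChannel in, ByteBuffer buf) throws IOException {
        buf.clear();
        while (in.read(buf) != -1) { // refill until EOF
            buf.flip(); // switch from filling to draining
            while (buf.remaining() >= RECORD_BYTES) { // whole records only
                int u = buf.getInt();
                int i = buf.getInt();
                int j = buf.getInt();
                // train(u, i, j); // application-specific handling
            }
            buf.compact(); // carry a partial record over to the next read
        }
    }
}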

From source file:org.apache.hadoop.hdfs.DFSInputStream.java

@Override
public synchronized int read(final ByteBuffer buf) throws IOException {
    ReaderStrategy byteBufferReader = new ByteBufferStrategy(buf);

    return readWithStrategy(byteBufferReader, 0, buf.remaining());
}

From source file:com.healthmarketscience.jackcess.impl.TableImpl.java

/**
 * Writes the given name into the given buffer in the format as expected by
 * {@link #readName}.
 */
static void writeName(ByteBuffer buffer, String name, Charset charset) {
    ByteBuffer encName = ColumnImpl.encodeUncompressedText(name, charset);
    buffer.putShort((short) encName.remaining());
    buffer.put(encName);
}
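
For symmetry, here is a hypothetical, simplified sketch of the reverse operation (the real readName in Jackcess also handles the Access text encoding): read back the short length prefix that writeName derived from encName.remaining(), then consume exactly that many bytes:

import java.nio.ByteBuffer;
import java.nio.charset.Charset;

// Hypothetical, simplified counterpart to writeName.
static String readNameSketch(ByteBuffer buffer, Charset charset) {
    int len = buffer.getShort() & 0xFFFF; // unsigned length prefix
    byte[] raw = new byte[len];
    buffer.get(raw);                      // consumes exactly len bytes
    return new String(raw, charset);
}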

From source file:hivemall.recommend.SlimUDTF.java

private void runIterativeTraining() throws HiveException {
    final ByteBuffer buf = this._inputBuf;
    final NioStatefulSegment dst = this._fileIO;
    assert (buf != null);
    assert (dst != null);

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.recommend.slim$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();
            for (int iter = 2; iter < numIterations; iter++) {
                _cvState.next();
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    replayTrain(buf);
                }
                buf.rewind();
                if (_cvState.isConverged(_observedTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + _cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(_observedTrainingExamples)
                    + " training examples on memory (thus "
                    + NumberUtils.formatNumber(_observedTrainingExamples * _cvState.getCurrentIteration())
                    + " training updates in total) ");

        } else { // read training examples in the temporary file and invoke train for each example
            // write KNNi in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }

            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }

            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info("Wrote KNN entries of axis items to a temporary file for iterative training: "
                        + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            for (int iter = 2; iter < numIterations; iter++) {
                _cvState.next();
                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // load KNN entries from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;
                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        replayTrain(buf);
                        remain -= recordBytes;
                    }
                    buf.compact();
                }
                if (_cvState.isConverged(_observedTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + _cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(_observedTrainingExamples)
                    + " training examples on memory and KNNi data on secondary storage (thus "
                    + NumberUtils.formatNumber(_observedTrainingExamples * _cvState.getCurrentIteration())
                    + " training updates in total) ");

        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this._inputBuf = null;
        this._fileIO = null;
    }
}

From source file:ipc.Server.java

/**
 * This is a wrapper around {@link WritableByteChannel#write(ByteBuffer)}.
 * If the amount of data is large, it writes to the channel in smaller chunks.
 * This avoids the JDK creating many direct buffers as the size of the
 * buffer increases. It also minimizes extra copies in the NIO layer that
 * result from the multiple write operations required to write a large
 * buffer.
 *
 * @see WritableByteChannel#write(ByteBuffer)
 */
private int channelWrite(WritableByteChannel channel, ByteBuffer buffer) throws IOException {

    int count = (buffer.remaining() <= NIO_BUFFER_LIMIT) ? channel.write(buffer)
            : channelIO(null, channel, buffer);
    return count;
}

From source file:ipc.Server.java

/**
 * This is a wrapper around {@link ReadableByteChannel#read(ByteBuffer)}.
 * If the amount of data is large, it reads from the channel in smaller chunks.
 * This avoids the JDK creating many direct buffers as the size of the
 * ByteBuffer increases. There should not be any performance degradation.
 *
 * @see ReadableByteChannel#read(ByteBuffer)
 */
private int channelRead(ReadableByteChannel channel, ByteBuffer buffer) throws IOException {

    int count = (buffer.remaining() <= NIO_BUFFER_LIMIT) ? channel.read(buffer)
            : channelIO(channel, null, buffer);
    return count;
}
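
Both wrappers delegate large buffers to channelIO, which is not shown on this page. A minimal sketch of the chunking idea, with a hypothetical chunk limit and method name (the real channelIO may differ): temporarily lower the buffer's limit so each channel call sees at most one chunk, and let remaining() drive the loop:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;

// Hypothetical sketch of chunked writing, not the actual channelIO.
static int writeChunked(WritableByteChannel ch, ByteBuffer buf, int chunk) throws IOException {
    final int initialRemaining = buf.remaining();
    while (buf.remaining() > 0) {
        int savedLimit = buf.limit();
        // expose at most `chunk` bytes to this write call
        buf.limit(buf.position() + Math.min(chunk, buf.remaining()));
        int written = ch.write(buf);
        buf.limit(savedLimit); // restore the original limit
        if (written <= 0) {
            break; // channel made no progress; report partial count
        }
    }
    return initialRemaining - buf.remaining(); // bytes actually written
}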

From source file:hivemall.fm.FactorizationMachineUDTF.java

protected void runTrainingIteration(int iterations) throws HiveException {
    final ByteBuffer inputBuf = this._inputBuf;
    final NioStatefullSegment fileIO = this._fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = _t;
    final boolean adaregr = _va_rand != null;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.fm.FactorizationMachines$Counter", "iteration");

    try {
        if (fileIO.getPosition() == 0L) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int bytes = inputBuf.getInt();
                    assert (bytes > 0) : bytes;
                    int xLength = inputBuf.getInt();
                    final Feature[] x = new Feature[xLength];
                    for (int j = 0; j < xLength; j++) {
                        x[j] = instantiateFeature(inputBuf);
                    }
                    double y = inputBuf.getDouble();
                    // invoke train
                    ++_t;
                    train(x, y, adaregr);
                }
                if (_cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(_t) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.remaining() > 0) {
                writeBuffer(inputBuf, fileIO);
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                fileIO.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // read a chunk of training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    if (remain < INT_BYTES) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= INT_BYTES) {
                        int pos = inputBuf.position();
                        int recordBytes = inputBuf.getInt();
                        remain -= INT_BYTES;
                        if (remain < recordBytes) {
                            inputBuf.position(pos);
                            break;
                        }

                        final int xLength = inputBuf.getInt();
                        final Feature[] x = new Feature[xLength];
                        for (int j = 0; j < xLength; j++) {
                            x[j] = instantiateFeature(inputBuf);
                        }
                        double y = inputBuf.getDouble();

                        // invoke training
                        ++_t;
                        train(x, y, adaregr);

                        remain -= recordBytes;
                    }
                    inputBuf.compact();
                }
                if (_cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus " + NumberUtils.formatNumber(_t)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this._inputBuf = null;
        this._fileIO = null;
    }
}

From source file:hivemall.GeneralLearnerBaseUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer buf = this.inputBuf;
    final NioStatefulSegment dst = this.fileIO;
    assert (buf != null);
    assert (dst != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.GeneralLearnerBase$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();

            for (int iter = 2; iter <= iterations; iter++) {
                cvState.next();
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    int featureVectorLength = buf.getInt();
                    final FeatureValue[] featureVector = new FeatureValue[featureVectorLength];
                    for (int j = 0; j < featureVectorLength; j++) {
                        featureVector[j] = readFeatureValue(buf, featureType);
                    }
                    float target = buf.getFloat();
                    train(featureVector, target);
                }
                buf.rewind();

                if (is_mini_batch) { // Update model with accumulated delta
                    batchUpdate();
                }

                if (cvState.isConverged(numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * cvState.getCurrentIteration())
                    + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            for (int iter = 2; iter <= iterations; iter++) {
                cvState.next();
                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // read a chunk of training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;

                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        int featureVectorLength = buf.getInt();
                        final FeatureValue[] featureVector = new FeatureValue[featureVectorLength];
                        for (int j = 0; j < featureVectorLength; j++) {
                            featureVector[j] = readFeatureValue(buf, featureType);
                        }
                        float target = buf.getFloat();
                        train(featureVector, target);

                        remain -= recordBytes;
                    }
                    buf.compact();
                }

                if (is_mini_batch) { // Update model with accumulated delta
                    batchUpdate();
                }

                if (cvState.isConverged(numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * cvState.getCurrentIteration())
                    + " training updates in total)");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}

From source file:com.l2jfree.network.mmocore.ReadWriteThread.java

private void parseClientPacket(ByteBuffer buf, int dataSize, T client) {
    final int pos = buf.position();
    final DataSizeHolder dsh = getDataSizeHolder().init(dataSize);

    if (client.decipher(buf, dsh) && buf.hasRemaining()) {
        // remove useless bytes
        dsh.decreaseSize(dsh.getMinPadding());
        // calculate possibly remaining useless bytes
        final int maxPossiblePadding = dsh.getMaxPadding() - dsh.getMinPadding();

        // apply limit
        final int limit = buf.limit();
        buf.limit(pos + dsh.getSize());

        final int opcode = buf.get() & 0xFF;

        if (getMMOController().canReceivePacketFrom(client, opcode)) {
            RP cp = getPacketHandler().handlePacket(buf, client, opcode);

            if (cp != null) {
                System.out.println("READ: " + client.getState() + " " + cp.getClass().getSimpleName());

                // remove useless bytes #2, using packet specs
                int maxLeftoverPadding = maxPossiblePadding;
                final int overflow = buf.remaining() - cp.getMaximumLength();
                if (maxPossiblePadding > 0 && // there may be useless bytes
                        overflow > 0) // and we have too much
                {
                    // avoid any damage to the packet body
                    final int removable = Math.min(overflow, maxPossiblePadding);
                    buf.limit(buf.limit() - removable);
                    maxLeftoverPadding -= removable;
                }

                getMmoBuffer().setByteBuffer(buf);
                cp.setClient(client);

                try {
                    if (getMmoBuffer().getAvailableBytes() < cp.getMinimumLength()) {
                        getMMOController().report(ErrorMode.BUFFER_UNDER_FLOW, client, cp, null);
                    } else if (getMmoBuffer().getAvailableBytes() > cp.getMaximumLength()) {
                        getMMOController().report(ErrorMode.BUFFER_OVER_FLOW, client, cp, null);
                    } else {
                        cp.read(getMmoBuffer());

                        client.executePacket(cp);

                        if (buf.hasRemaining() && // some unused data, a bad sign
                                buf.remaining() > maxLeftoverPadding) // and definitely not padded bytes
                        {
                            // FIXME disabled until packet structures updated properly
                            //report(ErrorMode.BUFFER_OVER_FLOW, client, cp, null);

                            MMOController._log.info("Invalid packet format (buf: " + buf + ", dataSize: "
                                    + dataSize + ", pos: " + pos + ", limit: " + limit + ", opcode: 0x"
                                    + HexUtil.fillHex(opcode, 2) + ") used for reading - " + client + " - "
                                    + cp.getType() + " - " + getMMOController().getVersionInfo());
                        }
                    }
                } catch (BufferUnderflowException e) {
                    getMMOController().report(ErrorMode.BUFFER_UNDER_FLOW, client, cp, e);
                } catch (RuntimeException e) {
                    getMMOController().report(ErrorMode.FAILED_READING, client, cp, e);
                }

                getMmoBuffer().setByteBuffer(null);
            }
        }

        buf.limit(limit);
    }
}