Example usage for java.nio ByteBuffer mark

List of usage examples for java.nio ByteBuffer mark

Introduction

On this page you can find example usages of java.nio ByteBuffer mark.

Prototype

public final Buffer mark() 

Document

Marks the current position, so that the buffer's position can be returned to this point later by calling reset().
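
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing the basic mark()/reset() round trip:

import java.nio.ByteBuffer;

public class MarkResetExample {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.wrap(new byte[] { 10, 20, 30, 40 });

        buffer.get();                           // position moves to 1
        buffer.mark();                          // remember position 1
        buffer.get();                           // position moves to 2
        buffer.get();                           // position moves to 3
        buffer.reset();                         // back to the marked position 1

        System.out.println(buffer.position());  // prints 1
        System.out.println(buffer.get());       // reads the value 20 again
    }
}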

Usage

From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java

/**
 * Loads the optional pre-compiled graph entry from the given tar file.
 *
 * @return graph buffer, or {@code null} if one was not found
 * @throws IOException if the tar file could not be read
 */
private ByteBuffer loadGraph() throws IOException {
    // read the graph metadata just before the tar index entry
    int pos = access.length() - 2 * BLOCK_SIZE - getEntrySize(index.remaining());
    ByteBuffer meta = access.read(pos - 16, 16);
    int crc32 = meta.getInt();
    int count = meta.getInt();
    int bytes = meta.getInt();
    int magic = meta.getInt();

    if (magic != GRAPH_MAGIC) {
        return null; // magic byte mismatch
    }

    if (count < 0 || bytes < count * 16 + 16 || BLOCK_SIZE + bytes > pos) {
        log.warn("Invalid graph metadata in tar file {}", file);
        return null; // impossible uuid and/or byte counts
    }

    // this involves seeking backwards in the file, which might not
    // perform well, but that's OK since we only do this once per file
    ByteBuffer graph = access.read(pos - bytes, bytes);

    byte[] b = new byte[bytes - 16];
    graph.mark();
    graph.get(b);
    graph.reset();

    CRC32 checksum = new CRC32();
    checksum.update(b);
    if (crc32 != (int) checksum.getValue()) {
        log.warn("Invalid graph checksum in tar file {}", file);
        return null; // checksum mismatch
    }

    return graph;
}

From source file:org.apache.jackrabbit.oak.segment.file.TarReader.java

/**
 * Loads the optional pre-compiled graph entry from the given tar file.
 *
 * @return graph buffer, or {@code null} if one was not found
 * @throws IOException if the tar file could not be read
 */
private ByteBuffer loadGraph() throws IOException {
    // read the graph metadata just before the tar index entry
    int pos = access.length() - 2 * BLOCK_SIZE - getEntrySize(index.remaining() + 16);
    ByteBuffer meta = access.read(pos - 16, 16);
    int crc32 = meta.getInt();
    int count = meta.getInt();
    int bytes = meta.getInt();
    int magic = meta.getInt();

    if (magic != GRAPH_MAGIC) {
        return null; // magic byte mismatch
    }

    if (count < 0 || bytes < count * 16 + 16 || BLOCK_SIZE + bytes > pos) {
        log.warn("Invalid graph metadata in tar file {}", file);
        return null; // impossible uuid and/or byte counts
    }

    // this involves seeking backwards in the file, which might not
    // perform well, but that's OK since we only do this once per file
    ByteBuffer graph = access.read(pos - bytes, bytes);

    byte[] b = new byte[bytes - 16];
    graph.mark();
    graph.get(b);
    graph.reset();

    CRC32 checksum = new CRC32();
    checksum.update(b);
    if (crc32 != (int) checksum.getValue()) {
        log.warn("Invalid graph checksum in tar file {}", file);
        return null; // checksum mismatch
    }

    hasGraph = true;
    return graph;
}
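
Both TarReader variants above use the same pattern: mark() before a bulk get() so the checksum can be computed over the payload while the buffer's position is restored afterwards. A minimal standalone sketch of that pattern (hypothetical names and data, not Oak code):

import java.nio.ByteBuffer;
import java.util.zip.CRC32;

public class ChecksumWithoutConsuming {
    // Computes a CRC32 over the remaining bytes without moving the buffer's position.
    static long crcOfRemaining(ByteBuffer buffer) {
        byte[] payload = new byte[buffer.remaining()];
        buffer.mark();          // remember the current position
        buffer.get(payload);    // the bulk read advances the position
        buffer.reset();         // restore the position saved by mark()

        CRC32 checksum = new CRC32();
        checksum.update(payload);
        return checksum.getValue();
    }

    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.wrap("hello".getBytes());
        System.out.println(crcOfRemaining(buffer));
        System.out.println(buffer.remaining());  // still 5: position unchanged
    }
}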

From source file:byps.http.HWireClient.java

protected RequestToCancel createRequestForMessage(BMessage msg, BAsyncResult<BMessage> asyncResult,
        int timeoutSecondsRequest) {
    if (log.isDebugEnabled())
        log.debug("createRequestForMessage(" + msg);
    ByteBuffer requestDataBuffer = msg.buf;

    if (log.isDebugEnabled()) {
        requestDataBuffer.mark();
        BBufferJson bbuf = new BBufferJson(requestDataBuffer);
        log.debug(bbuf.toDetailString());
        requestDataBuffer.reset();
    }

    final RequestToCancel requestToCancel = new RequestToCancel(msg.header.messageId, 0L, 0L, asyncResult);

    final boolean isNegotiate = BNegotiate.isNegotiateMessage(requestDataBuffer);
    final boolean isJson = isNegotiate
            || BMessageHeader.detectProtocol(requestDataBuffer) == BMessageHeader.MAGIC_JSON;
    if (log.isDebugEnabled())
        log.debug("isJson=" + isJson);

    try {
        StringBuilder destUrl = null;

        // Negotiate?
        if (isNegotiate) {

            // Send a GET request and pass the negotiate string as parameter

            String negoStr = new String(requestDataBuffer.array(), requestDataBuffer.position(),
                    requestDataBuffer.limit(), "UTF-8");
            negoStr = URLEncoder.encode(negoStr, "UTF-8");

            String negoServlet = getServletPathForNegotiationAndAuthentication();
            destUrl = getUrlStringBuilder(negoServlet);
            destUrl.append("&negotiate=").append(negoStr);

            // Clear session Cookie
            httpClient.clearHttpSession();
        }

        // Reverse request (long-poll) ?
        else if ((msg.header.flags & BMessageHeader.FLAG_RESPONSE) != 0) {

            String longpollServlet = getServletPathForReverseRequest();
            destUrl = getUrlStringBuilder(longpollServlet);

            timeoutSecondsRequest = 0; // timeout controlled by server, 10min by
                                       // default.
        }

        // Ordinary request
        else {
            destUrl = getUrlStringBuilder("");
        }

        if (log.isDebugEnabled())
            log.debug("open connection, url=" + destUrl);
        final HHttpRequest httpRequest = isNegotiate ? httpClient.get(destUrl.toString(), requestToCancel)
                : httpClient.post(destUrl.toString(), requestDataBuffer, requestToCancel);

        httpRequest.setTimeouts(timeoutSecondsClient, timeoutSecondsRequest);

        requestToCancel.setHttpRequest(httpRequest);

        addRequest(requestToCancel);
    } catch (Throwable e) {
        if (log.isDebugEnabled())
            log.debug("received Throwable: " + e);
        BException bex = new BException(BExceptionC.IOERROR, "IO error", e);
        asyncResult.setAsyncResult(null, bex);
    }

    if (log.isDebugEnabled())
        log.debug(")createRequestForMessage=" + requestToCancel);
    return requestToCancel;
}

From source file:org.apache.tajo.storage.orc.OrcScanner.java

private static FileMetaInfo extractMetaInfoFromFooter(FileSystem fs, Path path, long maxFileLength)
        throws IOException {
    FSDataInputStream file = fs.open(path);

    // figure out the size of the file using the option or filesystem
    long size;
    if (maxFileLength == Long.MAX_VALUE) {
        size = fs.getFileStatus(path).getLen();
    } else {
        size = maxFileLength;
    }

    //read last bytes into buffer to get PostScript
    int readSize = (int) Math.min(size, DIRECTORY_SIZE_GUESS);
    ByteBuffer buffer = ByteBuffer.allocate(readSize);
    assert buffer.position() == 0;
    file.readFully((size - readSize), buffer.array(), buffer.arrayOffset(), readSize);
    buffer.position(0);

    //read the PostScript
    //get length of PostScript
    int psLen = buffer.get(readSize - 1) & 0xff;
    ensureOrcFooter(file, path, psLen, buffer);
    int psOffset = readSize - 1 - psLen;
    OrcProto.PostScript ps = extractPostScript(buffer, path, psLen, psOffset);

    int footerSize = (int) ps.getFooterLength();
    int metadataSize = (int) ps.getMetadataLength();

    //check if extra bytes need to be read
    ByteBuffer fullFooterBuffer = null;
    int extra = Math.max(0, psLen + 1 + footerSize + metadataSize - readSize);
    if (extra > 0) {
        //more bytes need to be read, seek back to the right place and read extra bytes
        ByteBuffer extraBuf = ByteBuffer.allocate(extra + readSize);
        file.readFully((size - readSize - extra), extraBuf.array(),
                extraBuf.arrayOffset() + extraBuf.position(), extra);
        extraBuf.position(extra);
        //append with already read bytes
        extraBuf.put(buffer);
        buffer = extraBuf;
        buffer.position(0);
        fullFooterBuffer = buffer.slice();
        buffer.limit(footerSize + metadataSize);
    } else {
        //footer is already in the bytes in buffer, just adjust position, length
        buffer.position(psOffset - footerSize - metadataSize);
        fullFooterBuffer = buffer.slice();
        buffer.limit(psOffset);
    }

    // remember position for later
    buffer.mark();

    file.close();

    return new FileMetaInfo(ps.getCompression().toString(), (int) ps.getCompressionBlockSize(),
            (int) ps.getMetadataLength(), buffer, ps.getVersionList(),
            org.apache.orc.OrcFile.WriterVersion.FUTURE, fullFooterBuffer);
}

From source file:com.esri.geoevent.solutions.adapter.cap.CAPInboundAdapter.java

@Override
public void receive(ByteBuffer buffer, String channelId) {
    //System.out.println("Processing...");
    String data;
    while (buffer.hasRemaining()) {
        buffer.mark();

        try {
            byte[] bytearray = new byte[buffer.remaining()];
            buffer.get(bytearray);
            data = new String(bytearray);

            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            DocumentBuilder builder = factory.newDocumentBuilder();
            Document doc = builder.parse(new InputSource(new StringReader(data)));
            NodeList alerts = doc.getElementsByTagName("alert");
            System.out.println();
            System.out.println(new Date().toString() + ": Processing " + alerts.getLength() + " alerts.");
            int procAlerts = 0;
            for (int a = 0; a < alerts.getLength(); a++) {
                Element alert = (Element) alerts.item(a);

                NodeList nodeList = alert.getElementsByTagName("identifier");
                Element line = (Element) nodeList.item(0);

                String identifier = getCharacterDataFromElement(line);
                if (MAP.containsKey(identifier)) {
                    System.out.println(
                            " Alert: " + identifier + " was processed previously. Skipping to next alert.");
                    continue;
                }
                //System.out.println("   Alert "+ a + ": " + identifier + ". Processing now.");
                MAP.put(identifier, identifier);
                procAlerts++;

                GeoEvent alertMsg = parseAlert(alert, identifier);
                if (alertMsg != null) {
                    geoEventListener.receive(alertMsg);
                    System.out.println(" Alert " + a + ": " + identifier);
                    System.out.println(" " + alertMsg.toString());

                    NodeList codes = alert.getElementsByTagName("code");
                    for (int c = 0; c < codes.getLength(); c++) {
                        Element code = (Element) codes.item(c);
                        GeoEvent codeMsg = parseAlertCode(code, identifier);
                        if (codeMsg != null) {
                            geoEventListener.receive(codeMsg);
                            System.out.println("  Code: " + codeMsg.toString());
                        }
                    }

                    NodeList infos = alert.getElementsByTagName("info");
                    for (int i = 0; i < infos.getLength(); i++) {
                        Element info = (Element) infos.item(i);
                        String infoID = identifier + "_" + i;
                        GeoEvent infoMsg = parseAlertInfo(info, identifier, infoID);
                        if (infoMsg != null) {
                            geoEventListener.receive(infoMsg);
                            System.out.println("  Info " + i + ": " + infoID);
                            System.out.println("  " + infoMsg.toString());

                            NodeList categories = info.getElementsByTagName("category");
                            for (int cat = 0; cat < categories.getLength(); cat++) {
                                Element category = (Element) categories.item(cat);
                                GeoEvent catMsg = parseInfoCategory(category, identifier, infoID);
                                if (catMsg != null) {
                                    geoEventListener.receive(catMsg);
                                    System.out.println("   Category: " + catMsg.toString());
                                }
                            }
                            NodeList eventCodes = info.getElementsByTagName("eventCode");
                            for (int e = 0; e < eventCodes.getLength(); e++) {
                                Element eventCode = (Element) eventCodes.item(e);
                                GeoEvent eMsg = parseInfoEventCode(eventCode, identifier, infoID);
                                if (eMsg != null) {
                                    geoEventListener.receive(eMsg);
                                    System.out.println("   Event code: " + eMsg.toString());
                                }
                            }
                            NodeList responseTypes = info.getElementsByTagName("responseType");
                            for (int rt = 0; rt < responseTypes.getLength(); rt++) {
                                Element responseType = (Element) responseTypes.item(rt);
                                GeoEvent rtMsg = parseInfoResponseType(responseType, identifier, infoID);
                                if (rtMsg != null) {
                                    geoEventListener.receive(rtMsg);
                                    System.out.println("   Response type: " + rtMsg.toString());
                                }
                            }
                            NodeList parameters = info.getElementsByTagName("parameter");
                            for (int p = 0; p < parameters.getLength(); p++) {
                                Element parameter = (Element) parameters.item(p);
                                GeoEvent pMsg = parseInfoParameter(parameter, identifier, infoID);
                                if (pMsg != null) {
                                    geoEventListener.receive(pMsg);
                                    System.out.println("   Parameter: " + pMsg.toString());
                                }
                            }
                            NodeList resources = info.getElementsByTagName("resource");
                            for (int r = 0; r < resources.getLength(); r++) {
                                Element resource = (Element) resources.item(r);
                                GeoEvent rMsg = parseInfoResource(resource, identifier, infoID);
                                if (rMsg != null) {
                                    geoEventListener.receive(rMsg);
                                    System.out.println("   Resource " + r + ": ");
                                    System.out.println("   " + rMsg.toString());
                                }
                            }
                            NodeList areas = info.getElementsByTagName("area");
                            for (int ar = 0; ar < areas.getLength(); ar++) {
                                Element area = (Element) areas.item(ar);
                                String areaID = infoID + "_" + ar;
                                GeoEvent areaMsg = parseInfoArea(area, identifier, infoID, areaID);
                                if (areaMsg != null) {
                                    geoEventListener.receive(areaMsg);
                                    System.out.println("   Area " + ar + ": ");
                                    System.out.println("    " + areaMsg.toString());

                                    NodeList polygons = info.getElementsByTagName("polygon");
                                    for (int pg = 0; pg < polygons.getLength(); pg++) {
                                        Element polygon = (Element) polygons.item(pg);
                                        System.out.println("     Polygon " + pg + ": ");
                                        GeoEvent areaGeomMsg = parseInfoAreaGeom(polygon, null, null,
                                                identifier, infoID, areaID);
                                        if (areaGeomMsg != null) {
                                            geoEventListener.receive(areaGeomMsg);
                                            System.out.println("      " + areaGeomMsg.toString());
                                        } else {
                                            System.out.println("      " + getCharacterDataFromElement(polygon));
                                        }
                                    }

                                    NodeList circles = info.getElementsByTagName("circle");
                                    for (int c = 0; c < circles.getLength(); c++) {
                                        Element circle = (Element) circles.item(c);
                                        System.out.println("     Circle " + c + ": ");
                                        GeoEvent areaGeomMsg = parseInfoAreaGeom(null, circle, null, identifier,
                                                infoID, areaID);
                                        if (areaGeomMsg != null) {
                                            geoEventListener.receive(areaGeomMsg);
                                            System.out.println("      " + areaGeomMsg.toString());
                                        } else {
                                            System.out.println("      " + getCharacterDataFromElement(circle));
                                        }
                                    }

                                    NodeList geocodes = info.getElementsByTagName("geocode");
                                    for (int g = 0; g < geocodes.getLength(); g++) {
                                        Element geocode = (Element) geocodes.item(g);
                                        GeoEvent areaGeomMsg = parseInfoAreaGeom(null, null, geocode,
                                                identifier, infoID, areaID);
                                        if (areaGeomMsg != null) {
                                            geoEventListener.receive(areaGeomMsg);
                                            System.out.println("     Geocode " + g + ": ");
                                            System.out.println("      " + areaGeomMsg.toString());
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }

            //System.out.println("Processed " + procAlerts + " of " + alerts.getLength() + " alerts.");

        } catch (Exception e) {
            String msg = e.getMessage();
            System.out.println(msg);
            e.printStackTrace();
        }

        return;
    }
}

From source file:org.apache.nifi.processor.util.listen.handler.socket.StandardSocketChannelHandler.java

/**
 * Process the contents that have been read into the buffer. Allow sub-classes to override this behavior.
 *
 * @param socketChannel the channel the data was read from
 * @param socketBuffer the buffer the data was read into
 * @throws InterruptedException if interrupted when queuing events
 */
protected void processBuffer(final SocketChannel socketChannel, final ByteBuffer socketBuffer)
        throws InterruptedException, IOException {
    // get total bytes in buffer
    final int total = socketBuffer.remaining();
    final InetAddress sender = socketChannel.socket().getInetAddress();

    // go through the buffer looking for the end of each message
    currBytes.reset();
    for (int i = 0; i < total; i++) {
        // NOTE: For higher throughput, the looking for \n and copying into the byte stream could be improved
        // Pull data out of buffer and cram into byte array
        byte currByte = socketBuffer.get();

        // check if at end of a message
        if (currByte == getDelimiter()) {
            if (currBytes.size() > 0) {
                final SocketChannelResponder response = new SocketChannelResponder(socketChannel);
                final Map<String, String> metadata = EventFactoryUtil.createMapWithSender(sender.toString());
                final E event = eventFactory.create(currBytes.toByteArray(), metadata, response);
                events.offer(event);
                currBytes.reset();

                // Mark this as the start of the next message
                socketBuffer.mark();
            }
        } else {
            currBytes.write(currByte);
        }
    }
}

From source file:com.slytechs.capture.file.editor.AbstractRawIterator.java

public boolean verifyAdditionalRecords(final ByteBuffer buffer, final int count)
        throws EOFException, IOException {

    buffer.reset();

    final int MAX_HEADER_LENGTH = 24;
    final ByteBuffer view = BufferUtils.duplicate(buffer);
    final int capacity = view.capacity();
    boolean status = true;

    for (int i = 0; i < count && view.position() + MAX_HEADER_LENGTH < capacity; i++) {
        view.mark();
        long length = headerReader.readLength(view);
        int p = view.position() + (int) length;

        if (pattern.match(view) == false) {
            status = false;
            break;
        }
        view.reset();

        if (p + MAX_HEADER_LENGTH > view.capacity()) {
            break;
        }

        view.limit(p + MAX_HEADER_LENGTH);
        view.position(p);
    }

    return status;
}

From source file:com.slytechs.capture.file.editor.AbstractRawIterator.java

/**
 * Searches for a packet record start within the file. If the record header is
 * not found exactly at the specified offset, the search is repeated by
 * starting the match at the offset + 1. Incrementing the offset until a match
 * is found or maxSearch has been reached.
 *
 * @param offset
 *          offset within the file to start the search at. This is the first
 *          byte to search for a record header match.
 * @param maxSearch
 *          a limit on the search. The search will be performed within the
 *          windows of offset <= search < (offset + maxSearch)
 * @return exact offset into the capture file of the start of the next record
 *         header. -1 indicates that no record header was found at the offset
 *         and with the limits set of maxSearch bytes.
 * @throws EOFException
 *           end of file has been reached before the header could be matched.
 *           This indicates that no positive match was made.
 * @throws IOException
 *           any IO errors
 */
public long searchForRecordStart(final ByteBuffer buffer, final int index, final int maxSearch)
        throws EOFException, IOException {

    final int l = index + maxSearch - this.pattern.minLength();

    for (int i = index; i < l; i++) {
        buffer.position(i);
        buffer.mark();

        if (this.pattern.match(buffer) && verifyAdditionalRecords(buffer, 5)) {
            return i;
        }
    }

    return -1;
}

From source file:com.blm.orc.ReaderImpl.java

private static FileMetaInfo extractMetaInfoFromFooter(FileSystem fs, Path path, long maxFileLength)
        throws IOException {
    FSDataInputStream file = fs.open(path);

    // figure out the size of the file using the option or filesystem
    long size;
    if (maxFileLength == Long.MAX_VALUE) {
        size = fs.getFileStatus(path).getLen();
    } else {
        size = maxFileLength;
    }

    //read last bytes into buffer to get PostScript
    int readSize = (int) Math.min(size, DIRECTORY_SIZE_GUESS);
    file.seek(size - readSize);
    ByteBuffer buffer = ByteBuffer.allocate(readSize);
    file.readFully(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining());

    //read the PostScript
    //get length of PostScript
    int psLen = buffer.get(readSize - 1) & 0xff;
    ensureOrcFooter(file, path, psLen, buffer);
    int psOffset = readSize - 1 - psLen;
    CodedInputStream in = CodedInputStream.newInstance(buffer.array(), buffer.arrayOffset() + psOffset, psLen);
    OrcProto.PostScript ps = OrcProto.PostScript.parseFrom(in);

    checkOrcVersion(LOG, path, ps.getVersionList());

    int footerSize = (int) ps.getFooterLength();
    int metadataSize = (int) ps.getMetadataLength();
    OrcFile.WriterVersion writerVersion;
    if (ps.hasWriterVersion()) {
        writerVersion = getWriterVersion(ps.getWriterVersion());
    } else {
        writerVersion = OrcFile.WriterVersion.ORIGINAL;
    }

    //check compression codec
    switch (ps.getCompression()) {
    case NONE:
        break;
    case ZLIB:
        break;
    case SNAPPY:
        break;
    case LZO:
        break;
    default:
        throw new IllegalArgumentException("Unknown compression");
    }

    //check if extra bytes need to be read
    int extra = Math.max(0, psLen + 1 + footerSize + metadataSize - readSize);
    if (extra > 0) {
        //more bytes need to be read, seek back to the right place and read extra bytes
        file.seek(size - readSize - extra);
        ByteBuffer extraBuf = ByteBuffer.allocate(extra + readSize);
        file.readFully(extraBuf.array(), extraBuf.arrayOffset() + extraBuf.position(), extra);
        extraBuf.position(extra);
        //append with already read bytes
        extraBuf.put(buffer);
        buffer = extraBuf;
        buffer.position(0);
        buffer.limit(footerSize + metadataSize);
    } else {
        //footer is already in the bytes in buffer, just adjust position, length
        buffer.position(psOffset - footerSize - metadataSize);
        buffer.limit(psOffset);
    }

    // remember position for later
    buffer.mark();

    file.close();

    return new FileMetaInfo(ps.getCompression().toString(), (int) ps.getCompressionBlockSize(),
            (int) ps.getMetadataLength(), buffer, ps.getVersionList(), writerVersion);
}

From source file:com.linkedin.databus.core.DbusEventBuffer.java

/**
 * Copies the current event bytes from the staging buffer to the main buffer. Previous calls must
 * ensure that the target write area determined by writePos is already free.
 * @param readPos         determines the region in the staging buffer to copy from
 * @param writePos        determines the region in the main buffer to write to
 */
private void copyReadEventToEventBuffer(ReadEventsReadPosition readPos, ReadEventsWritePosition writePos,
        Iterable<InternalDatabusEventsListener> eventListeners, DbusEventsStatisticsCollector statsCollector,
        boolean logDebugEnabled) {
    final ByteBuffer readBuffer = readPos.getReadBuffer();
    final int numBytesToWrite = readPos.bytesProcessed();
    final int writeStartOfs = writePos.getCurOfs();
    final ByteBuffer curBuf = writePos.getCurBuf();

    assert writePos.getNextFree().bufferGenId() - _head.bufferGenId() <= 1 : writePos.toString() + " buf:"
            + toString();

    assert curBuf.limit() >= writePos.getNextFreeOfs() : "curBuf:" + curBuf + "; " + writePos;

    final int oldLimit = readBuffer.limit();
    readBuffer.mark();
    readBuffer.position(readPos.getReadStart());
    readBuffer.limit(readPos.getPosition());

    // Set the limit/position
    curBuf.position(writeStartOfs);
    if (LOG.isDebugEnabled()) {
        LOG.debug("copying from " + readBuffer + " into " + writePos.getCurBuf() + "head:" + _head + " tail:"
                + _tail);
    }
    curBuf.put(readBuffer); // copy _readBuffer
    readBuffer.limit(oldLimit);
    readBuffer.reset();

    if (numBytesToWrite > 0) {
        // update index and call listeners on each event (may rewrite event)
        updateNewReadEvent(readPos, writePos, statsCollector, eventListeners, logDebugEnabled);
        if (readPos.getLastSeenStgWin() > _seenEndOfPeriodScn) {
            _seenEndOfPeriodScn = readPos.getLastSeenStgWin(); // this is end of period for this SCN
        }
    }
    if (logDebugEnabled)
        LOG.debug("Tail is set to :" + _tail + ", Head is at :" + _head);

    assert (_head.bufferIndex() != _tail.bufferIndex() || _head.getPosition() < _tail.getPosition()
            || _head.bufferOffset() < writePos.getCurBuf().limit());
}