Example usage for java.io DataInputStream skip

Introduction

On this page you can find example usage for java.io.DataInputStream.skip(long).

Prototype

public long skip(long n) throws IOException 

Document

Skips over and discards n bytes of data from the input stream. The method returns the number of bytes actually skipped, which may be smaller than n.
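
A minimal, self-contained sketch of calling skip on a DataInputStream. The file name, header size, and record layout here are hypothetical and used only for illustration:

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class DataInputStreamSkipExample {
    public static void main(String[] args) throws IOException {
        // "records.bin" is a hypothetical file; replace with a real path.
        try (DataInputStream dis = new DataInputStream(new FileInputStream("records.bin"))) {
            // Skip a 16-byte header. skip returns the number of bytes actually
            // skipped, which may be less than requested.
            long skipped = dis.skip(16);
            System.out.println("Skipped " + skipped + " bytes");
            // Continue reading the stream after the skipped region.
            int firstValue = dis.readInt();
            System.out.println("First value after header: " + firstValue);
        }
    }
}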

Usage

From source file:org.apache.cassandra.db.SuperColumn.java

public IColumn deserialize(DataInputStream dis, String name, IFilter filter) throws IOException {
    if (dis.available() == 0)
        return null;

    String[] names = RowMutation.getColumnAndColumnFamily(name);
    if (names.length == 1) {
        IColumn superColumn = defreezeSuperColumn(dis);
        if (name.equals(superColumn.name())) {
            /* read the number of columns stored */
            int size = dis.readInt();
            /* read the size of all columns */
            dis.readInt();
            IColumn column = null;
            for (int i = 0; i < size; ++i) {
                column = Column.serializer().deserialize(dis, filter);
                if (column != null) {
                    superColumn.addColumn(column.name(), column);
                    column = null;
                    if (filter.isDone()) {
                        break;
                    }
                }
            }
            return superColumn;
        } else {
            /* read the number of columns stored */
            dis.readInt();
            /* read the size of all columns to skip */
            int size = dis.readInt();
            dis.skip(size);
            return null;
        }
    }

    SuperColumn superColumn = defreezeSuperColumn(dis);
    if (!superColumn.isMarkedForDelete()) {
        int size = dis.readInt();
        /* skip the size of the columns */
        dis.readInt();
        if (size > 0) {
            for (int i = 0; i < size; ++i) {
                IColumn subColumn = Column.serializer().deserialize(dis, names[1], filter);
                if (subColumn != null) {
                    superColumn.addColumn(subColumn.name(), subColumn);
                    break;
                }
            }
        }
    }
    return superColumn;
}
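
In this deserializer, skip(size) is used in the non-matching branch to jump over the serialized columns of a super column that is not the one being requested, leaving the stream positioned at the next entry.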

From source file:com.facebook.infrastructure.db.SuperColumn.java

public IColumn deserialize(DataInputStream dis, String name, IFilter filter) throws IOException {
    if (dis.available() == 0)
        return null;

    String[] names = RowMutation.getColumnAndColumnFamily(name);
    if (names.length == 1) {
        IColumn superColumn = defreezeSuperColumn(dis);
        if (name.equals(superColumn.name())) {
            if (!superColumn.isMarkedForDelete()) {
                /* read the number of columns stored */
                int size = dis.readInt();
                /* read the size of all columns */
                dis.readInt();
                IColumn column = null;
                for (int i = 0; i < size; ++i) {
                    column = Column.serializer().deserialize(dis, filter);
                    if (column != null) {
                        superColumn.addColumn(column.name(), column);
                        column = null;
                        if (filter.isDone()) {
                            break;
                        }
                    }
                }
            }
            return superColumn;
        } else {
            /* read the number of columns stored */
            dis.readInt();
            /* read the size of all columns to skip */
            int size = dis.readInt();
            dis.skip(size);
            return null;
        }
    }

    SuperColumn superColumn = defreezeSuperColumn(dis);
    if (!superColumn.isMarkedForDelete()) {
        int size = dis.readInt();
        /* skip the size of the columns */
        dis.readInt();
        if (size > 0) {
            for (int i = 0; i < size; ++i) {
                IColumn subColumn = Column.serializer().deserialize(dis, names[1], filter);
                if (subColumn != null) {
                    superColumn.addColumn(subColumn.name(), subColumn);
                    break;
                }
            }
        }
    }
    return superColumn;
}

From source file:org.apache.hadoop.fs.azure.AzureNativeFileSystemStore.java

@Override
public DataInputStream retrieve(String key, long startByteOffset) throws AzureException, IOException {
    try {
        // Check if a session exists, if not create a session with the
        // Azure storage server.
        if (null == storageInteractionLayer) {
            final String errMsg = String.format("Storage session expected for URI '%s' but does not exist.",
                    sessionUri);
            throw new AssertionError(errMsg);
        }
        checkContainer(ContainerAccessType.PureRead);

        // Get blob reference and open the input buffer stream.
        CloudBlobWrapper blob = getBlobReference(key);

        // Open input stream and seek to the start offset.
        InputStream in = blob.openInputStream(getDownloadOptions(),
                getInstrumentedContext(isConcurrentOOBAppendAllowed()));

        // Create a data input stream.
        DataInputStream inDataStream = new DataInputStream(in);

        // Skip bytes and ignore return value. This is okay
        // because if you try to skip too far you will be positioned
        // at the end and reads will not return data.
        inDataStream.skip(startByteOffset);
        return inDataStream;
    } catch (Exception e) {
        // Re-throw as an Azure storage exception.
        throw new AzureException(e);
    }
}
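
As the comment above notes, retrieve ignores the value returned by skip, relying on reads returning no data if the offset lies past the end of the blob. In general, skip may consume fewer bytes than requested, so callers that must land exactly at startByteOffset sometimes loop until the full count is consumed. A hedged sketch of such a helper (the name skipFully and its error handling are illustrative, not taken from the Hadoop source):

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;

// Hypothetical helper: skip exactly n bytes or throw if the stream ends first.
public static void skipFully(DataInputStream in, long n) throws IOException {
    long remaining = n;
    while (remaining > 0) {
        long skipped = in.skip(remaining);
        if (skipped > 0) {
            remaining -= skipped;
        } else if (in.read() >= 0) {
            // skip made no progress; consume one byte directly instead.
            remaining--;
        } else {
            throw new EOFException("End of stream with " + remaining + " bytes left to skip");
        }
    }
}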