Example usage for java.lang Short SIZE

List of usage examples for java.lang Short SIZE

Introduction

On this page you can find example usages for java.lang Short.SIZE.

Prototype

int SIZE

To view the source code for java.lang Short.SIZE, use the Source Link below.

Click Source Link

Document

The number of bits used to represent a short value in two's complement binary form.

Usage

From source file:com.moscona.dataSpace.Numeric.java

@Override
public long sizeInBytes() {
    // Two bytes of fixed overhead, plus the payload size of the boxed value.
    final long overhead = 2L;
    if (value == null) {
        return overhead;
    }
    final Class<?> type = value.getClass();
    if (type == Double.class) {
        return overhead + Double.SIZE / 8;
    } else if (type == Float.class) {
        return overhead + Float.SIZE / 8;
    } else if (type == Long.class) {
        return overhead + Long.SIZE / 8;
    } else if (type == Integer.class) {
        return overhead + Integer.SIZE / 8;
    } else if (type == Short.class) {
        return overhead + Short.SIZE / 8;
    } else if (type == Byte.class) {
        return overhead + Byte.SIZE / 8;
    }
    // Unrecognized boxed type: count only the fixed overhead, as the original did.
    return overhead;
}

From source file:org.bdval.io.compound.CompoundDataInput.java

/**
 * {@inheritDoc}
 */
public int readUnsignedShort() throws IOException {
    // NOTE(review): Short.SIZE is 16 (bits), while readUnsignedShort() consumes
    // two bytes. If fileSize tracks remaining *bytes*, this over-subtracts by a
    // factor of 8 and would hit EOFException early -- confirm the unit of fileSize.
    fileSize -= Short.SIZE;
    if (fileSize < 0) {
        // No room left in the (logical) file for a two-byte read.
        throw new EOFException();
    }
    return dataInput.readUnsignedShort();
}

From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

/**
 * Compress a set of columns.
 *
 * The header contains a compressed array of data types.
 * The body contains compressed columns and their metadata.
 * The footer contains a compressed array of chunk sizes. The final four bytes of the footer encode the byte size of that compressed array.
 *
 * @param colSet columns to compress
 *
 * @return ByteBuffer representing the compressed set.
 */
@Override
public ByteBuffer compress(ColumnBuffer[] colSet) {

    // Many compression libraries allow you to avoid allocation of intermediate arrays.
    // To use these API, we need to preallocate the output container.

    // Reserve space for the header.
    int[] dataType = new int[colSet.length];
    int maxCompressedSize = Snappy.maxCompressedLength(4 * dataType.length);

    // Reserve space for the compressed nulls BitSet for each column.
    // NOTE(review): the estimate uses colSet.length (column count) to size each
    // nulls bitmap, not the per-column row count -- confirm this bound is intended.
    maxCompressedSize += colSet.length * Snappy.maxCompressedLength((colSet.length / 8) + 1);

    // Track the length of `List<Integer> compressedSize` which will be declared later.
    // One entry for the header plus two (nulls + values) per column.
    int uncompressedFooterLength = 1 + 2 * colSet.length;

    for (int colNum = 0; colNum < colSet.length; ++colNum) {
        // Reserve space for the compressed columns.
        dataType[colNum] = colSet[colNum].getType().toTType().getValue();
        switch (TTypeId.findByValue(dataType[colNum])) {
        case BOOLEAN_TYPE:
            maxCompressedSize += Integer.SIZE / Byte.SIZE; // This is for the encoded length.
            maxCompressedSize += Snappy.maxCompressedLength((colSet.length / 8) + 1);
            break;
        case TINYINT_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length);
            break;
        case SMALLINT_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Short.SIZE / Byte.SIZE);
            break;
        case INT_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Integer.SIZE / Byte.SIZE);
            break;
        case BIGINT_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Long.SIZE / Byte.SIZE);
            break;
        case DOUBLE_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Double.SIZE / Byte.SIZE);
            break;
        case BINARY_TYPE:
            // Reserve space for the size of the compressed array of row sizes.
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Integer.SIZE / Byte.SIZE);

            // Reserve space for the size of the compressed flattened bytes.
            for (ByteBuffer nextBuffer : colSet[colNum].toTColumn().getBinaryVal().getValues()) {
                maxCompressedSize += Snappy.maxCompressedLength(nextBuffer.limit());
            }

            // Add an additional value to the list of compressed chunk sizes (length of `rowSize` array).
            uncompressedFooterLength++;

            break;
        case STRING_TYPE:
            // Reserve space for the size of the compressed array of row sizes.
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Integer.SIZE / Byte.SIZE);

            // Reserve space for the size of the compressed flattened bytes.
            for (String nextString : colSet[colNum].toTColumn().getStringVal().getValues()) {
                maxCompressedSize += Snappy
                        .maxCompressedLength(nextString.getBytes(StandardCharsets.UTF_8).length);
            }

            // Add an additional value to the list of compressed chunk sizes (length of `rowSize` array).
            uncompressedFooterLength++;

            break;
        default:
            throw new IllegalStateException("Unrecognized column type");
        }
    }
    // Reserve space for the footer.
    maxCompressedSize += Snappy.maxCompressedLength(uncompressedFooterLength * Integer.SIZE / Byte.SIZE);

    // Allocate the output container.
    ByteBuffer output = ByteBuffer.allocate(maxCompressedSize);

    // Allocate the footer. This goes in the footer because we don't know the chunk sizes until after
    // the columns have been compressed and written.
    ArrayList<Integer> compressedSize = new ArrayList<Integer>(uncompressedFooterLength);

    // Write to the output buffer.
    try {
        // Write the header.
        compressedSize.add(writePrimitives(dataType, output));

        // Write the compressed columns and metadata.
        for (int colNum = 0; colNum < colSet.length; colNum++) {
            switch (TTypeId.findByValue(dataType[colNum])) {
            case BOOLEAN_TYPE: {
                TBoolColumn column = colSet[colNum].toTColumn().getBoolVal();

                // Pack the booleans into a BitSet before compressing.
                List<Boolean> bools = column.getValues();
                BitSet bsBools = new BitSet(bools.size());
                for (int rowNum = 0; rowNum < bools.size(); rowNum++) {
                    bsBools.set(rowNum, bools.get(rowNum));
                }

                compressedSize.add(writePrimitives(column.getNulls(), output));

                // BitSet won't write trailing zeroes so we encode the length
                output.putInt(column.getValuesSize());

                compressedSize.add(writePrimitives(bsBools.toByteArray(), output));

                break;
            }
            case TINYINT_TYPE: {
                TByteColumn column = colSet[colNum].toTColumn().getByteVal();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedBytes(column.getValues(), output));
                break;
            }
            case SMALLINT_TYPE: {
                TI16Column column = colSet[colNum].toTColumn().getI16Val();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedShorts(column.getValues(), output));
                break;
            }
            case INT_TYPE: {
                TI32Column column = colSet[colNum].toTColumn().getI32Val();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedIntegers(column.getValues(), output));
                break;
            }
            case BIGINT_TYPE: {
                TI64Column column = colSet[colNum].toTColumn().getI64Val();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedLongs(column.getValues(), output));
                break;
            }
            case DOUBLE_TYPE: {
                TDoubleColumn column = colSet[colNum].toTColumn().getDoubleVal();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedDoubles(column.getValues(), output));
                break;
            }
            case BINARY_TYPE: {
                TBinaryColumn column = colSet[colNum].toTColumn().getBinaryVal();

                // Initialize the array of row sizes.
                int[] rowSizes = new int[column.getValuesSize()];
                int totalSize = 0;
                for (int rowNum = 0; rowNum < column.getValuesSize(); rowNum++) {
                    rowSizes[rowNum] = column.getValues().get(rowNum).limit();
                    totalSize += column.getValues().get(rowNum).limit();
                }

                // Flatten the data for Snappy for a better compression ratio.
                ByteBuffer flattenedData = ByteBuffer.allocate(totalSize);
                for (int rowNum = 0; rowNum < column.getValuesSize(); rowNum++) {
                    flattenedData.put(column.getValues().get(rowNum));
                }

                // Write nulls bitmap.
                compressedSize.add(writePrimitives(column.getNulls(), output));

                // Write the list of row sizes.
                compressedSize.add(writePrimitives(rowSizes, output));

                // Write the compressed, flattened data.
                compressedSize.add(writePrimitives(flattenedData.array(), output));

                break;
            }
            case STRING_TYPE: {
                TStringColumn column = colSet[colNum].toTColumn().getStringVal();

                // Initialize the array of row sizes.
                // NOTE(review): rowSizes uses String.length() (UTF-16 code units) while the
                // flattened bytes below are UTF-8 encoded; sizes diverge for non-ASCII
                // strings -- confirm the decompressor's expectation.
                int[] rowSizes = new int[column.getValuesSize()];
                int totalSize = 0;
                for (int rowNum = 0; rowNum < column.getValuesSize(); rowNum++) {
                    rowSizes[rowNum] = column.getValues().get(rowNum).length();
                    totalSize += column.getValues().get(rowNum).length();
                }

                // Flatten the data for Snappy for a better compression ratio.
                StringBuilder flattenedData = new StringBuilder(totalSize);
                for (int rowNum = 0; rowNum < column.getValuesSize(); rowNum++) {
                    flattenedData.append(column.getValues().get(rowNum));
                }

                // Write nulls bitmap.
                compressedSize.add(writePrimitives(column.getNulls(), output));

                // Write the list of row sizes.
                compressedSize.add(writePrimitives(rowSizes, output));

                // Write the flattened data.
                compressedSize.add(
                        writePrimitives(flattenedData.toString().getBytes(StandardCharsets.UTF_8), output));

                break;
            }
            default:
                throw new IllegalStateException("Unrecognized column type");
            }
        }

        // Write the footer. The final int is the byte size of the compressed chunk-size array.
        output.putInt(writeBoxedIntegers(compressedSize, output));

    } catch (IOException e) {
        // NOTE(review): the IOException is swallowed after printing; callers receive a
        // possibly partially-written buffer. Consider propagating or wrapping instead.
        e.printStackTrace();
    }
    output.flip();
    return output;
}

From source file:com.alibaba.jstorm.utils.JStormUtils.java

/**
 * Serializes a boxed Short into its two-byte big-endian representation.
 *
 * @param v value to encode (must not be null)
 * @return byte array with the most significant byte first
 */
public static byte[] barr(Short v) {
    final int width = Short.SIZE / 8;
    byte[] result = new byte[width];
    // Walk from the most significant byte down to the least significant one.
    int shift = (width - 1) * 8;
    for (int i = 0; i < width; i++, shift -= 8) {
        result[i] = (byte) ((v >> shift) & 0xFF);
    }
    return result;
}

From source file:org.apache.hadoop.hdfs.server.datanode.BlockMetadataHeader.java

/**
 * Returns the size of the header in bytes.
 */
public static int getHeaderSize() {
    // A short-sized field precedes the checksum header.
    final int shortFieldBytes = Short.SIZE / Byte.SIZE;
    return shortFieldBytes + DataChecksum.getChecksumHeaderSize();
}

From source file:com.delphix.session.impl.frame.SerialNumber.java

@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // The bit-width field is stored as an unsigned byte.
    serialBits = in.readByte() & 0xff;

    // Decode the serial number from the narrowest primitive wide enough for it,
    // checking from the widest case down.
    if (serialBits >= Integer.SIZE) {
        serialNumber = in.readLong();
    } else if (serialBits >= Short.SIZE) {
        serialNumber = in.readInt();
    } else if (serialBits >= Byte.SIZE) {
        serialNumber = in.readShort();
    } else {
        serialNumber = in.readByte();
    }
}

From source file:com.delphix.session.impl.frame.SerialNumber.java

@Override
public void writeExternal(ObjectOutput out) throws IOException {
    // The bit-width field is emitted as a single byte.
    out.writeByte(serialBits);

    // Encode the serial number with the narrowest primitive wide enough for it,
    // checking from the widest case down.
    if (serialBits >= Integer.SIZE) {
        out.writeLong(serialNumber);
    } else if (serialBits >= Short.SIZE) {
        out.writeInt((int) serialNumber);
    } else if (serialBits >= Byte.SIZE) {
        out.writeShort((int) serialNumber);
    } else {
        out.writeByte((int) serialNumber);
    }
}

From source file:org.cloudata.core.common.io.CWritableUtils.java

/**
 * Returns the number of bytes occupied by a serialized short.
 *
 * @return the byte width of a short (always 2)
 */
public static int getShortByteSize() {
    return Short.SIZE / Byte.SIZE;
}

From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

/**
 * Snappy-compresses an array of shorts into the output buffer and advances its position.
 *
 * @param primitives values to compress
 * @param output destination buffer (must be array-backed)
 * @return number of compressed bytes written
 * @throws IOException if compression fails
 */
private int writePrimitives(short[] primitives, ByteBuffer output) throws IOException {
    final int inputBytes = primitives.length * Short.SIZE / Byte.SIZE;
    final int destOffset = output.arrayOffset() + output.position();
    final int bytesWritten = Snappy.rawCompress(primitives, 0, inputBytes, output.array(), destOffset);
    // Advance past the compressed chunk we just emitted.
    output.position(output.position() + bytesWritten);
    return bytesWritten;
}

From source file:org.eclipse.january.dataset.DTypeUtils.java

/**
 * @param dtype/*from www .  j  av a  2s .c o  m*/
 * @param isize
 *            number of elements in an item
 * @return length of single item in bytes
 */
public static int getItemBytes(final int dtype, final int isize) {
    int size;

    switch (dtype) {
    case Dataset.BOOL:
        size = 1; // How is this defined?
        break;
    case Dataset.INT8:
    case Dataset.ARRAYINT8:
        size = Byte.SIZE / 8;
        break;
    case Dataset.INT16:
    case Dataset.ARRAYINT16:
    case Dataset.RGB:
        size = Short.SIZE / 8;
        break;
    case Dataset.INT32:
    case Dataset.ARRAYINT32:
        size = Integer.SIZE / 8;
        break;
    case Dataset.INT64:
    case Dataset.ARRAYINT64:
        size = Long.SIZE / 8;
        break;
    case Dataset.FLOAT32:
    case Dataset.ARRAYFLOAT32:
    case Dataset.COMPLEX64:
        size = Float.SIZE / 8;
        break;
    case Dataset.FLOAT64:
    case Dataset.ARRAYFLOAT64:
    case Dataset.COMPLEX128:
        size = Double.SIZE / 8;
        break;
    default:
        size = 0;
        break;
    }

    return size * isize;
}