Example usage for java.lang Long SIZE

List of usage examples for java.lang Long SIZE

Introduction

On this page you can find example usage for java.lang Long.SIZE.

Prototype

int SIZE

To view the source code for java.lang Long.SIZE, click the Source Link below.

Click Source Link

Document

The number of bits used to represent a long value in two's complement binary form.

Usage

From source file: org.apache.hadoop.io.file.tfile.TFileDumper.java

/**
 * Dump information about TFile./*www  .j a va 2s .  c  om*/
 * 
 * @param file
 *          Path string of the TFile
 * @param out
 *          PrintStream to output the information.
 * @param conf
 *          The configuration object.
 * @throws IOException
 */
static public void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException {
    final int maxKeySampleLen = 16;
    Path path = new Path(file);
    FileSystem fs = path.getFileSystem(conf);
    long length = fs.getFileStatus(path).getLen();
    FSDataInputStream fsdis = fs.open(path);
    TFile.Reader reader = new TFile.Reader(fsdis, length, conf);
    try {
        LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
        int blockCnt = reader.readerBCF.getBlockCount();
        int metaBlkCnt = reader.readerBCF.metaIndex.index.size();
        properties.put("BCFile Version", reader.readerBCF.version.toString());
        properties.put("TFile Version", reader.tfileMeta.version.toString());
        properties.put("File Length", Long.toString(length));
        properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName());
        properties.put("Record Count", Long.toString(reader.getEntryCount()));
        properties.put("Sorted", Boolean.toString(reader.isSorted()));
        if (reader.isSorted()) {
            properties.put("Comparator", reader.getComparatorName());
        }
        properties.put("Data Block Count", Integer.toString(blockCnt));
        long dataSize = 0, dataSizeUncompressed = 0;
        if (blockCnt > 0) {
            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                dataSize += region.getCompressedSize();
                dataSizeUncompressed += region.getRawSize();
            }
            properties.put("Data Block Bytes", Long.toString(dataSize));
            if (reader.readerBCF.getDefaultCompressionName() != "none") {
                properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed));
                properties.put("Data Block Compression Ratio",
                        String.format("1:%.1f", (double) dataSizeUncompressed / dataSize));
            }
        }

        properties.put("Meta Block Count", Integer.toString(metaBlkCnt));
        long metaSize = 0, metaSizeUncompressed = 0;
        if (metaBlkCnt > 0) {
            Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values();
            boolean calculateCompression = false;
            for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) {
                MetaIndexEntry e = it.next();
                metaSize += e.getRegion().getCompressedSize();
                metaSizeUncompressed += e.getRegion().getRawSize();
                if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) {
                    calculateCompression = true;
                }
            }
            properties.put("Meta Block Bytes", Long.toString(metaSize));
            if (calculateCompression) {
                properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed));
                properties.put("Meta Block Compression Ratio",
                        String.format("1:%.1f", (double) metaSizeUncompressed / metaSize));
            }
        }
        properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize));
        long leftOverBytes = length - dataSize - metaSize;
        long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size();
        long metaIndexSize = leftOverBytes - miscSize;
        properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize));
        properties.put("Headers Etc Bytes", Long.toString(miscSize));
        // Now output the properties table.
        int maxKeyLength = 0;
        Set<Map.Entry<String, String>> entrySet = properties.entrySet();
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            if (e.getKey().length() > maxKeyLength) {
                maxKeyLength = e.getKey().length();
            }
        }
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            out.printf("%s : %s\n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue());
        }
        out.println();
        reader.checkTFileDataIndex();
        if (blockCnt > 0) {
            String blkID = "Data-Block";
            int blkIDWidth = Align.calculateWidth(blkID, blockCnt);
            int blkIDWidth2 = Align.calculateWidth("", blockCnt);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10);
            String records = "Records";
            int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10);
            String endKey = "End-Key";
            int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5);

            out.printf("%s %s %s %s %s %s\n", Align.format(blkID, blkIDWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(records, recordsWidth, Align.CENTER),
                    Align.format(endKey, endKeyWidth, Align.LEFT));

            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i);
                out.printf("%s %s %s %s %s ",
                        Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT));
                byte[] key = indexEntry.key;
                boolean asAscii = true;
                int sampleLen = Math.min(maxKeySampleLen, key.length);
                for (int j = 0; j < sampleLen; ++j) {
                    byte b = key[j];
                    if ((b < 32 && b != 9) || (b == 127)) {
                        asAscii = false;
                    }
                }
                if (!asAscii) {
                    out.print("0X");
                    for (int j = 0; j < sampleLen; ++j) {
                        byte b = key[i];
                        out.printf("%X", b);
                    }
                } else {
                    out.print(new String(key, 0, sampleLen));
                }
                if (sampleLen < key.length) {
                    out.print("...");
                }
                out.println();
            }
        }

        out.println();
        if (metaBlkCnt > 0) {
            String name = "Meta-Block";
            int maxNameLen = 0;
            Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index
                    .entrySet();
            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                if (e.getKey().length() > maxNameLen) {
                    maxNameLen = e.getKey().length();
                }
            }
            int nameWidth = Math.max(name.length(), maxNameLen);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10);
            String compression = "Compression";
            int compressionWidth = compression.length();
            out.printf("%s %s %s %s %s\n", Align.format(name, nameWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(compression, compressionWidth, Align.LEFT));

            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                String blkName = e.getValue().getMetaName();
                BlockRegion region = e.getValue().getRegion();
                String blkCompression = e.getValue().getCompressionAlgorithm().getName();
                out.printf("%s %s %s %s %s\n", Align.format(blkName, nameWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(blkCompression, compressionWidth, Align.LEFT));
            }
        }
    } finally {
        IOUtils.cleanup(LOG, reader, fsdis);
    }
}

From source file: org.apache.accumulo.core.file.rfile.bcfile.TFileDumper.java

/**
 * Dump information about a TFile: file-level properties, the data-block index
 * and the meta-block index, printed as aligned tables on the given stream.
 * 
 * @param file
 *          Path string of the TFile
 * @param out
 *          PrintStream to output the information.
 * @param conf
 *          The configuration object.
 * @throws IOException
 *           if the file cannot be opened or its indexes cannot be read.
 */
static public void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException {
    final int maxKeySampleLen = 16;
    Path path = new Path(file);
    FileSystem fs = path.getFileSystem(conf);
    long length = fs.getFileStatus(path).getLen();
    FSDataInputStream fsdis = fs.open(path);
    TFile.Reader reader = new TFile.Reader(fsdis, length, conf);
    try {
        // LinkedHashMap keeps insertion order so the properties print in a stable layout.
        LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
        int blockCnt = reader.readerBCF.getBlockCount();
        int metaBlkCnt = reader.readerBCF.metaIndex.index.size();
        properties.put("BCFile Version", reader.readerBCF.version.toString());
        properties.put("TFile Version", reader.tfileMeta.version.toString());
        properties.put("File Length", Long.toString(length));
        properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName());
        properties.put("Record Count", Long.toString(reader.getEntryCount()));
        properties.put("Sorted", Boolean.toString(reader.isSorted()));
        if (reader.isSorted()) {
            properties.put("Comparator", reader.getComparatorName());
        }
        properties.put("Data Block Count", Integer.toString(blockCnt));
        long dataSize = 0, dataSizeUncompressed = 0;
        if (blockCnt > 0) {
            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                dataSize += region.getCompressedSize();
                dataSizeUncompressed += region.getRawSize();
            }
            properties.put("Data Block Bytes", Long.toString(dataSize));
            // FIX: compare string content with equals(), not reference identity (!=).
            if (!"none".equals(reader.readerBCF.getDefaultCompressionName())) {
                properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed));
                properties.put("Data Block Compression Ratio",
                        String.format("1:%.1f", (double) dataSizeUncompressed / dataSize));
            }
        }

        properties.put("Meta Block Count", Integer.toString(metaBlkCnt));
        long metaSize = 0, metaSizeUncompressed = 0;
        if (metaBlkCnt > 0) {
            Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values();
            boolean calculateCompression = false;
            for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) {
                MetaIndexEntry e = it.next();
                metaSize += e.getRegion().getCompressedSize();
                metaSizeUncompressed += e.getRegion().getRawSize();
                if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) {
                    calculateCompression = true;
                }
            }
            properties.put("Meta Block Bytes", Long.toString(metaSize));
            if (calculateCompression) {
                properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed));
                properties.put("Meta Block Compression Ratio",
                        String.format("1:%.1f", (double) metaSizeUncompressed / metaSize));
            }
        }
        // NOTE(review): when metaSize == 0 this double division yields Infinity and
        // prints "1:Infinity" — confirm that is the intended output for empty meta.
        properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize));
        long leftOverBytes = length - dataSize - metaSize;
        // Fixed overhead: two magic numbers, one stored long (Long.SIZE / Byte.SIZE
        // = 8 bytes) and the version record.
        long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size();
        long metaIndexSize = leftOverBytes - miscSize;
        properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize));
        properties.put("Headers Etc Bytes", Long.toString(miscSize));
        // Now output the properties table, left-aligned on the longest key.
        int maxKeyLength = 0;
        Set<Map.Entry<String, String>> entrySet = properties.entrySet();
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            if (e.getKey().length() > maxKeyLength) {
                maxKeyLength = e.getKey().length();
            }
        }
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            out.printf("%s : %s%n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue());
        }
        out.println();
        reader.checkTFileDataIndex();
        if (blockCnt > 0) {
            // Per-block table: column widths are sized from ~10x the average block
            // stats so the layout survives variation between blocks.
            String blkID = "Data-Block";
            int blkIDWidth = Align.calculateWidth(blkID, blockCnt);
            int blkIDWidth2 = Align.calculateWidth("", blockCnt);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10);
            String records = "Records";
            int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10);
            String endKey = "End-Key";
            int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5);

            out.printf("%s %s %s %s %s %s%n", Align.format(blkID, blkIDWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(records, recordsWidth, Align.CENTER),
                    Align.format(endKey, endKeyWidth, Align.LEFT));

            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i);
                out.printf("%s %s %s %s %s ",
                        Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT));
                byte[] key = indexEntry.key;
                // Print the key sample as text if every byte is printable ASCII,
                // otherwise fall back to a hex rendering.
                boolean asAscii = true;
                int sampleLen = Math.min(maxKeySampleLen, key.length);
                for (int j = 0; j < sampleLen; ++j) {
                    byte b = key[j];
                    if ((b < 32 && b != 9) || (b == 127)) {
                        asAscii = false;
                    }
                }
                if (!asAscii) {
                    out.print("0X");
                    for (int j = 0; j < sampleLen; ++j) {
                        // FIX: index by the byte counter j, not the block counter i —
                        // the original printed the wrong (single) byte repeatedly.
                        byte b = key[j];
                        // NOTE(review): "%X" does not zero-pad single-digit bytes, so
                        // the hex string is ambiguous; "%02X" would be unambiguous but
                        // changes output — confirm before altering.
                        out.printf("%X", b);
                    }
                } else {
                    out.print(new String(key, 0, sampleLen));
                }
                if (sampleLen < key.length) {
                    out.print("...");
                }
                out.println();
            }
        }

        out.println();
        if (metaBlkCnt > 0) {
            String name = "Meta-Block";
            int maxNameLen = 0;
            Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index
                    .entrySet();
            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                if (e.getKey().length() > maxNameLen) {
                    maxNameLen = e.getKey().length();
                }
            }
            int nameWidth = Math.max(name.length(), maxNameLen);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10);
            String compression = "Compression";
            int compressionWidth = compression.length();
            out.printf("%s %s %s %s %s%n", Align.format(name, nameWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(compression, compressionWidth, Align.LEFT));

            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                String blkName = e.getValue().getMetaName();
                BlockRegion region = e.getValue().getRegion();
                String blkCompression = e.getValue().getCompressionAlgorithm().getName();
                out.printf("%s %s %s %s %s%n", Align.format(blkName, nameWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(blkCompression, compressionWidth, Align.LEFT));
            }
        }
    } finally {
        // Always release the reader and the underlying stream, even on failure.
        IOUtils.cleanup(LOG, reader, fsdis);
    }
}

From source file: parquet.column.statistics.bloomfilter.BloomFilter.java

public int getBitSize() {
    // Each backing word contributes Long.SIZE (64) bits to the filter's capacity.
    return Long.SIZE * bitSet.getData().length;
}

From source file: com.frostwire.search.CrawlPagedWebSearchPerformer.java

private static byte[] long2array(long l) {
    // A long occupies Long.SIZE / Byte.SIZE (= 8) bytes.
    final byte[] buf = new byte[Long.SIZE / Byte.SIZE];
    Conversion.longToByteArray(l, 0, buf, 0, buf.length);
    return buf;
}

From source file: com.frostwire.search.CrawlPagedWebSearchPerformer.java

private static long array2long(byte[] arr) {
    // Decode the first Long.SIZE / Byte.SIZE (= 8) bytes of the array as a long.
    final int numBytes = Long.SIZE / Byte.SIZE;
    return Conversion.byteArrayToLong(arr, 0, 0, 0, numBytes);
}

From source file: org.apache.pulsar.broker.admin.v2.SchemasResource.java

@GET
@Path("/{tenant}/{namespace}/{topic}/schema/{version}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Get the schema of a topic at a given version", response = GetSchemaResponse.class)
@ApiResponses(value = {
        @ApiResponse(code = 307, message = "Current broker doesn't serve the namespace of this topic"),
        @ApiResponse(code = 401, message = "Client is not authorized or Don't have admin permission"),
        @ApiResponse(code = 403, message = "Client is not authenticated"),
        @ApiResponse(code = 404, message = "Tenant or Namespace or Topic doesn't exist; or Schema is not found for this topic"),
        @ApiResponse(code = 412, message = "Failed to find the ownership for the topic"), })
public void getSchema(@PathParam("tenant") String tenant, @PathParam("namespace") String namespace,
        @PathParam("topic") String topic, @PathParam("version") @Encoded String version,
        @QueryParam("authoritative") @DefaultValue("false") boolean authoritative,
        @Suspended final AsyncResponse response) {
    validateDestinationAndAdminOperation(tenant, namespace, topic, authoritative);

    String schemaId = buildSchemaId(tenant, namespace, topic);
    // FIX: Long.SIZE is the bit count (64). The version long needs exactly
    // Long.SIZE / Byte.SIZE (= 8) bytes; the original allocated a 64-byte buffer.
    ByteBuffer bbVersion = ByteBuffer.allocate(Long.SIZE / Byte.SIZE);
    bbVersion.putLong(Long.parseLong(version));
    SchemaVersion v = pulsar().getSchemaRegistryService().versionFromBytes(bbVersion.array());
    // Resolve the schema asynchronously and complete the suspended JAX-RS response.
    pulsar().getSchemaRegistryService().getSchema(schemaId, v).handle((schema, error) -> {
        if (isNull(error)) {
            if (isNull(schema)) {
                // No schema stored at this version.
                response.resume(Response.status(Response.Status.NOT_FOUND).build());
            } else if (schema.schema.isDeleted()) {
                // A deleted schema is reported the same way as a missing one.
                response.resume(Response.status(Response.Status.NOT_FOUND).build());
            } else {
                response.resume(Response.ok().encoding(MediaType.APPLICATION_JSON)
                        .entity(GetSchemaResponse.builder().version(getLongSchemaVersion(schema.version))
                                .type(schema.schema.getType()).timestamp(schema.schema.getTimestamp())
                                .data(new String(schema.schema.getData())).properties(schema.schema.getProps())
                                .build())
                        .build());
            }
        } else {
            // Propagate lookup failures to the client as-is.
            response.resume(error);
        }
        return null;
    });
}

From source file: com.moscona.dataSpace.Factor.java

@Override
public long sizeInBytes() {
    // Deliberately rough upper-bound estimate; precision is unimportant because
    // we never build vectors of these.
    long entryCount = 5L + (long) factor.size() * 5;
    return entryCount * Long.SIZE / 8;
}

From source file: com.monitor.baseservice.utils.XCodeUtil.java

public static byte[] longToByteArray(long value) {
    // Serialize the long little-endian: least-significant byte first,
    // into exactly Long.SIZE / Byte.SIZE (= 8) bytes.
    byte[] result = new byte[Long.SIZE / Byte.SIZE];
    for (int i = 0; i < result.length; i++) {
        result[i] = (byte) (value >>> (Byte.SIZE * i));
    }
    return result;
}

From source file: com.facebook.buck.rules.HttpArtifactCache.java

/**
 * Uploads an artifact file to the HTTP cache under the given rule key.
 *
 * The request body is streamed as: an 8-byte length prefix (Long.SIZE / Byte.SIZE),
 * the raw file bytes, then the content digest. contentLength() must agree exactly
 * with what writeTo() emits.
 *
 * @param ruleKey key the artifact is stored under (used as the request path).
 * @param file local artifact to upload.
 * @throws IOException if reading the file or executing the request fails.
 */
public void storeImpl(RuleKey ruleKey, final File file) throws IOException {
    Request request = createRequestBuilder(ruleKey.toString()).put(new RequestBody() {
        @Override
        public MediaType contentType() {
            return OCTET_STREAM;
        }

        @Override
        public long contentLength() throws IOException {
            // 8-byte length prefix + file bytes + digest bytes. Must match writeTo()
            // byte-for-byte or the HTTP framing breaks.
            return Long.SIZE / Byte.SIZE + projectFilesystem.getFileSize(file.toPath())
                    + hashFunction.bits() / Byte.SIZE;
        }

        @Override
        public void writeTo(BufferedSink sink) throws IOException {
            // Stream the file through HashingInputStream so the digest can be appended
            // without reading the file twice; try-with-resources closes all three.
            // NOTE(review): assumes the file size does not change between the
            // contentLength() and writeTo() calls — confirm callers guarantee this.
            try (DataOutputStream output = new DataOutputStream(sink.outputStream());
                    InputStream input = projectFilesystem.newFileInputStream(file.toPath());
                    HashingInputStream hasher = new HashingInputStream(hashFunction, input)) {
                output.writeLong(projectFilesystem.getFileSize(file.toPath()));
                ByteStreams.copy(hasher, output);
                output.write(hasher.hash().asBytes());
            }
        }
    }).build();

    Response response = storeCall(request);

    // Only 202 Accepted counts as success; other codes are logged but not fatal,
    // since this method warns instead of throwing.
    if (response.code() != HttpURLConnection.HTTP_ACCEPTED) {
        LOGGER.warn("store(%s): unexpected response: %d", ruleKey, response.code());
    }
}

From source file: org.wso2.carbon.analytics.datasource.core.util.GenericUtils.java

private static int calculateBufferSizePerElement(String name, Object value) throws AnalyticsException {
    /* every column entry starts with the name length (int) plus one marker byte for
       the data type (which also encodes null), followed by the UTF-8 column name */
    int size = Integer.SIZE / 8 + 1;
    size += name.getBytes(StandardCharsets.UTF_8).length;
    if (value == null) {
        // null carries no payload beyond the type marker
        return size;
    }
    if (value instanceof String) {
        /* string payload: length prefix + UTF-8 bytes */
        size += Integer.SIZE / 8;
        size += ((String) value).getBytes(StandardCharsets.UTF_8).length;
    } else if (value instanceof Long) {
        size += Long.SIZE / 8;
    } else if (value instanceof Double) {
        size += Double.SIZE / 8;
    } else if (value instanceof Boolean) {
        size += Byte.SIZE / 8;
    } else if (value instanceof Integer) {
        size += Integer.SIZE / 8;
    } else if (value instanceof Float) {
        size += Float.SIZE / 8;
    } else if (value instanceof byte[]) {
        /* binary payload: length prefix + raw bytes */
        size += Integer.SIZE / 8;
        size += ((byte[]) value).length;
    } else {
        /* any other object: length prefix + its serialized form */
        size += Integer.SIZE / 8;
        size += GenericUtils.serializeObject(value).length;
    }
    return size;
}