List of usage examples for java.lang Byte SIZE
public static final int SIZE

Byte.SIZE is the number of bits used to represent a byte value in two's complement binary form. Its value is 8, and the constant has been available since Java 1.5.
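A minimal sketch of the two idioms that recur throughout the examples below: dividing a type's bit width by Byte.SIZE to get its byte width, and rounding a bit count up to whole bytes (class name is illustrative):

public class ByteSizeDemo {
    public static void main(String[] args) {
        // Byte width of a primitive from its bit width; since Java 8 the
        // constants Integer.BYTES and Long.BYTES name the same values.
        int intBytes = Integer.SIZE / Byte.SIZE;  // 4
        int longBytes = Long.SIZE / Byte.SIZE;    // 8

        // Round a bit count up to whole bytes.
        int bits = 20;
        int bytesNeeded = (bits + Byte.SIZE - 1) / Byte.SIZE; // 3

        System.out.println(intBytes + " " + longBytes + " " + bytesNeeded);
    }
}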
From source file:org.onlab.util.ImmutableByteSequenceTest.java
@Test
public void testBitSetMethods() throws Exception {
    // All zeros tests
    assertThat("3 bytes, all 0's", ImmutableByteSequence.ofZeros(3),
            is(equalTo(ImmutableByteSequence.copyFrom(new byte[] { 0, 0, 0 }))));
    assertThat("3 bytes, all 0's via prefix", ImmutableByteSequence.prefixZeros(3, 3 * Byte.SIZE),
            is(equalTo(ImmutableByteSequence.copyFrom(new byte[] { 0, 0, 0 }))));
    // All ones tests
    assertThat("3 bytes, all 1's", ImmutableByteSequence.ofOnes(3),
            is(equalTo(ImmutableByteSequence.copyFrom(
                    new byte[] { (byte) 0xff, (byte) 0xff, (byte) 0xff }))));
    assertThat("3 bytes, all 1's via prefix", ImmutableByteSequence.prefixOnes(3, 3 * Byte.SIZE),
            is(equalTo(ImmutableByteSequence.copyFrom(
                    new byte[] { (byte) 0xff, (byte) 0xff, (byte) 0xff }))));
    // Zero prefix tests
    assertThat("2 bytes, prefixed with 5 0's", ImmutableByteSequence.prefix(2, 5, (byte) 0),
            is(equalTo(ImmutableByteSequence.copyFrom(new byte[] { (byte) 0x7, (byte) 0xff }))));
    assertThat("4 bytes, prefixed with 16 0's", ImmutableByteSequence.prefix(4, 16, (byte) 0),
            is(equalTo(ImmutableByteSequence.copyFrom(new byte[] { 0, 0, (byte) 0xff, (byte) 0xff }))));
    assertThat("4 bytes, prefixed with 20 0's", ImmutableByteSequence.prefix(4, 20, (byte) 0),
            is(equalTo(ImmutableByteSequence.copyFrom(new byte[] { 0, 0, (byte) 0x0f, (byte) 0xff }))));
    assertThat("8 bytes, prefixed with 38 0's", ImmutableByteSequence.prefixZeros(8, 38),
            is(equalTo(ImmutableByteSequence.copyFrom(
                    new byte[] { 0, 0, 0, 0, (byte) 0x03, (byte) 0xff, (byte) 0xff, (byte) 0xff }))));
    // Ones prefix tests
    assertThat("2 bytes, prefixed with 5 1's", ImmutableByteSequence.prefix(2, 5, (byte) 0xff),
            is(equalTo(ImmutableByteSequence.copyFrom(new byte[] { (byte) 0xf8, 0 }))));
    assertThat("4 bytes, prefixed with 16 1's", ImmutableByteSequence.prefix(4, 16, (byte) 0xff),
            is(equalTo(ImmutableByteSequence.copyFrom(new byte[] { (byte) 0xff, (byte) 0xff, 0, 0 }))));
    assertThat("4 bytes, prefixed with 20 1's", ImmutableByteSequence.prefix(4, 20, (byte) 0xff),
            is(equalTo(ImmutableByteSequence.copyFrom(
                    new byte[] { (byte) 0xff, (byte) 0xff, (byte) 0xf0, 0 }))));
    assertThat("8 bytes, prefixed with 10 1's", ImmutableByteSequence.prefixOnes(8, 10),
            is(equalTo(ImmutableByteSequence.copyFrom(
                    new byte[] { (byte) 0xff, (byte) 0xc0, 0, 0, 0, 0, 0, 0 }))));
}
From source file:org.mitre.math.linear.BufferRealMatrix.java
/**
 * Create a new matrix with the supplied row and column dimensions.
 *
 * @param rows    the number of rows in the new matrix
 * @param columns the number of columns in the new matrix
 * @param file    the file to use to store the mapped matrix (<code>null</code> allowed; a temp file will be created)
 * @throws IllegalArgumentException
 * @throws IOException
 */
public BufferRealMatrix(final int rows, final int columns, File file)
        throws IllegalArgumentException, IOException {
    super(rows, columns);
    this.rows = rows;
    this.columns = columns;

    // number of blocks
    this.blockRows = (rows + BLOCK_SIZE - 1) / BLOCK_SIZE;
    this.blockColumns = (columns + BLOCK_SIZE - 1) / BLOCK_SIZE;

    if (file == null) {
        file = File.createTempFile(TEMP_FILE_PREFIX, null);
        LOG.debug(String.format("Created tempFile '%s'", file.getAbsolutePath()));
    }
    RandomAccessFile raf = new RandomAccessFile(file, "rw");
    this.dataFileChannel = raf.getChannel();

    long mbbSize = (long) (Double.SIZE / Byte.SIZE) * (long) rows * (long) columns + BUFFER_HEADER_SIZE;
    LOG.debug(String.format("Matrix size will be %d bytes and %d by %d blocks", mbbSize, this.blockRows,
            this.blockColumns));

    MappedByteBuffer bb = this.dataFileChannel.map(FileChannel.MapMode.READ_WRITE, 0, BUFFER_HEADER_SIZE);
    bb.clear();
    bb.putInt(BLOCK_BYTE_SIZE);
    bb.putInt(rows);
    bb.putInt(columns);
    // note: we don't create the layout like BlockedRealMatrix
    // Is this set to zeros? It would be a pain/slow to init it if it is really big
}
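The size computation above multiplies 8 bytes per double (Double.SIZE / Byte.SIZE) by the element count, widening to long first so the product cannot overflow an int. A standalone sketch of just the sizing arithmetic; BUFFER_HEADER_SIZE is assumed here to be the three ints the constructor writes:

public class MatrixSizingDemo {
    // Assumption for illustration: the header holds the three ints written
    // above (block size, rows, columns) = 12 bytes.
    private static final long BUFFER_HEADER_SIZE = 3L * (Integer.SIZE / Byte.SIZE);

    static long mappedSize(int rows, int columns) {
        // Cast to long before multiplying: 50_000 * 50_000 * 8 overflows int.
        return (long) (Double.SIZE / Byte.SIZE) * rows * columns + BUFFER_HEADER_SIZE;
    }

    public static void main(String[] args) {
        System.out.println(mappedSize(50_000, 50_000)); // 20000000012
    }
}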
From source file:ie.peternagy.jcrypto.algo.AesWrapper.java
protected byte[] createHeader() {
    try {
        byte[] garbageByte = CryptoSecurityUtil.getSecureBytes(CryptoSecurityUtil.getRandomIntInRange(0, 768));
        byte[] baseKeyEnc = curve.doFinalWithHeader(baseKey, true);
        ByteArrayOutputStream header = new ByteArrayOutputStream();

        header.write((byte) 100);
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(iv.length).array());
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(salt.length).array());
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(garbageByte.length).array());
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(baseKeyEnc.length).array());
        header.write(iv);
        header.write(salt);
        header.write(garbageByte);
        header.write(baseKeyEnc); // encrypted with EC
        // include: long and crc32 for data

        return header.toByteArray();
    } catch (IOException ex) {
        Logger.getLogger(AesWrapper.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}
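Each length field above occupies Integer.SIZE / Byte.SIZE = 4 bytes, big-endian (ByteBuffer's default order). A hypothetical reader-side sketch for the same layout; the field names and the tiny test header are invented for illustration:

import java.nio.ByteBuffer;

public class HeaderReaderDemo {
    // Parses the layout written above: 1 tag byte, four 4-byte lengths,
    // then the iv, salt, garbage and encrypted-key payloads in that order.
    static void parse(byte[] header) {
        ByteBuffer buf = ByteBuffer.wrap(header);
        byte tag = buf.get();          // the (byte) 100 marker
        int ivLen = buf.getInt();
        int saltLen = buf.getInt();
        int garbageLen = buf.getInt();
        int keyLen = buf.getInt();
        byte[] iv = new byte[ivLen];
        buf.get(iv);
        byte[] salt = new byte[saltLen];
        buf.get(salt);
        buf.position(buf.position() + garbageLen); // skip the random padding
        byte[] keyEnc = new byte[keyLen];
        buf.get(keyEnc);
        System.out.printf("tag=%d iv=%d salt=%d key=%d%n", tag, ivLen, saltLen, keyLen);
    }

    public static void main(String[] args) {
        ByteBuffer b = ByteBuffer.allocate(1 + 4 * (Integer.SIZE / Byte.SIZE) + 6);
        b.put((byte) 100).putInt(2).putInt(2).putInt(1).putInt(1);
        b.put(new byte[] { 1, 2 }).put(new byte[] { 3, 4 }).put((byte) 9).put((byte) 7);
        parse(b.array());
    }
}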
From source file:org.apache.hadoop.hdfs.server.datanode.BlockMetadataHeader.java
/**
 * Returns the size of the header.
 */
public static int getHeaderSize() {
    return Short.SIZE / Byte.SIZE + DataChecksum.getChecksumHeaderSize();
}
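The two bytes here are the metadata version field (a short); Short.SIZE / Byte.SIZE == 2, which Java 8+ also exposes as Short.BYTES. A self-contained sketch with the Hadoop checksum-header size replaced by an assumed placeholder constant:

public class HeaderSizeDemo {
    // Assumption: stands in for DataChecksum.getChecksumHeaderSize(), which
    // is defined by Hadoop and not recomputed here.
    private static final int CHECKSUM_HEADER_SIZE = 5;

    public static int getHeaderSize() {
        return Short.SIZE / Byte.SIZE + CHECKSUM_HEADER_SIZE; // 2 + 5
    }

    public static void main(String[] args) {
        System.out.println(getHeaderSize()); // 7, under the assumed constant
    }
}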
From source file:org.apache.hadoop.io.file.tfile.TFileDumper.java
/**
 * Dump information about TFile.
 *
 * @param file
 *          Path string of the TFile
 * @param out
 *          PrintStream to output the information.
 * @param conf
 *          The configuration object.
 * @throws IOException
 */
public static void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException {
    final int maxKeySampleLen = 16;
    Path path = new Path(file);
    FileSystem fs = path.getFileSystem(conf);
    long length = fs.getFileStatus(path).getLen();
    FSDataInputStream fsdis = fs.open(path);
    TFile.Reader reader = new TFile.Reader(fsdis, length, conf);
    try {
        LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
        int blockCnt = reader.readerBCF.getBlockCount();
        int metaBlkCnt = reader.readerBCF.metaIndex.index.size();
        properties.put("BCFile Version", reader.readerBCF.version.toString());
        properties.put("TFile Version", reader.tfileMeta.version.toString());
        properties.put("File Length", Long.toString(length));
        properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName());
        properties.put("Record Count", Long.toString(reader.getEntryCount()));
        properties.put("Sorted", Boolean.toString(reader.isSorted()));
        if (reader.isSorted()) {
            properties.put("Comparator", reader.getComparatorName());
        }
        properties.put("Data Block Count", Integer.toString(blockCnt));
        long dataSize = 0, dataSizeUncompressed = 0;
        if (blockCnt > 0) {
            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                dataSize += region.getCompressedSize();
                dataSizeUncompressed += region.getRawSize();
            }
            properties.put("Data Block Bytes", Long.toString(dataSize));
            if (!"none".equals(reader.readerBCF.getDefaultCompressionName())) {
                properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed));
                properties.put("Data Block Compression Ratio",
                        String.format("1:%.1f", (double) dataSizeUncompressed / dataSize));
            }
        }
        properties.put("Meta Block Count", Integer.toString(metaBlkCnt));
        long metaSize = 0, metaSizeUncompressed = 0;
        if (metaBlkCnt > 0) {
            Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values();
            boolean calculateCompression = false;
            for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) {
                MetaIndexEntry e = it.next();
                metaSize += e.getRegion().getCompressedSize();
                metaSizeUncompressed += e.getRegion().getRawSize();
                if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) {
                    calculateCompression = true;
                }
            }
            properties.put("Meta Block Bytes", Long.toString(metaSize));
            if (calculateCompression) {
                properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed));
                properties.put("Meta Block Compression Ratio",
                        String.format("1:%.1f", (double) metaSizeUncompressed / metaSize));
            }
        }
        properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize));
        long leftOverBytes = length - dataSize - metaSize;
        long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size();
        long metaIndexSize = leftOverBytes - miscSize;
        properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize));
        properties.put("Headers Etc Bytes", Long.toString(miscSize));

        // Now output the properties table.
        int maxKeyLength = 0;
        Set<Map.Entry<String, String>> entrySet = properties.entrySet();
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            if (e.getKey().length() > maxKeyLength) {
                maxKeyLength = e.getKey().length();
            }
        }
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            out.printf("%s : %s\n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue());
        }
        out.println();
        reader.checkTFileDataIndex();
        if (blockCnt > 0) {
            String blkID = "Data-Block";
            int blkIDWidth = Align.calculateWidth(blkID, blockCnt);
            int blkIDWidth2 = Align.calculateWidth("", blockCnt);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10);
            String records = "Records";
            int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10);
            String endKey = "End-Key";
            int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5);

            out.printf("%s %s %s %s %s %s\n", Align.format(blkID, blkIDWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(records, recordsWidth, Align.CENTER),
                    Align.format(endKey, endKeyWidth, Align.LEFT));

            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i);
                out.printf("%s %s %s %s %s ",
                        Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT));
                byte[] key = indexEntry.key;
                boolean asAscii = true;
                int sampleLen = Math.min(maxKeySampleLen, key.length);
                for (int j = 0; j < sampleLen; ++j) {
                    byte b = key[j];
                    if ((b < 32 && b != 9) || (b == 127)) {
                        asAscii = false;
                    }
                }
                if (!asAscii) {
                    out.print("0X");
                    for (int j = 0; j < sampleLen; ++j) {
                        byte b = key[j];
                        out.printf("%X", b);
                    }
                } else {
                    out.print(new String(key, 0, sampleLen));
                }
                if (sampleLen < key.length) {
                    out.print("...");
                }
                out.println();
            }
        }
        out.println();
        if (metaBlkCnt > 0) {
            String name = "Meta-Block";
            int maxNameLen = 0;
            Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index.entrySet();
            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                if (e.getKey().length() > maxNameLen) {
                    maxNameLen = e.getKey().length();
                }
            }
            int nameWidth = Math.max(name.length(), maxNameLen);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10);
            String compression = "Compression";
            int compressionWidth = compression.length();

            out.printf("%s %s %s %s %s\n", Align.format(name, nameWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(compression, compressionWidth, Align.LEFT));

            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                String blkName = e.getValue().getMetaName();
                BlockRegion region = e.getValue().getRegion();
                String blkCompression = e.getValue().getCompressionAlgorithm().getName();
                out.printf("%s %s %s %s %s\n", Align.format(blkName, nameWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(blkCompression, compressionWidth, Align.LEFT));
            }
        }
    } finally {
        IOUtils.cleanup(LOG, reader, fsdis);
    }
}
From source file:org.apache.accumulo.core.file.rfile.bcfile.TFileDumper.java
/**
 * Dump information about TFile.
 *
 * @param file
 *          Path string of the TFile
 * @param out
 *          PrintStream to output the information.
 * @param conf
 *          The configuration object.
 * @throws IOException
 */
public static void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException {
    final int maxKeySampleLen = 16;
    Path path = new Path(file);
    FileSystem fs = path.getFileSystem(conf);
    long length = fs.getFileStatus(path).getLen();
    FSDataInputStream fsdis = fs.open(path);
    TFile.Reader reader = new TFile.Reader(fsdis, length, conf);
    try {
        LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
        int blockCnt = reader.readerBCF.getBlockCount();
        int metaBlkCnt = reader.readerBCF.metaIndex.index.size();
        properties.put("BCFile Version", reader.readerBCF.version.toString());
        properties.put("TFile Version", reader.tfileMeta.version.toString());
        properties.put("File Length", Long.toString(length));
        properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName());
        properties.put("Record Count", Long.toString(reader.getEntryCount()));
        properties.put("Sorted", Boolean.toString(reader.isSorted()));
        if (reader.isSorted()) {
            properties.put("Comparator", reader.getComparatorName());
        }
        properties.put("Data Block Count", Integer.toString(blockCnt));
        long dataSize = 0, dataSizeUncompressed = 0;
        if (blockCnt > 0) {
            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                dataSize += region.getCompressedSize();
                dataSizeUncompressed += region.getRawSize();
            }
            properties.put("Data Block Bytes", Long.toString(dataSize));
            if (!"none".equals(reader.readerBCF.getDefaultCompressionName())) {
                properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed));
                properties.put("Data Block Compression Ratio",
                        String.format("1:%.1f", (double) dataSizeUncompressed / dataSize));
            }
        }
        properties.put("Meta Block Count", Integer.toString(metaBlkCnt));
        long metaSize = 0, metaSizeUncompressed = 0;
        if (metaBlkCnt > 0) {
            Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values();
            boolean calculateCompression = false;
            for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) {
                MetaIndexEntry e = it.next();
                metaSize += e.getRegion().getCompressedSize();
                metaSizeUncompressed += e.getRegion().getRawSize();
                if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) {
                    calculateCompression = true;
                }
            }
            properties.put("Meta Block Bytes", Long.toString(metaSize));
            if (calculateCompression) {
                properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed));
                properties.put("Meta Block Compression Ratio",
                        String.format("1:%.1f", (double) metaSizeUncompressed / metaSize));
            }
        }
        properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize));
        long leftOverBytes = length - dataSize - metaSize;
        long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size();
        long metaIndexSize = leftOverBytes - miscSize;
        properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize));
        properties.put("Headers Etc Bytes", Long.toString(miscSize));

        // Now output the properties table.
        int maxKeyLength = 0;
        Set<Map.Entry<String, String>> entrySet = properties.entrySet();
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            if (e.getKey().length() > maxKeyLength) {
                maxKeyLength = e.getKey().length();
            }
        }
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            out.printf("%s : %s%n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue());
        }
        out.println();
        reader.checkTFileDataIndex();
        if (blockCnt > 0) {
            String blkID = "Data-Block";
            int blkIDWidth = Align.calculateWidth(blkID, blockCnt);
            int blkIDWidth2 = Align.calculateWidth("", blockCnt);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10);
            String records = "Records";
            int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10);
            String endKey = "End-Key";
            int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5);

            out.printf("%s %s %s %s %s %s%n", Align.format(blkID, blkIDWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(records, recordsWidth, Align.CENTER),
                    Align.format(endKey, endKeyWidth, Align.LEFT));

            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i);
                out.printf("%s %s %s %s %s ",
                        Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT));
                byte[] key = indexEntry.key;
                boolean asAscii = true;
                int sampleLen = Math.min(maxKeySampleLen, key.length);
                for (int j = 0; j < sampleLen; ++j) {
                    byte b = key[j];
                    if ((b < 32 && b != 9) || (b == 127)) {
                        asAscii = false;
                    }
                }
                if (!asAscii) {
                    out.print("0X");
                    for (int j = 0; j < sampleLen; ++j) {
                        byte b = key[j];
                        out.printf("%X", b);
                    }
                } else {
                    out.print(new String(key, 0, sampleLen));
                }
                if (sampleLen < key.length) {
                    out.print("...");
                }
                out.println();
            }
        }
        out.println();
        if (metaBlkCnt > 0) {
            String name = "Meta-Block";
            int maxNameLen = 0;
            Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index.entrySet();
            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                if (e.getKey().length() > maxNameLen) {
                    maxNameLen = e.getKey().length();
                }
            }
            int nameWidth = Math.max(name.length(), maxNameLen);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10);
            String compression = "Compression";
            int compressionWidth = compression.length();

            out.printf("%s %s %s %s %s%n", Align.format(name, nameWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(compression, compressionWidth, Align.LEFT));

            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                String blkName = e.getValue().getMetaName();
                BlockRegion region = e.getValue().getRegion();
                String blkCompression = e.getValue().getCompressionAlgorithm().getName();
                out.printf("%s %s %s %s %s%n", Align.format(blkName, nameWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(blkCompression, compressionWidth, Align.LEFT));
            }
        }
    } finally {
        IOUtils.cleanup(LOG, reader, fsdis);
    }
}
From source file:org.onosproject.net.pi.impl.AbstractCriterionTranslator.java
/**
 * Checks that the given mask is equivalent to a longest-prefix match and returns the prefix length. If not
 * possible, the optional value will not be present.
 *
 * @param mask byte sequence
 * @return optional prefix length
 */
private Optional<Integer> getPrefixLengthFromMask(ImmutableByteSequence mask) {
    Integer prefixLength = 0;

    byte[] byteArray = mask.asArray();
    int byteArrayIndex = 0;
    while (byteArrayIndex < byteArray.length && byteArray[byteArrayIndex] == (byte) 0xff) {
        prefixLength += Byte.SIZE;
        byteArrayIndex++;
    }
    if (byteArrayIndex == byteArray.length) {
        // Mask is all 1's; avoid reading past the end of the array below.
        return Optional.of(prefixLength);
    }

    byte byteVal = byteArray[byteArrayIndex];
    while (byteVal != 0) {
        if ((byteVal & Integer.MIN_VALUE) != Integer.MIN_VALUE) {
            return Optional.empty();
        }
        prefixLength++;
        byteVal <<= 1;
    }

    byteArrayIndex++;
    while (byteArrayIndex < byteArray.length) {
        if (byteArray[byteArrayIndex] != 0) {
            return Optional.empty();
        }
        byteArrayIndex++;
    }

    return Optional.of(prefixLength);
}
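The same check can be distilled into a bit-by-bit loop that is easy to test in isolation. A simplified sketch (not the ONOS implementation) with two sample masks:

import java.util.Optional;

public class PrefixLengthDemo {
    // Count leading 1-bits; reject the mask if any 1-bit follows a 0-bit.
    static Optional<Integer> prefixLength(byte[] mask) {
        int length = 0;
        boolean seenZero = false;
        for (byte b : mask) {
            for (int bit = Byte.SIZE - 1; bit >= 0; bit--) {
                if (((b >> bit) & 1) == 1) {
                    if (seenZero) {
                        return Optional.empty(); // a 1 after a 0: not a prefix mask
                    }
                    length++;
                } else {
                    seenZero = true;
                }
            }
        }
        return Optional.of(length);
    }

    public static void main(String[] args) {
        System.out.println(prefixLength(new byte[] { (byte) 0xff, (byte) 0xf0 })); // Optional[12]
        System.out.println(prefixLength(new byte[] { (byte) 0xff, (byte) 0x0f })); // Optional.empty
    }
}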
From source file:com.frostwire.search.CrawlPagedWebSearchPerformer.java
private static byte[] long2array(long l) {
    byte[] arr = new byte[Long.SIZE / Byte.SIZE];
    Conversion.longToByteArray(l, 0, arr, 0, arr.length);
    return arr;
}
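Since Java 8 the array size can also be written as Long.BYTES. A JDK-only alternative sketch; note that, if memory serves, Commons Lang's Conversion.longToByteArray writes the least-significant byte first, whereas ByteBuffer defaults to big-endian, so the two are not byte-for-byte interchangeable:

import java.nio.ByteBuffer;
import java.util.Arrays;

public class LongToBytesDemo {
    static byte[] long2array(long l) {
        // Long.SIZE / Byte.SIZE == Long.BYTES == 8
        return ByteBuffer.allocate(Long.SIZE / Byte.SIZE).putLong(l).array();
    }

    public static void main(String[] args) {
        // Big-endian: most significant byte first.
        System.out.println(Arrays.toString(long2array(1L))); // [0, 0, 0, 0, 0, 0, 0, 1]
    }
}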
From source file:org.apache.accumulo.core.client.mapreduce.RangeInputSplit.java
/**
 * This implementation of length is only an estimate; it does not provide exact values. Do not have your
 * code rely on this return value.
 */
@Override
public long getLength() throws IOException {
    Text startRow = range.isInfiniteStartKey() ? new Text(new byte[] { Byte.MIN_VALUE })
            : range.getStartKey().getRow();
    Text stopRow = range.isInfiniteStopKey() ? new Text(new byte[] { Byte.MAX_VALUE })
            : range.getEndKey().getRow();
    int maxCommon = Math.min(7, Math.min(startRow.getLength(), stopRow.getLength()));
    long diff = 0;

    byte[] start = startRow.getBytes();
    byte[] stop = stopRow.getBytes();
    for (int i = 0; i < maxCommon; ++i) {
        diff |= 0xff & (start[i] ^ stop[i]);
        diff <<= Byte.SIZE;
    }

    if (startRow.getLength() != stopRow.getLength()) {
        diff |= 0xff;
    }

    return diff + 1;
}
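The estimate XORs up to seven leading bytes of the start and stop rows, packing each difference into the next byte position of a long via the Byte.SIZE shift. A small standalone illustration with invented row values:

public class SplitLengthDemo {
    // Same packing idea as getLength() above, minus the Accumulo types.
    static long estimate(byte[] start, byte[] stop) {
        int maxCommon = Math.min(7, Math.min(start.length, stop.length));
        long diff = 0;
        for (int i = 0; i < maxCommon; i++) {
            diff |= 0xff & (start[i] ^ stop[i]);
            diff <<= Byte.SIZE;
        }
        if (start.length != stop.length) {
            diff |= 0xff;
        }
        return diff + 1;
    }

    public static void main(String[] args) {
        // 'a'^'a' = 0, then 'a'^'c' = 2 shifted one byte: 0x0200 + 1 = 513
        System.out.println(estimate(new byte[] { 'a', 'a' }, new byte[] { 'a', 'c' }));
    }
}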
From source file:org.springframework.jdbc.repo.impl.jdbc.RawPropertiesRepoImplTest.java
/**
 * Makes sure that {@link RawPropertiesRepo#removeProperties(String)}
 * returns no properties for a non-existing entity.
 */
@Test
public void testRemoveNonExistingEntityProperties() {
    final String ID = getClass().getSimpleName() + "#testRemoveNonExistingEntityProperties";
    assertSimplePropertiesUpdate(ID, createEntityProperties(ID));
    repo.removeProperties(ID);

    for (int index = 0; index < Byte.SIZE; index++) {
        Map<?, ?> actual = repo.removeProperties(ID);
        if (ExtendedMapUtils.size(actual) > 0) {
            fail("Unexpected properties values at index=" + index + ": " + actual);
        }
    }
}