List of usage examples for java.nio.ByteBuffer.getInt
public abstract int getInt(int index);
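getInt(int index) is an absolute read: it decodes four bytes starting at the given index using the buffer's current byte order and leaves the position untouched, unlike the relative getInt(), which advances the position by four. Before the excerpts below, a minimal self-contained sketch; the values and offsets are purely illustrative:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class GetIntDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.putInt(0, 0xCAFEBABE);          // absolute write at index 0
        buf.putInt(4, 42);                  // absolute write at index 4

        int first = buf.getInt(0);          // absolute read; position stays 0
        int second = buf.getInt(4);
        System.out.printf("%08X %d position=%d%n", first, second, buf.position());

        buf.order(ByteOrder.LITTLE_ENDIAN); // byte order changes how the same bytes decode
        System.out.printf("%08X%n", buf.getInt(0));
    }
}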
From source file:com.l2jfree.security.NewCipher.java
public static int getVerifiedChecksum(ByteBuffer buf, final int offset, final int size) {
    long calculated = 0;
    int end = offset + size - 4; // ignore embedded checksum
    int pos;
    for (pos = offset; pos < end; pos += 4) {
        final long i = buf.getInt(pos);
        calculated ^= (i & 0xffffffffL); // long literal: the int literal 0xffffffff would promote to -1L and mask nothing
    }
    return (int) calculated;
}
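A recurring detail in checksum code like the above is widening getInt()'s signed result to an unsigned long. The mask must be the long literal 0xffffffffL; the int literal 0xffffffff promotes to -1L and clears nothing. A small sketch demonstrating the difference:

import java.nio.ByteBuffer;

public class UnsignedWidenDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(4);
        buf.putInt(0, -1);                        // bytes FF FF FF FF

        long wrong = buf.getInt(0) & 0xffffffff;  // int literal promotes to -1L: a no-op mask
        long right = buf.getInt(0) & 0xffffffffL; // long literal clears the upper 32 bits

        System.out.println(wrong); // -1
        System.out.println(right); // 4294967295
    }
}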
From source file:com.l2jfree.security.NewCipher.java
/**
 * Calculates and embeds a packet's checksum.<BR>
 * Buffer's position will not be changed.
 *
 * @param buf byte buffer
 * @param offset offset to a packet's body
 * @param size packet's body size
 * @param experimental undocumented experimental features
 */
public static void appendChecksum(ByteBuffer buf, final int offset, final int size, boolean experimental) {
    int checksum = 0;
    int end = offset + size - 4; // ignore reserved bytes
    int pos;
    for (pos = offset; pos < end; pos += 4) {
        int i = buf.getInt(pos);
        checksum ^= i;
    }
    buf.putInt(pos, checksum);
    if (experimental) {
        Integer real = _checks.get(buf.get(offset));
        if (real != null) // someone knows a better scheme?
            buf.putInt(pos, real); // let them have it
    }
}
From source file:gridool.memcached.gateway.BinaryCommandProxy.java
private static void xferResponse(final byte opcode, final SocketChannel src, final Channel dst, final String key)
        throws IOException {
    ByteBuffer headerBuf = ByteBuffer.allocate(BinaryProtocol.HEADER_LENGTH);
    int headerRead = NIOUtils.readFully(src, headerBuf, BinaryProtocol.HEADER_LENGTH);
    assert (headerRead == BinaryProtocol.HEADER_LENGTH) : headerRead;
    headerBuf.flip();
    if (BinaryProtocol.surpressSuccessResponse(opcode)) { // piggyback never happens
        final short status = headerBuf.getShort(6);
        if (status == 0) {
            return;
        }
    }
    ChannelBuffer res;
    int totalBody = headerBuf.getInt(8); // total-body-length field of the binary protocol header
    if (totalBody > 0) {
        ByteBuffer bodyBuf = ByteBuffer.allocate(totalBody);
        int bodyRead = NIOUtils.readFully(src, bodyBuf, totalBody);
        assert (bodyRead == totalBody) : "bodyRead (" + bodyRead + ") != totalBody (" + totalBody + ")";
        bodyBuf.flip();
        res = ChannelBuffers.wrappedBuffer(headerBuf, bodyBuf);
    } else {
        res = ChannelBuffers.wrappedBuffer(headerBuf);
    }
    String opname = BinaryProtocol.resolveName(headerBuf.get(1));
    if (LOG.isDebugEnabled()) {
        Header header = new Header();
        header.decode(headerBuf);
        LOG.debug("Start sending memcached response [" + opname + "] " + res.readableBytes()
                + " bytes for key '" + key + "'\n" + header + '\n'
                + Arrays.toString(res.toByteBuffer().array()));
    }
    dst.write(res).addListener(new VerboseListener("sendResponse [" + opname + "] for key: " + key));
}
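The header read above is classic length-prefixed framing: a fixed-size header is read first, and the absolute getInt(8) says how many body bytes follow. A self-contained sketch of the same framing against an in-memory buffer; the 12-byte header layout here is invented for illustration:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class LengthPrefixedFrameDemo {
    public static void main(String[] args) {
        byte[] body = "hello".getBytes(StandardCharsets.US_ASCII);

        // Hypothetical frame: opcode (1 byte), padding (7 bytes), body length (int at offset 8).
        ByteBuffer frame = ByteBuffer.allocate(12 + body.length);
        frame.put(0, (byte) 0x01);
        frame.putInt(8, body.length);
        for (int i = 0; i < body.length; i++) {
            frame.put(12 + i, body[i]);
        }

        // Decode: the absolute getInt at offset 8 yields the body length.
        int totalBody = frame.getInt(8);
        byte[] decoded = new byte[totalBody];
        for (int i = 0; i < totalBody; i++) {
            decoded[i] = frame.get(12 + i);
        }
        System.out.println(new String(decoded, StandardCharsets.US_ASCII)); // hello
    }
}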
From source file:com.linkedin.pinot.perf.ForwardIndexReaderBenchmark.java
public static void singleValuedReadBenchMarkV2(File file, int numDocs, int numBits) throws Exception {
    boolean signed = false;
    boolean isMmap = false;
    long start, end;
    boolean fullScan = true;
    boolean batchRead = true;
    boolean singleRead = true;
    PinotDataBuffer heapBuffer = PinotDataBuffer.fromFile(file, ReadMode.heap, FileChannel.MapMode.READ_ONLY,
            "benchmarking");
    com.linkedin.pinot.core.io.reader.impl.v2.FixedBitSingleValueReader reader =
            new com.linkedin.pinot.core.io.reader.impl.v2.FixedBitSingleValueReader(heapBuffer, numDocs,
                    numBits, signed);
    if (fullScan) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        ByteBuffer buffer = ByteBuffer.allocateDirect((int) file.length());
        RandomAccessFile raf = new RandomAccessFile(file, "r");
        raf.getChannel().read(buffer);
        raf.close();
        int[] input = new int[numBits];
        int[] output = new int[32];
        int numBatches = (numDocs + 31) / 32;
        for (int run = 0; run < MAX_RUNS; run++) {
            start = System.currentTimeMillis();
            for (int i = 0; i < numBatches; i++) {
                for (int j = 0; j < numBits; j++) {
                    input[j] = buffer.getInt(i * numBits * 4 + j * 4);
                }
                BitPacking.fastunpack(input, 0, output, 0, numBits);
            }
            end = System.currentTimeMillis();
            stats.addValue(end - start);
        }
        System.out.println(" v2 full scan stats for " + file.getName());
        System.out.println(stats.toString().replaceAll("\n", ", ") + " raw:" + Arrays.toString(stats.getValues()));
    }
    if (singleRead) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        // sequential read
        for (int run = 0; run < MAX_RUNS; run++) {
            start = System.currentTimeMillis();
            for (int i = 0; i < numDocs; i++) {
                int value = reader.getInt(i);
            }
            end = System.currentTimeMillis();
            stats.addValue(end - start);
        }
        System.out.println(" v2 sequential single read for " + file.getName());
        System.out.println(stats.toString().replaceAll("\n", ", ") + " raw:" + Arrays.toString(stats.getValues()));
    }
    if (batchRead) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        int batchSize = Math.min(5000, numDocs);
        int[] output = new int[batchSize];
        int[] rowIds = new int[batchSize];
        // sequential read
        for (int run = 0; run < MAX_RUNS; run++) {
            start = System.currentTimeMillis();
            int rowId = 0;
            while (rowId < numDocs) {
                int length = Math.min(batchSize, numDocs - rowId);
                for (int i = 0; i < length; i++) {
                    rowIds[i] = rowId + i;
                }
                reader.getIntBatch(rowIds, output, length);
                rowId = rowId + length;
            }
            end = System.currentTimeMillis();
            stats.addValue(end - start);
        }
        System.out.println("v2 sequential batch read stats for " + file.getName());
        System.out.println(stats.toString().replaceAll("\n", ", ") + " raw:" + Arrays.toString(stats.getValues()));
    }
    reader.close();
}
From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java
private static Map<UUID, List<UUID>> parseGraph(ByteBuffer graphByteBuffer) {
    int count = graphByteBuffer.getInt(graphByteBuffer.limit() - 12);
    ByteBuffer buffer = graphByteBuffer.duplicate();
    buffer.limit(graphByteBuffer.limit() - 16);
    List<UUID> uuids = newArrayListWithCapacity(count);
    for (int i = 0; i < count; i++) {
        uuids.add(new UUID(buffer.getLong(), buffer.getLong()));
    }
    Map<UUID, List<UUID>> graph = newHashMap();
    while (buffer.hasRemaining()) {
        UUID uuid = uuids.get(buffer.getInt());
        List<UUID> list = newArrayList();
        int refid = buffer.getInt();
        while (refid != -1) { // -1 terminates each reference list
            list.add(uuids.get(refid));
            refid = buffer.getInt();
        }
        graph.put(uuid, list);
    }
    return graph;
}
From source file:org.apache.jackrabbit.oak.segment.file.TarReader.java
private static Map<UUID, List<UUID>> parseGraph(ByteBuffer graphByteBuffer, boolean bulkOnly) {
    int count = graphByteBuffer.getInt(graphByteBuffer.limit() - 12);
    ByteBuffer buffer = graphByteBuffer.duplicate();
    buffer.limit(graphByteBuffer.limit() - 16);
    List<UUID> uuids = newArrayListWithCapacity(count);
    for (int i = 0; i < count; i++) {
        uuids.add(new UUID(buffer.getLong(), buffer.getLong()));
    }
    Map<UUID, List<UUID>> graph = newHashMap();
    while (buffer.hasRemaining()) {
        UUID uuid = uuids.get(buffer.getInt());
        List<UUID> list = newArrayList();
        int refid = buffer.getInt();
        while (refid != -1) { // -1 terminates each reference list
            UUID ref = uuids.get(refid);
            if (!bulkOnly || !isDataSegmentId(ref.getLeastSignificantBits())) {
                list.add(ref);
            }
            refid = buffer.getInt();
        }
        graph.put(uuid, list);
    }
    return graph;
}
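Both TarReader variants decode the same layout: a count and a UUID table read via an absolute getInt() at a fixed distance from the limit, followed by reference lists read with the relative getInt() and terminated by -1. A stripped-down sketch of that sentinel-terminated pattern, with an invented payload:

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

public class SentinelListDemo {
    public static void main(String[] args) {
        // Encode the entries 7, 3, 9 followed by a -1 terminator.
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.putInt(7).putInt(3).putInt(9).putInt(-1);
        buf.flip();

        // Decode with relative getInt(): each call consumes four bytes.
        List<Integer> refs = new ArrayList<>();
        int id = buf.getInt();
        while (id != -1) {
            refs.add(id);
            id = buf.getInt();
        }
        System.out.println(refs); // [7, 3, 9]
    }
}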
From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffReader.java
public static boolean isMMMultipageTiff(String directory) throws IOException {
    File dir = new File(directory);
    File[] children = dir.listFiles();
    File testFile = null;
    for (File child : children) {
        if (child.isDirectory()) {
            File[] grandchildren = child.listFiles();
            for (File grandchild : grandchildren) {
                if (grandchild.getName().endsWith(".tif")) {
                    testFile = grandchild;
                    break;
                }
            }
        } else if (child.getName().endsWith(".tif") || child.getName().endsWith(".TIF")) {
            testFile = child;
            break;
        }
    }
    if (testFile == null) {
        throw new IOException("Unexpected file structure: is this an MM dataset?");
    }
    RandomAccessFile ra;
    try {
        ra = new RandomAccessFile(testFile, "r");
    } catch (FileNotFoundException ex) {
        ReportingUtils.logError(ex);
        return false;
    }
    FileChannel channel = ra.getChannel();
    ByteBuffer tiffHeader = ByteBuffer.allocate(36);
    ByteOrder bo;
    channel.read(tiffHeader, 0);
    char zeroOne = tiffHeader.getChar(0);
    if (zeroOne == 0x4949) { // "II": little-endian TIFF
        bo = ByteOrder.LITTLE_ENDIAN;
    } else if (zeroOne == 0x4d4d) { // "MM": big-endian TIFF
        bo = ByteOrder.BIG_ENDIAN;
    } else {
        throw new IOException("Error reading Tiff header");
    }
    tiffHeader.order(bo);
    int summaryMDHeader = tiffHeader.getInt(32); // decoded with the detected byte order
    channel.close();
    ra.close();
    return summaryMDHeader == MultipageTiffWriter.SUMMARY_MD_HEADER;
}
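The key move above is choosing the byte order from a marker before the absolute getInt(32) is issued. A compact sketch of that probe-then-read pattern; readField and the 8-byte layout are invented for illustration, while 0x4949/0x4D4D are TIFF's real "II"/"MM" markers:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class ByteOrderProbe {
    // Decides the buffer order from TIFF's 2-byte marker, then reads an int field.
    static int readField(ByteBuffer header, int fieldOffset) {
        char marker = header.getChar(0);           // 0x4949 = "II", 0x4D4D = "MM"
        if (marker == 0x4949) {
            header.order(ByteOrder.LITTLE_ENDIAN);
        } else if (marker == 0x4D4D) {
            header.order(ByteOrder.BIG_ENDIAN);
        } else {
            throw new IllegalArgumentException("not a TIFF header");
        }
        return header.getInt(fieldOffset);         // decoded with the detected order
    }

    public static void main(String[] args) {
        ByteBuffer little = ByteBuffer.allocate(8);
        little.put(0, (byte) 'I').put(1, (byte) 'I');
        little.put(4, (byte) 0x2A);                 // 42, little-endian, at offset 4
        System.out.println(readField(little, 4));   // 42
    }
}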
From source file:com.l2jfree.security.NewCipher.java
/**
 * Verifies a packet's checksum.
 *
 * @param buf byte buffer
 * @param offset offset to a packet's body
 * @param size packet's body size
 * @param experimental undocumented experimental features
 * @param report whether to report checksum validation failures
 * @return whether packet integrity is OK or not
 */
public static boolean verifyChecksum(ByteBuffer buf, final int offset, final int size, boolean experimental,
        boolean report) {
    // FIXME: this whole method is most likely a big hoax
    // there is no checksum and definitely no validation!
    // check if size is a multiple of 4 (and > 0)
    if ((size & 3) != 0 || size <= 4) {
        if (report)
            reportSoCalledChecksum(null, offset, size, 0, 0);
        return false;
    }
    long calculated = 0;
    int end = offset + size - 4; // ignore embedded checksum
    int pos;
    for (pos = offset; pos < end; pos += 4) {
        final long i = buf.getInt(pos);
        calculated ^= (i & 0xffffffffL); // long literal, so the sign extension is actually cleared
    }
    long real = buf.getInt(pos);
    real &= 0xffffffffL; // likewise: the int literal 0xffffffff would be a no-op mask
    if (experimental && calculated != real) // someone knows a better scheme?
        _checks.put(buf.get(offset), (int) real); // let them have it
    reportSoCalledChecksum(buf, offset, size, calculated, real);
    return (calculated == real);
}
From source file:com.koda.integ.hbase.storage.FileExtMultiStorage.java
@Override
public StorageHandle storeData(ByteBuffer buf) {
    int numStorages = storages.length;
    // Key length (16)
    int length = buf.getInt(4);
    int hash = Utils.hash(buf, 8, length, 0);
    // note: Math.abs(Integer.MIN_VALUE) is still negative; a mask such as
    // (hash & 0x7fffffff) would avoid that edge case
    int storageId = Math.abs(hash) % numStorages;
    FileStorageHandle handle = (FileStorageHandle) storages[storageId].storeData(buf);
    addCacheRootId(handle, storageId);
    return handle;
}
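The dispatch above picks a shard from a hash of the key bytes, whose length sits at a fixed offset in the buffer. A reduced sketch of that routing idea; the length-at-offset-4 layout and the hash function are simplified stand-ins, not the Utils.hash used above:

import java.nio.ByteBuffer;

public class ShardRouteDemo {
    static int pickShard(ByteBuffer buf, int numShards) {
        int keyLength = buf.getInt(4);          // key length stored at a fixed offset
        int hash = 1;
        for (int i = 0; i < keyLength; i++) {
            hash = 31 * hash + buf.get(8 + i);  // hash the key bytes that follow
        }
        return (hash & 0x7fffffff) % numShards; // mask avoids the Math.abs(MIN_VALUE) pitfall
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.putInt(4, 3);                       // key length = 3
        buf.put(8, (byte) 'a').put(9, (byte) 'b').put(10, (byte) 'c');
        System.out.println(pickShard(buf, 4));  // a stable shard id in [0, 4)
    }
}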
From source file:guru.benson.pinch.Pinch.java
/**
 * Extract all ZipEntries from the ZIP central directory.
 *
 * @param buf The byte buffer containing the ZIP central directory.
 *
 * @return A list with all ZipEntries.
 */
private static ArrayList<ExtendedZipEntry> parseHeaders(ByteBuffer buf) {
    ArrayList<ExtendedZipEntry> zeList = new ArrayList<ExtendedZipEntry>();
    buf.order(ByteOrder.LITTLE_ENDIAN); // ZIP metadata is little-endian
    int offset = 0;
    while (offset < buf.limit() - ZipConstants.CENHDR) {
        short fileNameLen = buf.getShort(offset + ZipConstants.CENNAM);
        short extraFieldLen = buf.getShort(offset + ZipConstants.CENEXT);
        short fileCommentLen = buf.getShort(offset + ZipConstants.CENCOM);
        String fileName = new String(buf.array(), offset + ZipConstants.CENHDR, fileNameLen);
        ExtendedZipEntry zeGermans = new ExtendedZipEntry(fileName);
        zeGermans.setMethod(buf.getShort(offset + ZipConstants.CENHOW));
        // The CRC, size and offset fields are unsigned 32-bit values, so widen the
        // signed int from getInt() with a long mask. (Passing the int to
        // CRC32.update(int), as the original did, would treat it as a single byte.)
        zeGermans.setCrc(buf.getInt(offset + ZipConstants.CENCRC) & 0xffffffffL);
        zeGermans.setCompressedSize(buf.getInt(offset + ZipConstants.CENSIZ) & 0xffffffffL);
        zeGermans.setSize(buf.getInt(offset + ZipConstants.CENLEN) & 0xffffffffL);
        zeGermans.setInternalAttr(buf.getShort(offset + ZipConstants.CENATT));
        zeGermans.setExternalAttr(buf.getShort(offset + ZipConstants.CENATX));
        zeGermans.setOffset(buf.getInt(offset + ZipConstants.CENOFF) & 0xffffffffL);
        zeGermans.setExtraLength(extraFieldLen);
        zeList.add(zeGermans);
        offset += ZipConstants.CENHDR + fileNameLen + extraFieldLen + fileCommentLen;
    }
    return zeList;
}
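The same offset-walking pattern, reduced to essentials: fixed-size records with trailing variable-length fields, scanned entirely with absolute gets so the buffer's position never needs rewinding. The record layout here is invented for illustration:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class RecordWalkDemo {
    // Hypothetical record: int value (4 bytes), short nameLen (2 bytes), then name bytes.
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(64).order(ByteOrder.LITTLE_ENDIAN);
        buf.putInt(100).putShort((short) 3).put("foo".getBytes());
        buf.putInt(200).putShort((short) 2).put("hi".getBytes());
        buf.flip();

        int offset = 0;
        while (offset < buf.limit()) {
            int value = buf.getInt(offset);         // absolute reads leave the position alone
            short nameLen = buf.getShort(offset + 4);
            String name = new String(buf.array(), offset + 6, nameLen);
            System.out.println(name + " = " + value);
            offset += 6 + nameLen;                  // fixed part + variable part
        }
    }
}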