List of usage examples for java.nio.ByteBuffer.clear()
public final Buffer clear()
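clear() prepares the buffer for a new sequence of put or channel-read operations: the position is reset to 0, the limit is set to the capacity, and the mark is discarded. It does not erase the buffer's contents. A minimal standalone sketch of that behavior (the ClearDemo class name is only for illustration):

import java.nio.ByteBuffer;

public class ClearDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.putInt(42);                      // position is now 4
        buf.clear();                         // position = 0, limit = capacity, mark discarded
        System.out.println(buf.position());  // 0
        System.out.println(buf.limit());     // 8
        // The contents are not erased; the previously written int is still readable.
        System.out.println(buf.getInt());    // 42
    }
}

The examples below follow this pattern: clear() is called either right after wrapping a byte array (to start writing from the beginning) or at the end of a read loop iteration (to make the whole buffer writable again).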
From source file:org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd.java
@SuppressWarnings("unchecked") private void runTest(Table hTable, int expectedSize) throws IOException { // [0, 2, ?, ?, ?, ?, 0, 0, 0, 1] byte[] fuzzyKey1 = new byte[10]; ByteBuffer buf = ByteBuffer.wrap(fuzzyKey1); buf.clear(); buf.putShort((short) 2); for (int i = 0; i < 4; i++) buf.put(fuzzyValue);/*from ww w . java2s . co m*/ buf.putInt((short) 1); byte[] mask1 = new byte[] { 0, 0, 1, 1, 1, 1, 0, 0, 0, 0 }; byte[] fuzzyKey2 = new byte[10]; buf = ByteBuffer.wrap(fuzzyKey2); buf.clear(); buf.putShort((short) 2); buf.putInt((short) 2); for (int i = 0; i < 4; i++) buf.put(fuzzyValue); byte[] mask2 = new byte[] { 0, 0, 0, 0, 0, 0, 1, 1, 1, 1 }; Pair<byte[], byte[]> pair1 = new Pair<byte[], byte[]>(fuzzyKey1, mask1); Pair<byte[], byte[]> pair2 = new Pair<byte[], byte[]>(fuzzyKey2, mask2); FuzzyRowFilter fuzzyRowFilter1 = new FuzzyRowFilter(Lists.newArrayList(pair1)); FuzzyRowFilter fuzzyRowFilter2 = new FuzzyRowFilter(Lists.newArrayList(pair2)); // regular test - we expect 1 row back (5 KVs) runScanner(hTable, expectedSize, fuzzyRowFilter1, fuzzyRowFilter2); }
From source file:xbird.util.nio.RemoteMemoryMappedFile.java
private int[] recvResponse(final ReadableByteChannel channel, final ByteBuffer buf, final int dstlen)
        throws IOException {
    buf.clear();
    // set endian optimized for this machine
    final boolean isBufBigEndian = (buf.order() == ByteOrder.BIG_ENDIAN);
    if (_bigEndian != isBufBigEndian) {
        buf.order(_bigEndian ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN);
    }
    NIOUtils.readFully(channel, buf, _pageSize);
    buf.flip();
    IntBuffer ibuf = buf.asIntBuffer();
    int[] dst = new int[dstlen];
    ibuf.get(dst);
    return dst;
}
From source file:org.apache.kylin.storage.hbase.cube.v1.filter.TestFuzzyRowFilterV2EndToEnd.java
private void runTest1(HTable hTable) throws IOException {
    // [0, 2, ?, ?, ?, ?, 0, 0, 0, 1]
    byte[] mask = new byte[] { 0, 0, 1, 1, 1, 1, 0, 0, 0, 0 };

    List<Pair<byte[], byte[]>> list = new ArrayList<Pair<byte[], byte[]>>();
    for (int i = 0; i < totalFuzzyKeys; i++) {
        byte[] fuzzyKey = new byte[10];
        ByteBuffer buf = ByteBuffer.wrap(fuzzyKey);
        buf.clear();
        buf.putShort((short) 2);
        for (int j = 0; j < 4; j++) {
            buf.put(fuzzyValue);
        }
        buf.putInt(i);

        Pair<byte[], byte[]> pair = Pair.newPair(fuzzyKey, mask);
        list.add(pair);
    }

    int expectedSize = secondPartCardinality * totalFuzzyKeys * colQualifiersTotal;
    FuzzyRowFilterV2 fuzzyRowFilter0 = new FuzzyRowFilterV2(list);
    // Filters are not stateless - we can't reuse them
    FuzzyRowFilterV2 fuzzyRowFilter1 = new FuzzyRowFilterV2(list);

    // regular test
    runScanner(hTable, expectedSize, fuzzyRowFilter0);
    // optimized from block cache
    runScanner(hTable, expectedSize, fuzzyRowFilter1);
}
From source file:org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd.java
private void runTest1(Table hTable) throws IOException {
    // [0, 2, ?, ?, ?, ?, 0, 0, 0, 1]
    byte[] mask = new byte[] { 0, 0, 1, 1, 1, 1, 0, 0, 0, 0 };

    List<Pair<byte[], byte[]>> list = new ArrayList<Pair<byte[], byte[]>>();
    for (int i = 0; i < totalFuzzyKeys; i++) {
        byte[] fuzzyKey = new byte[10];
        ByteBuffer buf = ByteBuffer.wrap(fuzzyKey);
        buf.clear();
        buf.putShort((short) 2);
        for (int j = 0; j < 4; j++) {
            buf.put(fuzzyValue);
        }
        buf.putInt(i);

        Pair<byte[], byte[]> pair = new Pair<byte[], byte[]>(fuzzyKey, mask);
        list.add(pair);
    }

    int expectedSize = secondPartCardinality * totalFuzzyKeys * colQualifiersTotal;
    FuzzyRowFilter fuzzyRowFilter0 = new FuzzyRowFilter(list);
    // Filters are not stateless - we can't reuse them
    FuzzyRowFilter fuzzyRowFilter1 = new FuzzyRowFilter(list);

    // regular test
    runScanner(hTable, expectedSize, fuzzyRowFilter0);
    // optimized from block cache
    runScanner(hTable, expectedSize, fuzzyRowFilter1);
}
From source file:org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.MappableBlock.java
/**
 * Verifies the block's checksum. This is an I/O intensive operation.
 * Throws a ChecksumException if verification fails.
 */
private static void verifyChecksum(long length, FileInputStream metaIn, FileChannel blockChannel,
        String blockFileName) throws IOException, ChecksumException {
    // Verify the checksum from the block's meta file
    // Get the DataChecksum from the meta file header
    BlockMetadataHeader header = BlockMetadataHeader.readHeader(
            new DataInputStream(new BufferedInputStream(metaIn, BlockMetadataHeader.getHeaderSize())));
    FileChannel metaChannel = null;
    try {
        metaChannel = metaIn.getChannel();
        if (metaChannel == null) {
            throw new IOException("Block InputStream meta file has no FileChannel.");
        }
        DataChecksum checksum = header.getChecksum();
        final int bytesPerChecksum = checksum.getBytesPerChecksum();
        final int checksumSize = checksum.getChecksumSize();
        final int numChunks = (8 * 1024 * 1024) / bytesPerChecksum;
        ByteBuffer blockBuf = ByteBuffer.allocate(numChunks * bytesPerChecksum);
        ByteBuffer checksumBuf = ByteBuffer.allocate(numChunks * checksumSize);
        // Verify the checksum
        int bytesVerified = 0;
        while (bytesVerified < length) {
            Preconditions.checkState(bytesVerified % bytesPerChecksum == 0,
                    "Unexpected partial chunk before EOF");
            int bytesRead = fillBuffer(blockChannel, blockBuf);
            if (bytesRead == -1) {
                throw new IOException("checksum verification failed: premature EOF");
            }
            blockBuf.flip();
            // Number of read chunks, including partial chunk at end
            int chunks = (bytesRead + bytesPerChecksum - 1) / bytesPerChecksum;
            checksumBuf.limit(chunks * checksumSize);
            fillBuffer(metaChannel, checksumBuf);
            checksumBuf.flip();
            checksum.verifyChunkedSums(blockBuf, checksumBuf, blockFileName, bytesVerified);
            // Success
            bytesVerified += bytesRead;
            blockBuf.clear();
            checksumBuf.clear();
        }
    } finally {
        IOUtils.closeQuietly(metaChannel);
    }
}
From source file:org.apache.kylin.storage.hbase.cube.v1.filter.TestFuzzyRowFilterV2EndToEnd.java
private void runTest2(HTable hTable) throws IOException {
    // [0, 0, ?, ?, ?, ?, 0, 0, 0, 0] , [0, 1, ?, ?, ?, ?, 0, 0, 0, 1]...
    byte[] mask = new byte[] { 0, 0, 1, 1, 1, 1, 0, 0, 0, 0 };

    List<Pair<byte[], byte[]>> list = new ArrayList<Pair<byte[], byte[]>>();
    for (int i = 0; i < totalFuzzyKeys; i++) {
        byte[] fuzzyKey = new byte[10];
        ByteBuffer buf = ByteBuffer.wrap(fuzzyKey);
        buf.clear();
        buf.putShort((short) (i * 2));
        for (int j = 0; j < 4; j++) {
            buf.put(fuzzyValue);
        }
        buf.putInt(i * 2);

        Pair<byte[], byte[]> pair = Pair.newPair(fuzzyKey, mask);
        list.add(pair);
    }

    int expectedSize = totalFuzzyKeys * secondPartCardinality * colQualifiersTotal;
    FuzzyRowFilterV2 fuzzyRowFilter0 = new FuzzyRowFilterV2(list);
    // Filters are not stateless - we can't reuse them
    FuzzyRowFilterV2 fuzzyRowFilter1 = new FuzzyRowFilterV2(list);

    // regular test
    runScanner(hTable, expectedSize, fuzzyRowFilter0);
    // optimized from block cache
    runScanner(hTable, expectedSize, fuzzyRowFilter1);
}
From source file:org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd.java
private void runTest2(Table hTable) throws IOException {
    // [0, 0, ?, ?, ?, ?, 0, 0, 0, 0] , [0, 1, ?, ?, ?, ?, 0, 0, 0, 1]...
    byte[] mask = new byte[] { 0, 0, 1, 1, 1, 1, 0, 0, 0, 0 };

    List<Pair<byte[], byte[]>> list = new ArrayList<Pair<byte[], byte[]>>();
    for (int i = 0; i < totalFuzzyKeys; i++) {
        byte[] fuzzyKey = new byte[10];
        ByteBuffer buf = ByteBuffer.wrap(fuzzyKey);
        buf.clear();
        buf.putShort((short) (i * 2));
        for (int j = 0; j < 4; j++) {
            buf.put(fuzzyValue);
        }
        buf.putInt(i * 2);

        Pair<byte[], byte[]> pair = new Pair<byte[], byte[]>(fuzzyKey, mask);
        list.add(pair);
    }

    int expectedSize = totalFuzzyKeys * secondPartCardinality * colQualifiersTotal;
    FuzzyRowFilter fuzzyRowFilter0 = new FuzzyRowFilter(list);
    // Filters are not stateless - we can't reuse them
    FuzzyRowFilter fuzzyRowFilter1 = new FuzzyRowFilter(list);

    // regular test
    runScanner(hTable, expectedSize, fuzzyRowFilter0);
    // optimized from block cache
    runScanner(hTable, expectedSize, fuzzyRowFilter1);
}
From source file:org.apache.nifi.processor.util.listen.dispatcher.SocketChannelDispatcher.java
@Override
public void run() {
    while (!stopped) {
        try {
            int selected = selector.select();
            // if stopped the selector could already be closed which would result in a ClosedSelectorException
            if (selected > 0 && !stopped) {
                Iterator<SelectionKey> selectorKeys = selector.selectedKeys().iterator();
                // if stopped we don't want to modify the keys because close() may still be in progress
                while (selectorKeys.hasNext() && !stopped) {
                    SelectionKey key = selectorKeys.next();
                    selectorKeys.remove();
                    if (!key.isValid()) {
                        continue;
                    }
                    if (key.isAcceptable()) {
                        // Handle new connections coming in
                        final ServerSocketChannel channel = (ServerSocketChannel) key.channel();
                        final SocketChannel socketChannel = channel.accept();
                        // Check for available connections
                        if (currentConnections.incrementAndGet() > maxConnections) {
                            currentConnections.decrementAndGet();
                            logger.warn("Rejecting connection from {} because max connections has been met",
                                    new Object[] { socketChannel.getRemoteAddress().toString() });
                            IOUtils.closeQuietly(socketChannel);
                            continue;
                        }
                        logger.debug("Accepted incoming connection from {}",
                                new Object[] { socketChannel.getRemoteAddress().toString() });
                        // Set socket to non-blocking, and register with selector
                        socketChannel.configureBlocking(false);
                        SelectionKey readKey = socketChannel.register(selector, SelectionKey.OP_READ);

                        // Prepare the byte buffer for the reads, clear it out
                        ByteBuffer buffer = bufferPool.poll();
                        buffer.clear();
                        buffer.mark();

                        // If we have an SSLContext then create an SSLEngine for the channel
                        SSLSocketChannel sslSocketChannel = null;
                        if (sslContext != null) {
                            final SSLEngine sslEngine = sslContext.createSSLEngine();
                            sslEngine.setUseClientMode(false);
                            switch (clientAuth) {
                            case REQUIRED:
                                sslEngine.setNeedClientAuth(true);
                                break;
                            case WANT:
                                sslEngine.setWantClientAuth(true);
                                break;
                            case NONE:
                                sslEngine.setNeedClientAuth(false);
                                sslEngine.setWantClientAuth(false);
                                break;
                            }
                            sslSocketChannel = new SSLSocketChannel(sslEngine, socketChannel);
                        }

                        // Attach the buffer and SSLSocketChannel to the key
                        SocketChannelAttachment attachment = new SocketChannelAttachment(buffer, sslSocketChannel);
                        readKey.attach(attachment);
                    } else if (key.isReadable()) {
                        // Clear out the operations the select is interested in until done reading
                        key.interestOps(0);
                        // Create a handler based on the protocol and whether an SSLEngine was provided or not
                        final Runnable handler;
                        if (sslContext != null) {
                            handler = handlerFactory.createSSLHandler(key, this, charset, eventFactory, events, logger);
                        } else {
                            handler = handlerFactory.createHandler(key, this, charset, eventFactory, events, logger);
                        }
                        // run the handler
                        executor.execute(handler);
                    }
                }
            }

            // Add back all idle sockets to the select
            SelectionKey key;
            while ((key = keyQueue.poll()) != null) {
                key.interestOps(SelectionKey.OP_READ);
            }
        } catch (IOException e) {
            logger.error("Error accepting connection from SocketChannel", e);
        }
    }
}
From source file:org.apache.kylin.storage.hbase.cube.v1.filter.TestFuzzyRowFilterV2EndToEnd.java
@SuppressWarnings("deprecation") @Test//from ww w . ja va 2s . c o m public void testFilterList() throws Exception { String cf = "f"; String table = "TestFuzzyRowFiltersInFilterList"; HTable ht = TEST_UTIL.createTable(TableName.valueOf(table), Bytes.toBytes(cf), Integer.MAX_VALUE); // 10 byte row key - (2 bytes 4 bytes 4 bytes) // 4 byte qualifier // 4 byte value for (int i1 = 0; i1 < 5; i1++) { for (int i2 = 0; i2 < 5; i2++) { byte[] rk = new byte[10]; ByteBuffer buf = ByteBuffer.wrap(rk); buf.clear(); buf.putShort((short) 2); buf.putInt(i1); buf.putInt(i2); // Each row contains 5 columns for (int c = 0; c < 5; c++) { byte[] cq = new byte[4]; Bytes.putBytes(cq, 0, Bytes.toBytes(c), 0, 4); Put p = new Put(rk); p.setDurability(Durability.SKIP_WAL); p.add(cf.getBytes(), cq, Bytes.toBytes(c)); ht.put(p); LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: " + Bytes.toStringBinary(cq)); } } } TEST_UTIL.flush(); // test passes if we get back 5 KV's (1 row) runTest(ht, 5); }
From source file:org.apache.hadoop.hbase.client.TestResult.java
public void testMultiVersionLoadValue() throws Exception {
    KeyValue[] kvs1 = genKVs(row, family, value, 1, 100);
    KeyValue[] kvs2 = genKVs(row, family, value, 200, 100);

    KeyValue[] kvs = new KeyValue[kvs1.length + kvs2.length];
    System.arraycopy(kvs1, 0, kvs, 0, kvs1.length);
    System.arraycopy(kvs2, 0, kvs, kvs1.length, kvs2.length);

    Arrays.sort(kvs, KeyValue.COMPARATOR);

    ByteBuffer loadValueBuffer = ByteBuffer.allocate(1024);
    Result r = Result.create(kvs);
    for (int i = 0; i < 100; ++i) {
        final byte[] qf = Bytes.toBytes(i);

        loadValueBuffer.clear();
        r.loadValue(family, qf, loadValueBuffer);
        loadValueBuffer.flip();
        assertEquals(ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))), loadValueBuffer);
        assertEquals(ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))), r.getValueAsByteBuffer(family, qf));
    }
}
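Several of the examples above call clear() between iterations of a fill/flip/drain cycle: fill the buffer from a channel, flip it, consume the data, then clear it so the next read starts from an empty buffer. A minimal standalone sketch of that pattern, assuming an arbitrary file name, buffer size, and a hypothetical process() helper:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ReadLoopDemo {
    public static void main(String[] args) throws IOException {
        // "data.bin" and the 4 KB buffer size are arbitrary, for illustration only.
        try (FileChannel channel = FileChannel.open(Paths.get("data.bin"), StandardOpenOption.READ)) {
            ByteBuffer buf = ByteBuffer.allocate(4096);
            while (channel.read(buf) != -1) {   // fill
                buf.flip();                     // switch to draining mode
                while (buf.hasRemaining()) {
                    process(buf.get());         // drain
                }
                buf.clear();                    // make the whole buffer writable again
            }
        }
    }

    private static void process(byte b) {
        // placeholder for real per-byte work
    }
}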