List of usage examples for java.nio.ByteBuffer.flip()
public final Buffer flip()
From source file:org.apache.metron.profiler.hbase.SaltyRowKeyBuilderTest.java
/** * Build a row key that includes a single group that is an integer. *//* ww w .j a va 2 s . c o m*/ @Test public void testRowKeyWithOneIntegerGroup() throws Exception { // setup measurement.withGroups(Arrays.asList(200)); // the expected row key ByteBuffer buffer = ByteBuffer.allocate(100) .put(SaltyRowKeyBuilder.getSalt(measurement.getPeriod(), saltDivisor)) .put(measurement.getProfileName().getBytes()).put(measurement.getEntity().getBytes()) .put("200".getBytes()).putLong(1635701L); buffer.flip(); final byte[] expected = new byte[buffer.limit()]; buffer.get(expected, 0, buffer.limit()); // validate byte[] actual = rowKeyBuilder.rowKey(measurement); Assert.assertTrue(Arrays.equals(expected, actual)); }
From source file:com.intel.chimera.stream.AbstractCryptoStreamTest.java
private void doByteBufferWrite(String cipherClass, ByteArrayOutputStream baos, boolean withChannel) throws Exception { baos.reset();// w ww.j ava2 s . co m CryptoOutputStream out = getCryptoOutputStream(baos, getCipher(cipherClass), defaultBufferSize, iv, withChannel); ByteBuffer buf = ByteBuffer.allocateDirect(dataLen / 2); buf.put(data, 0, dataLen / 2); buf.flip(); int n1 = out.write(buf); buf.clear(); buf.put(data, n1, dataLen / 3); buf.flip(); int n2 = out.write(buf); buf.clear(); buf.put(data, n1 + n2, dataLen - n1 - n2); buf.flip(); int n3 = out.write(buf); Assert.assertEquals(dataLen, n1 + n2 + n3); out.flush(); InputStream in = getCryptoInputStream(new ByteArrayInputStream(encData), getCipher(cipherClass), defaultBufferSize, iv, withChannel); buf = ByteBuffer.allocate(dataLen + 100); byteBufferReadCheck(in, buf, 0); in.close(); }
From source file:org.alfresco.cacheserver.PatchServiceRESTTest.java
/**
 * Reassembles a {@link PatchDocument} from a multipart response.
 *
 * <p>Parts named {@code p_size}, {@code p_last_match_idx} and {@code p_stream}
 * arrive as a repeating triple, one triple per patch; once all three have been
 * seen, the streamed bytes are drained and a {@link Patch} is created. The
 * {@code p_block_size} and {@code p_match_count} parts are document-level
 * scalars.
 *
 * @param resource the multipart payload to decode
 * @return the decoded patch document
 * @throws IOException if a part's stream cannot be read
 */
public PatchDocument getPatch(MultiPart resource) throws IOException {
    Integer blockSize = null;
    Integer matchCount = null; // parsed for validation; not carried into the document
    List<Integer> matchedBlocks = null; // never populated by this endpoint — TODO confirm intended
    List<Patch> patches = new LinkedList<>();

    int partsSeen = 0; // counts the p_size / p_last_match_idx / p_stream triple
    InputStream is = null;
    Integer size = null;
    Integer lastMatchIndex = null;

    for (BodyPart bodyPart : resource.getBodyParts()) {
        ContentDisposition contentDisposition = bodyPart.getContentDisposition();
        Map<String, String> parameters = contentDisposition.getParameters();
        String name = parameters.get("name");

        if (name.equals("p_size")) {
            size = Integer.parseInt(getAsString(bodyPart));
            partsSeen++;
        } else if (name.equals("p_last_match_idx")) {
            lastMatchIndex = Integer.parseInt(getAsString(bodyPart));
            partsSeen++;
        } else if (name.equals("p_stream")) {
            BodyPartEntity bpEntity = (BodyPartEntity) bodyPart.getEntity();
            is = bpEntity.getInputStream();
            partsSeen++;
        } else if (name.equals("p_block_size")) {
            blockSize = Integer.parseInt(getAsString(bodyPart));
        } else if (name.equals("p_match_count")) {
            matchCount = Integer.parseInt(getAsString(bodyPart));
        }

        // Once the full triple has been collected, materialize the patch.
        if (partsSeen >= 3) {
            partsSeen = 0;
            ByteBuffer bb = ByteBuffer.allocate(1024 * 20); // TODO size from p_size instead
            ReadableByteChannel channel = Channels.newChannel(is);
            // Drain the channel: a single read() may return fewer bytes than
            // are available, so loop until EOF or the buffer is full.
            while (bb.hasRemaining() && channel.read(bb) != -1) {
                // keep reading
            }
            bb.flip();
            byte[] buffer = new byte[bb.limit()];
            bb.get(buffer);
            patches.add(new Patch(lastMatchIndex, size, buffer));
        }
    }

    return new PatchDocument(blockSize, matchedBlocks, patches);
}
From source file:org.apache.axis2.transport.nhttp.ServerHandler.java
/**
 * Process ready output by writing into the channel.
 *
 * @param conn the connection being processed
 * @param encoder the content encoder in use
 */
public void outputReady(final NHttpServerConnection conn, final ContentEncoder encoder) {
    final HttpContext context = conn.getContext();
    final HttpResponse response = conn.getHttpResponse();
    final Pipe.SourceChannel source =
            (Pipe.SourceChannel) context.getAttribute(RESPONSE_SOURCE_CHANNEL);
    final ByteBuffer outbuf = (ByteBuffer) context.getAttribute(RESPONSE_BUFFER);

    try {
        if (source.read(outbuf) == -1) {
            // Pipe drained: no more response content to encode.
            encoder.complete();
        } else {
            // flip -> write -> compact keeps any bytes the encoder did not take.
            outbuf.flip();
            encoder.write(outbuf);
            outbuf.compact();
        }

        if (encoder.isCompleted()) {
            source.close();
            if (!connStrategy.keepAlive(response, context)) {
                conn.close();
            }
        }
    } catch (IOException e) {
        handleException("I/O Error : " + e.getMessage(), e, conn);
    }
}
From source file:org.apache.metron.profiler.hbase.SaltyRowKeyBuilderTest.java
/** * Build a row key that includes only one group. *//* w w w. j av a 2s. c o m*/ @Test public void testRowKeyWithOneGroup() throws Exception { // setup measurement.withGroups(Arrays.asList("group1")); // the expected row key ByteBuffer buffer = ByteBuffer.allocate(100) .put(SaltyRowKeyBuilder.getSalt(measurement.getPeriod(), saltDivisor)) .put(measurement.getProfileName().getBytes()).put(measurement.getEntity().getBytes()) .put("group1".getBytes()).putLong(1635701L); buffer.flip(); final byte[] expected = new byte[buffer.limit()]; buffer.get(expected, 0, buffer.limit()); // validate byte[] actual = rowKeyBuilder.rowKey(measurement); Assert.assertTrue(Arrays.equals(expected, actual)); }
From source file:de.hofuniversity.iisys.neo4j.websock.query.encoding.unsafe.DeflateJsonQueryHandler.java
private ByteBuffer fuse(final int length) { //fuses the buffers into a single array of the target length final ByteBuffer bb = ByteBuffer.allocate(length); for (byte[] buffer : fBuffers) { if (buffer.length > length - bb.position()) { bb.put(buffer, 0, length - bb.position()); } else {/*from w w w. j av a2 s .c o m*/ bb.put(buffer); } } //important bb.flip(); fBuffers.clear(); return bb; }
From source file:cn.ac.ncic.mastiff.io.coding.RedBlackTreeStringReader.java
/**
 * Rebuilds the decompressed byte stream for this dictionary-encoded column.
 *
 * <p>The incoming buffer layout (after a 12-byte header) is: dictionarySize
 * (int), length1 (int), DICTIONARY_DATA bytes, length2 (int), LENGTH bytes,
 * length3 (int), DATA bytes. The method reconstructs the ORC dictionary, its
 * offset table, and the row-index reader, then re-serializes every value as
 * writeUTF followed by a trailing table of int offsets.
 *
 * @return the decoded bytes (three header ints, UTF values, then offsets)
 * @throws IOException if any of the underlying streams fail
 */
@Override
public byte[] ensureDecompressed() throws IOException {
    // Strip the 12-byte header before parsing the payload.
    DataOutputBuffer transfer = new DataOutputBuffer();
    transfer.write(inBuf.getData(), 12, inBuf.getLength() - 12);
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(transfer.getData(), 0, transfer.getLength());
    int dictionarySize = dib.readInt();
    int length1 = dib.readInt();
    byte[] data = transfer.getData();
    transfer.close();
    // NOTE(review): Integer.SIZE is 32 (bits), so this offset is 64, while the
    // later resets use byte offsets (4 + 4). Verify this is intentional.
    dib.reset(data, Integer.SIZE + Integer.SIZE, length1);
    FlexibleEncoding.ORC.StreamName name = new FlexibleEncoding.ORC.StreamName(0,
            OrcProto.Stream.Kind.DICTIONARY_DATA);
    ByteBuffer inBuf1 = ByteBuffer.allocate(length1);
    inBuf1.put(dib.getData(), 0, dib.getLength());
    inBuf1.flip();
    InStream in = InStream.create("test1", inBuf1, null, dictionarySize);
    if (in.available() > 0) {
        // Load the dictionary blob itself.
        dictionaryBuffer = new DynamicByteArray(64, in.available());
        dictionaryBuffer.readAll(in);
        in.close();
        // read the lengths google proto buffer
        name = new StreamName(1, OrcProto.Stream.Kind.LENGTH);
        dib.reset(data, 4 + 4 + length1, 4);
        int length2 = dib.readInt();
        dib.reset(data, 4 + 4 + length1 + 4, length2);
        // in = streams.get(name);
        ByteBuffer inBuf2 = ByteBuffer.allocate(length2);
        inBuf2.put(dib.getData(), 0, length2);
        inBuf2.flip();
        in = InStream.create("test2", inBuf2, null, dictionarySize);
        // IntegerReader lenReader = createIntegerReader(encodings.get(columnId)
        //     .getKind(), in, false);
        IntegerReader lenReader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
        // Accumulate per-entry lengths into absolute offsets; one extra slot
        // marks the end of the last entry.
        int offset = 0;
        dictionaryOffsets = new int[dictionarySize + 1];
        for (int i = 0; i < dictionarySize; ++i) {
            dictionaryOffsets[i] = offset;
            offset += (int) lenReader.next();
        }
        dictionaryOffsets[dictionarySize] = offset;
        in.close();
        // Finally wire up the DATA stream holding the row-to-dictionary indices.
        name = new FlexibleEncoding.ORC.StreamName(2, OrcProto.Stream.Kind.DATA);
        dib.reset(data, 4 + 4 + length1 + 4 + length2, 4);
        int length3 = dib.readInt();
        dib.reset(data, 4 + 4 + length1 + 4 + length2 + 4, length3);
        ByteBuffer inBuf3 = ByteBuffer.allocate(length3);
        inBuf3.put(dib.getData(), 0, length3);
        inBuf3.flip();
        in = InStream.create("test3", inBuf3, null, dictionarySize);
        reader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
    }
    inBuf.close();
    // Re-serialize: header ints, each value via writeUTF, then the offset table.
    DataOutputBuffer decoding = new DataOutputBuffer();
    DataOutputBuffer offsets = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    int dataoffset = 12;
    String str;
    for (int i = 0; i < numPairs; i++) {
        str = readEachValue(null);
        decoding.writeUTF(str);
        // if(i<5){
        //     System.out.println("304 bin[i] "+str+" decoding "+ decoding.size());
        // }
        dataoffset = decoding.size();
        offsets.writeInt(dataoffset);
    }
    System.out.println("315 offset.size() " + offsets.size() + " decoding.szie " + decoding.size());
    System.out.println("316 dataoffet " + dataoffset);
    decoding.write(offsets.getData(), 0, offsets.size());
    inBuf.close();
    offsets.close();
    dib.close();
    System.out.println("316 decoding " + decoding.size() + decoding.getLength() + " decoding.getData() "
            + decoding.getData().length);
    inBuf1.clear();
    return decoding.getData();
}
From source file:bamboo.openhash.fileshare.FileShare.java
/** * Transfer wblocks from the wblocks array to the ready queue. *///from w w w.j a v a 2 s . c o m public void make_parents(boolean done) { for (int l = 0; l < wblocks.size(); ++l) { logger.debug("level " + l + " of " + wblocks.size() + " size=" + wblocks.elementAt(l).size() + " done=" + done); while ((wblocks.elementAt(l).size() >= BRANCHING) || (done && (wblocks.elementAt(l).size() > 1))) { int count = min(BRANCHING, wblocks.elementAt(l).size()); logger.debug("count=" + count); for (int i = 0; i < count; ++i) { ByteBuffer bb = wblocks.elementAt(l).removeFirst(); bb.flip(); md.update(secret); md.update(bb.array(), 0, bb.limit()); byte[] dig = md.digest(); ready.addLast(new Pair<byte[], ByteBuffer>(dig, bb)); if (l + 1 >= wblocks.size()) { wblocks.setSize(max(wblocks.size(), l + 2)); wblocks.setElementAt(new LinkedList<ByteBuffer>(), l + 1); } LinkedList<ByteBuffer> next_level = wblocks.elementAt(l + 1); if (next_level.isEmpty() || (next_level.getLast().position() == 1024)) { logger.debug("adding a new block to level " + (l + 1)); next_level.addLast(ByteBuffer.wrap(new byte[1024])); next_level.getLast().putInt(l + 1); } logger.debug("adding a digest to level " + (l + 1)); next_level.getLast().put(dig); } if (done) break; } } logger.debug("make_parents done"); }
From source file:org.apache.axis2.transport.nhttp.ClientHandler.java
/**
 * Process ready output (i.e. write request to remote server).
 *
 * <p>Reads buffered request bytes from the pipe's source channel and hands
 * them to the encoder; EOF on the pipe completes the encoding, and a
 * completed encoder closes the source channel.
 *
 * @param conn the connection being processed
 * @param encoder the encoder in use
 */
public void outputReady(final NHttpClientConnection conn, final ContentEncoder encoder) {
    HttpContext context = conn.getContext();
    // NOTE(review): 'response' is never used here — the server-side variant
    // uses it for the keep-alive decision. Confirm whether it can be removed.
    HttpResponse response = conn.getHttpResponse();
    Pipe.SourceChannel source = (Pipe.SourceChannel) context.getAttribute(REQUEST_SOURCE_CHANNEL);
    // NOTE(review): this is the request-writing path, yet the buffer is fetched
    // under RESPONSE_BUFFER — verify the attribute key matches where the
    // request buffer is stored.
    ByteBuffer outbuf = (ByteBuffer) context.getAttribute(RESPONSE_BUFFER);
    try {
        int bytesRead = source.read(outbuf);
        if (bytesRead == -1) {
            // Pipe drained: no more request content.
            encoder.complete();
        } else {
            // flip -> write -> compact keeps any bytes the encoder did not take.
            outbuf.flip();
            encoder.write(outbuf);
            outbuf.compact();
        }
        if (encoder.isCompleted()) {
            source.close();
        }
    } catch (IOException e) {
        handleException("I/O Error : " + e.getMessage(), e, conn);
    }
}
From source file:org.apache.metron.profiler.hbase.SaltyRowKeyBuilderTest.java
/** * Build a row key that includes a single group that is an integer. *//*from www. ja v a2 s . c o m*/ @Test public void testRowKeyWithMixedGroups() throws Exception { // setup measurement.withGroups(Arrays.asList(200, "group1")); // the expected row key ByteBuffer buffer = ByteBuffer.allocate(100) .put(SaltyRowKeyBuilder.getSalt(measurement.getPeriod(), saltDivisor)) .put(measurement.getProfileName().getBytes()).put(measurement.getEntity().getBytes()) .put("200".getBytes()).put("group1".getBytes()).putLong(1635701L); buffer.flip(); final byte[] expected = new byte[buffer.limit()]; buffer.get(expected, 0, buffer.limit()); // validate byte[] actual = rowKeyBuilder.rowKey(measurement); Assert.assertTrue(Arrays.equals(expected, actual)); }