List of usage examples for java.nio.ByteBuffer.remaining()
public final int remaining()
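remaining() returns the number of elements between the buffer's current position and its limit, i.e. limit() - position(). Before the project examples below, here is a minimal self-contained sketch (written for this page, not taken from any of the listed projects) showing how the value changes as a buffer is written and then flipped for reading:

import java.nio.ByteBuffer;

public class RemainingDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16); // position=0, limit=capacity=16
        System.out.println(buf.remaining());      // 16: the whole buffer is writable

        buf.putInt(42);                           // relative put advances position by 4
        System.out.println(buf.remaining());      // 12: space left to write

        buf.flip();                               // limit=4 (old position), position=0
        System.out.println(buf.remaining());      // 4: bytes available to read
    }
}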
From source file:org.apache.hadoop.fs.TestEnhancedByteBufferAccess.java
@Test
public void test2GBMmapLimit() throws Exception {
    Assume.assumeTrue(BlockReaderTestUtil.shouldTestLargeFiles());
    HdfsConfiguration conf = initZeroCopyTest();
    final long TEST_FILE_LENGTH = 2469605888L;
    conf.set(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, "NULL");
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, TEST_FILE_LENGTH);
    MiniDFSCluster cluster = null;
    final Path TEST_PATH = new Path("/a");
    final String CONTEXT = "test2GBMmapLimit";
    conf.set(DFSConfigKeys.DFS_CLIENT_CONTEXT, CONTEXT);
    FSDataInputStream fsIn = null, fsIn2 = null;
    ByteBuffer buf1 = null, buf2 = null;
    try {
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
        cluster.waitActive();
        DistributedFileSystem fs = cluster.getFileSystem();
        DFSTestUtil.createFile(fs, TEST_PATH, TEST_FILE_LENGTH, (short) 1, 0xB);
        DFSTestUtil.waitReplication(fs, TEST_PATH, (short) 1);
        fsIn = fs.open(TEST_PATH);
        buf1 = fsIn.read(null, 1, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
        Assert.assertEquals(1, buf1.remaining());
        fsIn.releaseBuffer(buf1);
        buf1 = null;
        fsIn.seek(2147483640L);
        buf1 = fsIn.read(null, 1024, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
        Assert.assertEquals(7, buf1.remaining());
        Assert.assertEquals(Integer.MAX_VALUE, buf1.limit());
        fsIn.releaseBuffer(buf1);
        buf1 = null;
        Assert.assertEquals(2147483647L, fsIn.getPos());
        try {
            buf1 = fsIn.read(null, 1024, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
            Assert.fail("expected UnsupportedOperationException");
        } catch (UnsupportedOperationException e) {
            // expected; can't read past 2GB boundary.
        }
        fsIn.close();
        fsIn = null;
        // Now create another file with normal-sized blocks, and verify we
        // can read past 2GB.
        final Path TEST_PATH2 = new Path("/b");
        conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 268435456L);
        DFSTestUtil.createFile(fs, TEST_PATH2, 1024 * 1024, TEST_FILE_LENGTH, 268435456L, (short) 1, 0xA);
        fsIn2 = fs.open(TEST_PATH2);
        fsIn2.seek(2147483640L);
        buf2 = fsIn2.read(null, 1024, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
        Assert.assertEquals(8, buf2.remaining());
        Assert.assertEquals(2147483648L, fsIn2.getPos());
        fsIn2.releaseBuffer(buf2);
        buf2 = null;
        buf2 = fsIn2.read(null, 1024, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
        Assert.assertEquals(1024, buf2.remaining());
        Assert.assertEquals(2147484672L, fsIn2.getPos());
        fsIn2.releaseBuffer(buf2);
        buf2 = null;
    } finally {
        if (buf1 != null) {
            fsIn.releaseBuffer(buf1);
        }
        if (buf2 != null) {
            fsIn2.releaseBuffer(buf2);
        }
        IOUtils.cleanup(null, fsIn, fsIn2);
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}
From source file:com.healthmarketscience.jackcess.Table.java
/**
 * Writes the given name into the given buffer in the format as expected by
 * {@link #readName}.
 */
static void writeName(ByteBuffer buffer, String name, Charset charset) {
    ByteBuffer encName = Column.encodeUncompressedText(name, charset);
    buffer.putShort((short) encName.remaining());
    buffer.put(encName);
}
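Note that writeName produces a length-prefixed string: a two-byte length (taken from encName.remaining() before the put drains it) followed by the encoded bytes. For illustration only, a reader for this layout could look like the sketch below; this is a hypothetical helper, not Jackcess's actual readName implementation (which also deals with compressed text):

// Hypothetical inverse of writeName above: reads an unsigned-short length
// prefix, then that many bytes, and decodes them with the given charset.
// Illustration only -- not the actual Jackcess readName implementation.
static String readNameSketch(ByteBuffer buffer, Charset charset) {
    int length = buffer.getShort() & 0xFFFF; // length written by writeName
    byte[] encoded = new byte[length];
    buffer.get(encoded);                     // advances position past the name
    return new String(encoded, charset);
}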
From source file:org.apache.orc.impl.mask.RedactMaskFactory.java
/**
 * Mask a string by finding the character category of each character
 * and replacing it with the matching literal.
 * @param source the source column vector
 * @param row the value index
 * @param target the target column vector
 */
void maskString(BytesColumnVector source, int row, BytesColumnVector target) {
    int expectedBytes = source.length[row];
    ByteBuffer sourceBytes = ByteBuffer.wrap(source.vector[row], source.start[row], source.length[row]);
    // Ensure we have enough space, if the masked data is the same size.
    target.ensureValPreallocated(expectedBytes);
    byte[] outputBuffer = target.getValPreallocatedBytes();
    int outputOffset = target.getValPreallocatedStart();
    int outputStart = outputOffset;
    int index = 0;
    while (sourceBytes.remaining() > 0) {
        int cp = Text.bytesToCodePoint(sourceBytes);
        // Find the replacement for the current character.
        int replacement = getReplacement(cp);
        if (replacement == UNMASKED_CHAR || isIndexInUnmaskRange(index, source.length[row])) {
            replacement = cp;
        }
        // increment index
        index++;
        int len = getCodepointLength(replacement);
        // If the translation will overflow the buffer, we need to resize.
        // This will only happen when the masked size is larger than the original.
        if (len + outputOffset > outputBuffer.length) {
            // Revise the estimate of how much we are going to need now. We are
            // maximally pessimistic here so that we don't have to expand again
            // for this value.
            int currentOutputStart = outputStart;
            int currentOutputLength = outputOffset - currentOutputStart;
            expectedBytes = currentOutputLength + len + sourceBytes.remaining() * 4;
            // Expand the buffer to fit the new estimate.
            target.ensureValPreallocated(expectedBytes);
            // Copy over the bytes we've already written for this value and move
            // the pointers to the new output buffer.
            byte[] oldBuffer = outputBuffer;
            outputBuffer = target.getValPreallocatedBytes();
            outputOffset = target.getValPreallocatedStart();
            outputStart = outputOffset;
            System.arraycopy(oldBuffer, currentOutputStart, outputBuffer, outputOffset, currentOutputLength);
            outputOffset += currentOutputLength;
        }
        // Finally copy the bytes.
        writeCodepoint(replacement, outputBuffer, outputOffset, len);
        outputOffset += len;
    }
    target.setValPreallocated(row, outputOffset - outputStart);
}
From source file:com.android.camera2.its.ItsUtils.java
public static byte[] getDataFromImage(Image image) throws ItsException {
    int format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = null;
    // Read image data.
    Plane[] planes = image.getPlanes();
    // Check image validity.
    if (!checkAndroidImageFormat(image)) {
        throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat());
    }
    if (format == ImageFormat.JPEG) {
        // JPEG doesn't have pixelStride and rowStride; treat it as a 1D buffer.
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
            || format == ImageFormat.RAW10) {
        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
            Logt.i(TAG, String.format(
                    "Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
                    format, i, width, height, rowStride, pixelStride));
            // For multi-planar YUV images, assume YUV420 with 2x2 chroma subsampling.
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row.
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    // Advance the buffer by the remainder of the row stride.
                    buffer.position(buffer.position() + rowStride - length);
                    offset += length;
                } else {
                    // Generic case: works for any pixelStride but is slower.
                    // Use an intermediate buffer to avoid byte-by-byte reads from a
                    // DirectByteBuffer, which is very bad for performance.
                    // Also avoid out-of-bounds access by reading only the bytes
                    // still available in the ByteBuffer.
                    int readSize = rowStride;
                    if (buffer.remaining() < readSize) {
                        readSize = buffer.remaining();
                    }
                    buffer.get(rowData, 0, readSize);
                    if (pixelStride >= 1) {
                        for (int col = 0; col < w; col++) {
                            data[offset++] = rowData[col * pixelStride];
                        }
                    } else {
                        // A pixelStride of 0 can mean a pixel isn't a multiple of 8 bits,
                        // for example with RAW10. Just copy the buffer, dropping any
                        // padding at the end of the row.
                        int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
                        System.arraycopy(rowData, 0, data, offset, length);
                        offset += length;
                    }
                }
            }
        }
        Logt.i(TAG, String.format("Done reading image, format %d", format));
        return data;
    } else {
        throw new ItsException("Unsupported image format: " + format);
    }
}
From source file:byps.test.TestSerializePrimitiveTypes.java
public void internaltestPerformancePrimitiveTypes(BBinaryModel protocol, int objCount, int loopCount,
        int flags) throws IOException {
    TestUtils.protocol = protocol;
    BTransport transport = TestUtils.createTransport(flags, BMessageHeader.BYPS_VERSION_CURRENT, 0);
    BOutput bout = transport.getOutput();
    long bytesCount = 0;
    ByteBuffer buf = null;
    ListTypes obj = new ListTypes();
    obj.primitiveTypes1 = new ArrayList<PrimitiveTypes>();
    for (int i = 0; i < objCount; i++) {
        PrimitiveTypes item = TestUtils.createObjectPrimitiveTypes();
        obj.primitiveTypes1.add(item);
    }
    long t1 = System.currentTimeMillis();
    ByteArrayOutputStream os = null;
    for (int i = 0; i < loopCount; i++) {
        bout.store(obj);
        buf = bout.toByteBuffer();
        os = new ByteArrayOutputStream();
        BWire.bufferToStream(buf, (transport.getWire().getFlags() & BWire.FLAG_GZIP) != 0, os);
    }
    long t2 = System.currentTimeMillis();
    bytesCount = (long) loopCount * buf.remaining();
    //TestUtils.printBuffer(log, buf);
    long t3 = System.currentTimeMillis();
    for (int i = 0; i < loopCount; i++) {
        ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
        buf = BWire.bufferFromStream(is);
        BInput bin = transport.getInput(null, buf);
        Object objR = bin.load();
        if (i == 0) {
            TestUtils.assertEquals(null, "obj", obj, objR);
        }
        buf.flip();
    }
    long t4 = System.currentTimeMillis();
    log.info("bmodel=" + protocol.toString().substring(0, 2)
            + ", gzip=" + ((flags & BWire.FLAG_GZIP) != 0)
            + ", #objs=" + String.format("%6d", objCount)
            + ", #loops=" + String.format("%6d", loopCount)
            + ", ser-ms=" + String.format("%6d", (t2 - t1) + (t4 - t3))
            + "/" + String.format("%6d", (t2 - t1))
            + "/" + String.format("%6d", (t4 - t3))
            + ", #kb-per-message=" + String.format("%4.2f", (double) os.size() / 1000)
            + ", #kb-total=" + String.format("%4.2f", (double) bytesCount / 1000));
}
From source file:com.esri.geoevent.solutions.adapter.cap.CAPInboundAdapter.java
@Override
public void receive(ByteBuffer buffer, String channelId) {
    //System.out.println("Processing...");
    String data;
    while (buffer.hasRemaining()) {
        buffer.mark();
        try {
            byte[] bytearray = new byte[buffer.remaining()];
            buffer.get(bytearray);
            data = new String(bytearray);
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            DocumentBuilder builder = factory.newDocumentBuilder();
            Document doc = builder.parse(new InputSource(new StringReader(data)));
            NodeList alerts = doc.getElementsByTagName("alert");
            System.out.println();
            System.out.println(new Date().toString() + ": Processing " + alerts.getLength() + " alerts.");
            int procAlerts = 0;
            for (int a = 0; a < alerts.getLength(); a++) {
                Element alert = (Element) alerts.item(a);
                NodeList nodeList = alert.getElementsByTagName("identifier");
                Element line = (Element) nodeList.item(0);
                String identifier = getCharacterDataFromElement(line);
                if (MAP.containsKey(identifier)) {
                    System.out.println(" Alert: " + identifier
                            + " was processed previously. Skipping to next alert.");
                    continue;
                }
                //System.out.println(" Alert " + a + ": " + identifier + ". Processing now.");
                MAP.put(identifier, identifier);
                procAlerts++;
                GeoEvent alertMsg = parseAlert(alert, identifier);
                if (alertMsg != null) {
                    geoEventListener.receive(alertMsg);
                    System.out.println(" Alert " + a + ": " + identifier);
                    System.out.println(" " + alertMsg.toString());
                    NodeList codes = alert.getElementsByTagName("code");
                    for (int c = 0; c < codes.getLength(); c++) {
                        Element code = (Element) codes.item(c);
                        GeoEvent codeMsg = parseAlertCode(code, identifier);
                        if (codeMsg != null) {
                            geoEventListener.receive(codeMsg);
                            System.out.println(" Code: " + codeMsg.toString());
                        }
                    }
                    NodeList infos = alert.getElementsByTagName("info");
                    for (int i = 0; i < infos.getLength(); i++) {
                        Element info = (Element) infos.item(i);
                        String infoID = identifier + "_" + i;
                        GeoEvent infoMsg = parseAlertInfo(info, identifier, infoID);
                        if (infoMsg != null) {
                            geoEventListener.receive(infoMsg);
                            System.out.println(" Info " + i + ": " + infoID);
                            System.out.println(" " + infoMsg.toString());
                            NodeList categories = info.getElementsByTagName("category");
                            for (int cat = 0; cat < categories.getLength(); cat++) {
                                Element category = (Element) categories.item(cat);
                                GeoEvent catMsg = parseInfoCategory(category, identifier, infoID);
                                if (catMsg != null) {
                                    geoEventListener.receive(catMsg);
                                    System.out.println(" Category: " + catMsg.toString());
                                }
                            }
                            NodeList eventCodes = info.getElementsByTagName("eventCode");
                            for (int e = 0; e < eventCodes.getLength(); e++) {
                                Element eventCode = (Element) eventCodes.item(e);
                                GeoEvent eMsg = parseInfoEventCode(eventCode, identifier, infoID);
                                if (eMsg != null) {
                                    geoEventListener.receive(eMsg);
                                    System.out.println(" Event code: " + eMsg.toString());
                                }
                            }
                            NodeList responseTypes = info.getElementsByTagName("responseType");
                            for (int rt = 0; rt < responseTypes.getLength(); rt++) {
                                Element responseType = (Element) responseTypes.item(rt);
                                GeoEvent rtMsg = parseInfoResponseType(responseType, identifier, infoID);
                                if (rtMsg != null) {
                                    geoEventListener.receive(rtMsg);
                                    System.out.println(" Response type: " + rtMsg.toString());
                                }
                            }
                            NodeList parameters = info.getElementsByTagName("parameter");
                            for (int p = 0; p < parameters.getLength(); p++) {
                                Element parameter = (Element) parameters.item(p);
                                GeoEvent pMsg = parseInfoParameter(parameter, identifier, infoID);
                                if (pMsg != null) {
                                    geoEventListener.receive(pMsg);
                                    System.out.println(" Parameter: " + pMsg.toString());
                                }
                            }
                            NodeList resources = info.getElementsByTagName("resource");
                            for (int r = 0; r < resources.getLength(); r++) {
                                Element resource = (Element) resources.item(r);
                                GeoEvent rMsg = parseInfoResource(resource, identifier, infoID);
                                if (rMsg != null) {
                                    geoEventListener.receive(rMsg);
                                    System.out.println(" Resource " + r + ": ");
                                    System.out.println(" " + rMsg.toString());
                                }
                            }
                            NodeList areas = info.getElementsByTagName("area");
                            for (int ar = 0; ar < areas.getLength(); ar++) {
                                Element area = (Element) areas.item(ar);
                                String areaID = infoID + "_" + ar;
                                GeoEvent areaMsg = parseInfoArea(area, identifier, infoID, areaID);
                                if (areaMsg != null) {
                                    geoEventListener.receive(areaMsg);
                                    System.out.println(" Area " + ar + ": ");
                                    System.out.println(" " + areaMsg.toString());
                                    NodeList polygons = info.getElementsByTagName("polygon");
                                    for (int pg = 0; pg < polygons.getLength(); pg++) {
                                        Element polygon = (Element) polygons.item(pg);
                                        System.out.println(" Polygon " + pg + ": ");
                                        GeoEvent areaGeomMsg = parseInfoAreaGeom(polygon, null, null,
                                                identifier, infoID, areaID);
                                        if (areaGeomMsg != null) {
                                            geoEventListener.receive(areaGeomMsg);
                                            System.out.println(" " + areaGeomMsg.toString());
                                        } else {
                                            System.out.println(" " + getCharacterDataFromElement(polygon));
                                        }
                                    }
                                    NodeList circles = info.getElementsByTagName("circle");
                                    for (int c = 0; c < circles.getLength(); c++) {
                                        Element circle = (Element) circles.item(c);
                                        System.out.println(" Circle " + c + ": ");
                                        GeoEvent areaGeomMsg = parseInfoAreaGeom(null, circle, null,
                                                identifier, infoID, areaID);
                                        if (areaGeomMsg != null) {
                                            geoEventListener.receive(areaGeomMsg);
                                            System.out.println(" " + areaGeomMsg.toString());
                                        } else {
                                            System.out.println(" " + getCharacterDataFromElement(circle));
                                        }
                                    }
                                    NodeList geocodes = info.getElementsByTagName("geocode");
                                    for (int g = 0; g < geocodes.getLength(); g++) {
                                        Element geocode = (Element) geocodes.item(g);
                                        GeoEvent areaGeomMsg = parseInfoAreaGeom(null, null, geocode,
                                                identifier, infoID, areaID);
                                        if (areaGeomMsg != null) {
                                            geoEventListener.receive(areaGeomMsg);
                                            System.out.println(" Geocode " + g + ": ");
                                            System.out.println(" " + areaGeomMsg.toString());
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            //System.out.println("Processed " + procAlerts + " of " + alerts.getLength() + " alerts.");
        } catch (Exception e) {
            String msg = e.getMessage();
            System.out.println(msg);
            e.printStackTrace();
        }
        return;
    }
}
From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java
protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;
    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.MatrixFactorization$Counter", "iteration");
    try {
        if (lastWritePos == 0) { // run iterations without a temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();
            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);
                while (inputBuf.remaining() > 0) {
                    int user = inputBuf.getInt();
                    int item = inputBuf.getInt();
                    double rating = inputBuf.getDouble();
                    // invoke train
                    count++;
                    train(user, item, rating);
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on memory (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total) ");
        } else { // read training examples from the temporary file and invoke train for each example
            // write the training examples remaining in the buffer to the temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                logger.info("Wrote " + numTrainingExamples
                        + " records to a temporary file for iterative training: "
                        + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }
            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);
                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // read training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: "
                                + fileIO.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;
                    // read training examples from the buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int user = inputBuf.getInt();
                        int item = inputBuf.getInt();
                        double rating = inputBuf.getDouble();
                        // invoke train
                        count++;
                        train(user, item, rating);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
From source file:com.haines.ml.rce.io.protostuff.ProtostuffEventMarshalBufferUnitTest.java
@Test
public void givenCandidate_whenCallingFullMarshalWithEdgeCaseBufferSize_thenExpectedMessageReturned()
        throws IOException {
    Iterable<ByteBuffer> buffers = loadByteBuffersFromFile("testBuffer1.txt");
    Iterator<ByteBuffer> buffersIt = buffers.iterator();
    int i = 0;
    int totalBytesRead = 0;
    while (buffersIt.hasNext()) {
        ByteBuffer buffer = buffersIt.next();
        totalBytesRead += buffer.remaining();
        //LOG.debug("trying buffer: " + (i++));
        boolean moreToRead = candidate.marshal(buffer);
        //LOG.debug("inner message now: " + ReflectionToStringBuilder.toString(candidate.messageBuffer,
        //        ToStringStyle.MULTI_LINE_STYLE));
        assertThat(moreToRead, is(equalTo(!buffersIt.hasNext())));
    }
    System.out.println(totalBytesRead + " bytes read");
    TestMessage message = candidate.buildEventAndResetBuffer();
    assertFullySerialisedMessage(message, true);
}
From source file:com.haines.ml.rce.io.protostuff.ProtostuffEventMarshalBufferUnitTest.java
@Test
public void givenCandidate_whenCallingFullMarshalWithEdgeCase2BufferSize_thenExpectedMessageReturned()
        throws IOException {
    Iterable<ByteBuffer> buffers = loadByteBuffersFromFile("testBuffer2.txt");
    Iterator<ByteBuffer> buffersIt = buffers.iterator();
    int i = 0;
    int totalBytesRead = 0;
    while (buffersIt.hasNext()) {
        ByteBuffer buffer = buffersIt.next();
        totalBytesRead += buffer.remaining();
        LOG.debug("trying buffer: " + (i++));
        boolean moreToRead = candidate.marshal(buffer);
        LOG.debug("inner message now: " + ReflectionToStringBuilder.toString(candidate.messageBuffer,
                ToStringStyle.MULTI_LINE_STYLE));
        assertThat(moreToRead, is(equalTo(!buffersIt.hasNext())));
    }
    System.out.println(totalBytesRead + " bytes read");
    TestMessage message = candidate.buildEventAndResetBuffer();
    assertFullySerialisedMessage(message, true);
}
From source file:org.apache.bookkeeper.bookie.Bookie.java
public ByteBuffer readEntry(long ledgerId, long entryId) throws IOException, NoLedgerException {
    long requestNanos = MathUtils.nowInNano();
    boolean success = false;
    int entrySize = 0;
    try {
        LedgerDescriptor handle = handles.getReadOnlyHandle(ledgerId);
        LOG.trace("Reading {}@{}", entryId, ledgerId);
        ByteBuffer entry = handle.readEntry(entryId);
        entrySize = entry.remaining();
        readBytes.add(entrySize);
        success = true;
        return entry;
    } finally {
        long elapsedNanos = MathUtils.elapsedNanos(requestNanos);
        if (success) {
            readEntryStats.registerSuccessfulEvent(elapsedNanos, TimeUnit.NANOSECONDS);
            readBytesStats.registerSuccessfulValue(entrySize);
        } else {
            readEntryStats.registerFailedEvent(elapsedNanos, TimeUnit.NANOSECONDS);
            readBytesStats.registerFailedValue(entrySize);
        }
    }
}