List of usage examples for java.nio.ByteBuffer.capacity()
public final int capacity()
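The method returns this buffer's capacity: the fixed number of elements it contains, set at allocation and never changed by position, limit, flip(), or clear(). Before the project-level examples, a minimal standalone sketch (buffer size chosen arbitrarily) illustrating that behavior:

import java.nio.ByteBuffer;

public class CapacityDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16); // capacity fixed at 16 bytes

        buf.putInt(42); // position advances to 4
        buf.putInt(7);  // position advances to 8

        System.out.println(buf.capacity());  // 16 -- total size, not bytes written
        System.out.println(buf.position());  // 8
        System.out.println(buf.remaining()); // 8  (limit - position; limit == capacity here)

        buf.flip(); // limit = 8, position = 0; capacity is untouched
        System.out.println(buf.capacity());  // still 16
        System.out.println(buf.remaining()); // 8 readable bytes
    }
}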
From source file: com.slytechs.capture.file.editor.AbstractRawIterator.java

public void swap(final Long r1, final Long r2) throws IOException {
    if (r1.equals(r2)) { // value comparison: r1 and r2 are boxed Longs, == would compare references
        return; // Nothing to do
    }

    // Flip them around, make sure r1 is always < r2
    if (r1 > r2) {
        this.swap(r2, r1);
        return;
    }

    final long p = this.getPosition(); // remember current position

    // 1st acquire the buffers for both records
    this.setPosition(r1);
    final ByteBuffer b1 = BufferUtils.slice(this.next()); // Remember the buffer

    this.setPosition(r2);
    final ByteBuffer b2 = BufferUtils.slice(this.next()); // Remember the buffer

    // Replace r2 first, as its replacement won't affect position of r1
    this.setPosition(r2);
    this.replace(b1);

    // Lastly replace r1, it might affect r2 position, but r2 has already been
    // replaced, so no big deal
    this.setPosition(r1);
    this.replace(b2);

    /*
     * Try to get back to roughly the same position, the position might only
     * change if r1 < position < r2 and if r1.length != r2.length, otherwise the
     * position should still end up on same record's start position
     */
    this.seek(p);

    this.autoflush.autoflushChange(b1.capacity() + b2.capacity());
}
From source file: org.apache.hadoop.hdfs.server.datanode.DWRRBlockReceiver.java

/**
 * Receives and processes a packet. It can contain many chunks.
 *
 * @return the number of data bytes that the packet has
 */
private int receivePacket() throws IOException {
    // read the next packet
    packetReceiver.receiveNextPacket(in);

    PacketHeader header = packetReceiver.getHeader();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Receiving one packet for block " + block + ": " + header);
    }

    // Sanity check the header
    if (header.getOffsetInBlock() > replicaInfo.getNumBytes()) {
        throw new IOException("Received an out-of-sequence packet for " + block + " from " + inAddr
                + " at offset " + header.getOffsetInBlock() + ". Expecting packet starting at "
                + replicaInfo.getNumBytes());
    }
    if (header.getDataLen() < 0) {
        throw new IOException("Got wrong length during writeBlock(" + block + ") from " + inAddr
                + " at offset " + header.getOffsetInBlock() + ": " + header.getDataLen());
    }

    long offsetInBlock = header.getOffsetInBlock();
    long seqno = header.getSeqno();
    boolean lastPacketInBlock = header.isLastPacketInBlock();
    int len = header.getDataLen();
    boolean syncBlock = header.getSyncBlock();

    // avoid double sync'ing on close
    if (syncBlock && lastPacketInBlock) {
        this.syncOnClose = false;
    }

    // update received bytes
    long firstByteInBlock = offsetInBlock;
    offsetInBlock += len;
    if (replicaInfo.getNumBytes() < offsetInBlock) {
        replicaInfo.setNumBytes(offsetInBlock);
    }

    // put in queue for pending acks, unless sync was requested
    if (responder != null && !syncBlock && !shouldVerifyChecksum()) {
        ((PacketResponder) responder.getRunnable()).enqueue(seqno, lastPacketInBlock, offsetInBlock,
                Status.SUCCESS);
    }

    // First write the packet to the mirror:
    if (mirrorOut != null && !mirrorError) {
        try {
            long begin = Time.monotonicNow();
            packetReceiver.mirrorPacketTo(mirrorOut);
            mirrorOut.flush();
            long duration = Time.monotonicNow() - begin;
            if (duration > datanodeSlowLogThresholdMs) {
                LOG.warn("Slow DWRRBlockReceiver write packet to mirror took " + duration
                        + "ms (threshold=" + datanodeSlowLogThresholdMs + "ms)");
            }
        } catch (IOException e) {
            handleMirrorOutError(e);
        }
    }

    ByteBuffer dataBuf = packetReceiver.getDataSlice();
    ByteBuffer checksumBuf = packetReceiver.getChecksumSlice();

    if (lastPacketInBlock || len == 0) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Receiving an empty packet or the end of the block " + block);
        }
        // sync block if requested
        // if (syncBlock) {
        //     flushOrSync(true);
        // }
    } else {
        int checksumLen = ((len + bytesPerChecksum - 1) / bytesPerChecksum) * checksumSize;

        if (checksumBuf.capacity() != checksumLen) {
            throw new IOException("Length of checksums in packet " + checksumBuf.capacity()
                    + " does not match calculated checksum length " + checksumLen);
        }

        if (shouldVerifyChecksum()) {
            try {
                verifyChunks(dataBuf, checksumBuf);
            } catch (IOException ioe) {
                // checksum error detected locally. there is no reason to continue.
                if (responder != null) {
                    try {
                        ((PacketResponder) responder.getRunnable()).enqueue(seqno, lastPacketInBlock,
                                offsetInBlock, Status.ERROR_CHECKSUM);
                        // Wait until the responder sends back the response
                        // and interrupt this thread.
                        Thread.sleep(3000);
                    } catch (InterruptedException e) {
                    }
                }
                throw new IOException("Terminating due to a checksum error. " + ioe);
            }
            if (needsChecksumTranslation) {
                // overwrite the checksums in the packet buffer with the
                // appropriate polynomial for the disk storage.
                translateChunks(dataBuf, checksumBuf);
            }
        }

        // by this point, the data in the buffer uses the disk checksum
        try {
            long onDiskLen = replicaInfo.getBytesOnDisk();
            if (onDiskLen < offsetInBlock) {
                // finally write to the disk:
                // if (onDiskLen % bytesPerChecksum != 0) {
                //     // prepare to overwrite last checksum
                //     adjustCrcFilePosition();
                // }

                // If this is a partial chunk, then read in pre-existing checksum
                if (firstByteInBlock % bytesPerChecksum != 0) {
                    LOG.info("Packet starts at " + firstByteInBlock + " for " + block
                            + " which is not a multiple of bytesPerChecksum " + bytesPerChecksum);
                    long offsetInChecksum = BlockMetadataHeader.getHeaderSize()
                            + onDiskLen / bytesPerChecksum * checksumSize;
                    computePartialChunkCrc(onDiskLen, offsetInChecksum, bytesPerChecksum);
                }

                int startByteToDisk = (int) (onDiskLen - firstByteInBlock) + dataBuf.arrayOffset()
                        + dataBuf.position();
                int numBytesToDisk = (int) (offsetInBlock - onDiskLen);

                toBeWritten.add(new DWRRWriteRequest(dataBuf.array().clone(), startByteToDisk,
                        numBytesToDisk, checksumBuf.duplicate(), checksumBuf.array().clone(),
                        checksumLen, len, offsetInBlock, syncBlock, lastPacketInBlock));

                ChunkChecksum last = replicaInfo.getLastChecksumAndDataLen();
                if (offsetInBlock % bytesPerChecksum != 0) {
                    LOG.error("CAMAMILLA " + this + " mod onDiskLen petara offsetInBlock "
                            + offsetInBlock + " bytesPerChecksum " + bytesPerChecksum); // TODO log
                }
                replicaInfo.setLastChecksumAndDataLen(offsetInBlock, last.getChecksum());
            }
        } catch (IOException iex) {
            datanode.checkDiskError();
            throw iex;
        }
    }

    // if sync was requested, put in queue for pending acks here
    // (after the fsync finished)
    if (responder != null && (syncBlock || shouldVerifyChecksum())) {
        LOG.info("CAMAMILLA " + this + " PacketResponder enqueue ack al llegir de xarxa 1"); // TODO log
        ((PacketResponder) responder.getRunnable()).enqueue(seqno, lastPacketInBlock, offsetInBlock,
                Status.SUCCESS);
    }

    if (throttler != null) { // throttle I/O
        throttler.throttle(len);
    }

    return lastPacketInBlock ? -1 : len;
}
From source file: org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl.java

private void materializeX509(ApplicationId appId, String user, String userFolder, ByteBuffer keyStore,
        String keyStorePass, ByteBuffer trustStore, String trustStorePass) throws IOException {
    if (context.getApplications().containsKey(appId)) {
        LOG.debug("Application reference exists, certificates should have "
                + "already been materialized");
        return;
    }
    if (keyStore == null || trustStore == null || (keyStore.capacity() == 0)
            || (trustStore.capacity() == 0)) {
        throw new IOException("RPC TLS is enabled but keyStore or trustStore "
                + "supplied is either null or empty");
    }

    // ApplicationMasters will also call startContainers() through NMClient.
    // In that case there will be no password set for keystore and truststore.
    // Only the RM will set these values when launching the AM container through
    // the AMLauncher.
    if (keyStorePass != null && !keyStorePass.isEmpty() && trustStorePass != null
            && !trustStorePass.isEmpty()) {
        try {
            context.getCertificateLocalizationService().materializeCertificates(user, appId.toString(),
                    userFolder, keyStore, keyStorePass, trustStore, trustStorePass);
        } catch (InterruptedException ex) {
            LOG.error(ex, ex);
            throw new IOException(ex);
        }
    }
}
From source file: com.koda.integ.hbase.storage.FileExtStorage.java

/**
 * Tries to store a batch of blocks into the current buffer.
 *
 * @param buf the buffer holding the batch of blocks
 * @return the list of storage handles, or null if the batch does not fit
 */
private List<StorageHandle> storeDataNoReleaseLock(ByteBuffer buf) {
    List<StorageHandle> handles = new ArrayList<StorageHandle>();
    writeLock.writeLock().lock();
    try {
        if (activeBuffer.get() == null) {
            return null;
        }
        int size = buf.getInt(0);
        long off = bufferOffset.get();
        if (off + size > bufferSize) {
            return null;
        }

        long currentFileLength = currentFileOffsetForWrites.get();
        if (bufferOffset.get() == 0 && currentFileLength + bufferSize > fileSizeLimit) {
            // previous buffer was flushed
            currentFileOffsetForWrites.set(0);
            maxIdForWrites.incrementAndGet();
        }

        buf.position(4);
        while (buf.position() < size + 4) {
            buf.limit(buf.capacity());
            int pos = buf.position();
            int blockSize = buf.getInt();
            buf.position(pos);
            buf.limit(pos + 4 + blockSize);
            activeBuffer.get().put(buf);

            FileStorageHandle fsh = new FileStorageHandle(maxIdForWrites.get(),
                    (int) (currentFileOffsetForWrites.get()), blockSize);
            handles.add(fsh);

            // Increase offset in current file for writes
            currentFileOffsetForWrites.addAndGet(blockSize + 4);
            bufferOffset.getAndAdd(blockSize + 4);
        }
        return handles;
    } finally {
        WriteLock lock = writeLock.writeLock();
        if (lock.isHeldByCurrentThread()) {
            lock.unlock();
        }
    }
}
From source file: org.opendaylight.lispflowmapping.implementation.serializer.MapRegisterSerializer.java

public MapRegister deserialize(ByteBuffer registerBuffer) {
    try {
        MapRegisterBuilder builder = new MapRegisterBuilder();
        builder.setEidToLocatorRecord(new ArrayList<EidToLocatorRecord>());

        byte typeAndFlags = registerBuffer.get();
        boolean xtrSiteIdPresent = ByteUtil.extractBit(typeAndFlags, Flags.XTRSITEID);
        builder.setProxyMapReply(ByteUtil.extractBit(typeAndFlags, Flags.PROXY));
        builder.setXtrSiteIdPresent(xtrSiteIdPresent);

        registerBuffer.position(registerBuffer.position() + Length.RES);
        builder.setWantMapNotify(ByteUtil.extractBit(registerBuffer.get(), Flags.WANT_MAP_REPLY));
        byte recordCount = (byte) ByteUtil.getUnsignedByte(registerBuffer);
        builder.setNonce(registerBuffer.getLong());
        builder.setKeyId(registerBuffer.getShort());

        short authenticationLength = registerBuffer.getShort();
        byte[] authenticationData = new byte[authenticationLength];
        registerBuffer.get(authenticationData);
        builder.setAuthenticationData(authenticationData);

        for (int i = 0; i < recordCount; i++) {
            builder.getEidToLocatorRecord().add(new EidToLocatorRecordBuilder(
                    EidToLocatorRecordSerializer.getInstance().deserialize(registerBuffer)).build());
        }

        if (xtrSiteIdPresent) {
            byte[] xtrId = new byte[Length.XTRID_SIZE];
            registerBuffer.get(xtrId);
            byte[] siteId = new byte[Length.SITEID_SIZE];
            registerBuffer.get(siteId);
            builder.setXtrId(xtrId);
            builder.setSiteId(siteId);
        }

        registerBuffer.limit(registerBuffer.position());
        byte[] mapRegisterBytes = new byte[registerBuffer.position()];
        registerBuffer.position(0);
        registerBuffer.get(mapRegisterBytes);
        return builder.build();
    } catch (RuntimeException re) {
        throw new LispSerializationException(
                "Couldn't deserialize Map-Register (len=" + registerBuffer.capacity() + ")", re);
    }
}
From source file: edu.mbl.jif.imaging.mmtiff.MultipageTiffReader.java

private TaggedImage readTaggedImage(IFDData data) throws IOException {
    ByteBuffer pixelBuffer = ByteBuffer.allocate((int) data.bytesPerImage);
    ByteBuffer mdBuffer = ByteBuffer.allocate((int) data.mdLength);
    fileChannel_.read(pixelBuffer, data.pixelOffset);
    fileChannel_.read(mdBuffer, data.mdOffset);

    JSONObject md = null;
    try {
        md = new JSONObject(getString(mdBuffer));
    } catch (JSONException ex) {
        ReportingUtils.logError("Error reading image metadata from file");
    }

    if (byteDepth_ == 0) {
        getRGBAndByteDepth(md);
    }

    if (rgb_) {
        if (byteDepth_ == 1) {
            byte[] pixels = new byte[(int) (4 * data.bytesPerImage / 3)];
            int i = 0;
            for (byte b : pixelBuffer.array()) {
                pixels[i] = b;
                i++;
                if ((i + 1) % 4 == 0) {
                    pixels[i] = 0;
                    i++;
                }
            }
            return new TaggedImage(pixels, md);
        } else {
            short[] pixels = new short[(int) (2 * (data.bytesPerImage / 3))];
            int i = 0;
            while (i < pixels.length) {
                pixels[i] = pixelBuffer.getShort(2 * ((i / 4) * 3 + (i % 4)));
                i++;
                if ((i + 1) % 4 == 0) {
                    pixels[i] = 0;
                    i++;
                }
            }
            return new TaggedImage(pixels, md);
        }
    } else {
        if (byteDepth_ == 1) {
            return new TaggedImage(pixelBuffer.array(), md);
        } else {
            short[] pix = new short[pixelBuffer.capacity() / 2];
            for (int i = 0; i < pix.length; i++) {
                pix[i] = pixelBuffer.getShort(i * 2);
            }
            return new TaggedImage(pix, md);
        }
    }
}
From source file: com.aionemu.gameserver.services.LegionService.java

/**
 * @param player
 * @param legionEmblem
 * @param legionId
 * @param legionName
 */
public void sendEmblemData(Player player, LegionEmblem legionEmblem, int legionId, String legionName) {
    PacketSendUtility.sendPacket(player,
            new SM_LEGION_SEND_EMBLEM(legionId, legionEmblem.getEmblemId(), legionEmblem.getColor_r(),
                    legionEmblem.getColor_g(), legionEmblem.getColor_b(), legionName,
                    legionEmblem.getEmblemType(), legionEmblem.getCustomEmblemData().length));

    ByteBuffer buf = ByteBuffer.allocate(legionEmblem.getCustomEmblemData().length);
    buf.put(legionEmblem.getCustomEmblemData()).position(0);
    log.debug("legionEmblem size: " + buf.capacity() + " bytes");

    int maxSize = 7993;
    int currentSize;
    byte[] bytes;
    do {
        log.debug("legionEmblem data position: " + buf.position());
        currentSize = buf.capacity() - buf.position();
        log.debug("legionEmblem data remaining capacity: " + currentSize + " bytes");

        if (currentSize >= maxSize) {
            bytes = new byte[maxSize];
            for (int i = 0; i < maxSize; i++) {
                bytes[i] = buf.get();
            }
            log.debug("legionEmblem data send size: " + bytes.length + " bytes");
            PacketSendUtility.sendPacket(player, new SM_LEGION_SEND_EMBLEM_DATA(maxSize, bytes));
        } else {
            bytes = new byte[currentSize];
            for (int i = 0; i < currentSize; i++) {
                bytes[i] = buf.get();
            }
            log.debug("legionEmblem data send size: " + bytes.length + " bytes");
            PacketSendUtility.sendPacket(player, new SM_LEGION_SEND_EMBLEM_DATA(currentSize, bytes));
        }
    } while (buf.capacity() != buf.position());
}
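The do/while above walks the buffer by comparing position() with capacity(): because the buffer is allocated to exactly the emblem size, capacity() doubles as the total payload length. A stripped-down sketch of the same pattern, with a hypothetical send(byte[]) standing in for PacketSendUtility.sendPacket(...):

import java.nio.ByteBuffer;

class ChunkedSend {
    static final int MAX_CHUNK = 7993; // same chunk limit as the example above

    static void sendInChunks(byte[] payload) {
        ByteBuffer buf = ByteBuffer.allocate(payload.length); // capacity == payload length
        buf.put(payload).position(0);
        while (buf.position() < buf.capacity()) { // loop until the whole buffer is consumed
            int chunk = Math.min(MAX_CHUNK, buf.capacity() - buf.position());
            byte[] bytes = new byte[chunk];
            buf.get(bytes);  // bulk get advances position by chunk
            send(bytes);     // hypothetical transport call
        }
    }

    static void send(byte[] bytes) { /* stand-in for PacketSendUtility.sendPacket(...) */ }
}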
From source file: org.bimserver.collada.ColladaSerializer.java

private void setGeometry(PrintWriter out, IfcProduct ifcProductObject, String material)
        throws RenderEngineException, SerializerException {
    // Mostly just skips IfcOpeningElements, which one would probably not want to end up in the Collada file.
    if (ifcProductObject instanceof IfcFeatureElementSubtraction)
        return;

    GeometryInfo geometryInfo = ifcProductObject.getGeometry();
    if (geometryInfo != null && geometryInfo.getTransformation() != null) {
        GeometryData geometryData = geometryInfo.getData();
        ByteBuffer indicesBuffer = ByteBuffer.wrap(geometryData.getIndices());
        indicesBuffer.order(ByteOrder.LITTLE_ENDIAN);

        // TODO: In Blender (3d modeling tool) and Three.js, normals are ignored in favor of
        // vertex order. The incoming geometry seems to be in order 0 1 2 when it needs to be
        // in 1 0 2. Need more test cases.
        // Failing order: (0, 1050, 2800), (0, 1050, 3100), (3580, 1050, 3100)
        // Successful order: (0, 1050, 3100), (0, 1050, 2800), (3580, 1050, 3100)
        List<Integer> list = new ArrayList<Integer>();
        while (indicesBuffer.hasRemaining())
            list.add(indicesBuffer.getInt());
        indicesBuffer.rewind();
        for (int i = 0; i < list.size(); i += 3) {
            Integer first = list.get(i);
            Integer next = list.get(i + 1);
            list.set(i, next);
            list.set(i + 1, first);
        }

        // Positions: the X or the Y or the Z of (X, Y, Z).
        ByteBuffer positionsBuffer = ByteBuffer.wrap(geometryData.getVertices());
        positionsBuffer.order(ByteOrder.LITTLE_ENDIAN);

        // Do a pass to find the highest and lowest X, Y and Z for the considered objects.
        while (positionsBuffer.hasRemaining()) {
            float x = positionsBuffer.getFloat();
            float y = positionsBuffer.getFloat();
            float z = positionsBuffer.getFloat();
            // X
            if (x > highestObserved.x())
                highestObserved.x(x);
            else if (x < lowestObserved.x())
                lowestObserved.x(x);
            // Y
            if (y > highestObserved.y())
                highestObserved.y(y);
            else if (y < lowestObserved.y())
                lowestObserved.y(y);
            // Z
            if (z > highestObserved.z())
                highestObserved.z(z);
            else if (z < lowestObserved.z())
                lowestObserved.z(z);
        }
        positionsBuffer.rewind();

        ByteBuffer normalsBuffer = ByteBuffer.wrap(geometryData.getNormals());
        normalsBuffer.order(ByteOrder.LITTLE_ENDIAN);

        // Create a geometry identification number in the form of: geom-320450
        long oid = ifcProductObject.getOid();
        String id = String.format("geom-%d", oid);

        // If the material doesn't exist in the converted map, add it.
        if (!converted.containsKey(material))
            converted.put(material, new HashSet<IfcProduct>());
        // Add the current IfcProduct to the appropriate entry in the material map.
        converted.get(material).add(ifcProductObject);

        // Name for geometry.
        String name = (ifcProductObject.getGlobalId() == null) ? "[NO_GUID]"
                : ifcProductObject.getGlobalId();

        // Counts: 4 bytes per float component; 12 bytes per (X, Y, Z) triple;
        // 12 bytes per triangle (three int indices).
        int vertexComponentsTotal = positionsBuffer.capacity() / 4,
                normalComponentsTotal = normalsBuffer.capacity() / 4;
        int verticesCount = positionsBuffer.capacity() / 12,
                normalsCount = normalsBuffer.capacity() / 12,
                triangleCount = indicesBuffer.capacity() / 12;

        // Vertex scalars as one long string: 4.05 2 1 55.0 34.01 2
        String stringPositionScalars = byteBufferToFloatingPointSpaceDelimitedString(positionsBuffer);
        // Normal scalars as one long string: 4.05 2 1 55.0 34.01 2
        String stringNormalScalars = byteBufferToFloatingPointSpaceDelimitedString(normalsBuffer);
        // doubleBufferToFloatingPointSpaceDelimitedString(flippedNormalsBuffer);
        // Vertex indices as one long string: 1 0 2 0 3 2 5 4 6
        String stringIndexScalars = listToSpaceDelimitedString(list, intFormat);

        // Write the geometry block for this IfcProduct (i.e. IfcRoof, IfcSlab, etc).
        out.println(" <geometry id=\"" + id + "\" name=\"" + name + "\">");
        out.println(" <mesh>");
        out.println(" <source id=\"positions-" + oid + "\" name=\"positions-" + oid + "\">");
        out.println(" <float_array id=\"positions-array-" + oid + "\" count=\"" + vertexComponentsTotal
                + "\">" + stringPositionScalars + "</float_array>");
        out.println(" <technique_common>");
        out.println(" <accessor count=\"" + verticesCount + "\" offset=\"0\" source=\"#positions-array-"
                + oid + "\" stride=\"3\">");
        out.println(" <param name=\"X\" type=\"float\"></param>");
        out.println(" <param name=\"Y\" type=\"float\"></param>");
        out.println(" <param name=\"Z\" type=\"float\"></param>");
        out.println(" </accessor>");
        out.println(" </technique_common>");
        out.println(" </source>");
        out.println(" <source id=\"normals-" + oid + "\" name=\"normals-" + oid + "\">");
        out.println(" <float_array id=\"normals-array-" + oid + "\" count=\"" + normalComponentsTotal
                + "\">" + stringNormalScalars + "</float_array>");
        out.println(" <technique_common>");
        out.println(" <accessor count=\"" + normalsCount + "\" offset=\"0\" source=\"#normals-array-"
                + oid + "\" stride=\"3\">");
        out.println(" <param name=\"X\" type=\"float\"></param>");
        out.println(" <param name=\"Y\" type=\"float\"></param>");
        out.println(" <param name=\"Z\" type=\"float\"></param>");
        out.println(" </accessor>");
        out.println(" </technique_common>");
        out.println(" </source>");
        out.println(" <vertices id=\"vertices-" + oid + "\">");
        out.println(" <input semantic=\"POSITION\" source=\"#positions-" + oid + "\"/>");
        out.println(" <input semantic=\"NORMAL\" source=\"#normals-" + oid + "\"/>");
        out.println(" </vertices>");
        out.println(" <triangles count=\"" + triangleCount + "\" material=\"Material-" + oid + "\">");
        out.println(" <input offset=\"0\" semantic=\"VERTEX\" source=\"#vertices-" + oid + "\"/>");
        out.println(" <p>" + stringIndexScalars + "</p>");
        out.println(" </triangles>");
        out.println(" </mesh>");
        out.println(" </geometry>");
    }
}
From source file: org.opendaylight.lispflowmapping.lisp.serializer.MapRequestSerializer.java

public ByteBuffer serialize(MapRequest mapRequest) {
    int size = Length.HEADER_SIZE;
    if (mapRequest.getSourceEid() != null && mapRequest.getSourceEid().getEid() != null) {
        size += LispAddressSerializer.getInstance().getAddressSize(mapRequest.getSourceEid().getEid());
    } else {
        size += 2;
    }
    if (mapRequest.getItrRloc() != null) {
        for (ItrRloc address : mapRequest.getItrRloc()) {
            size += LispAddressSerializer.getInstance().getAddressSize(address.getRloc());
        }
    }
    if (mapRequest.getEidItem() != null) {
        for (EidItem record : mapRequest.getEidItem()) {
            size += 2 + LispAddressSerializer.getInstance().getAddressSize(record.getEid());
        }
    }

    ByteBuffer requestBuffer = ByteBuffer.allocate(size);
    requestBuffer.put((byte) ((byte) (MessageType.MapRequest.getIntValue() << 4)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isAuthoritative()), Flags.AUTHORITATIVE)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isMapDataPresent()), Flags.MAP_DATA_PRESENT)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isProbe()), Flags.PROBE)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isSmr()), Flags.SMR)));
    requestBuffer.put((byte) (ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isPitr()), Flags.PITR)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isSmrInvoked()), Flags.SMR_INVOKED)));

    if (mapRequest.getItrRloc() != null) {
        // the IRC field is zero-based: a count of n ITR-RLOCs is encoded as n - 1
        int IRC = mapRequest.getItrRloc().size();
        if (IRC > 0) {
            IRC--;
        }
        requestBuffer.put((byte) (IRC));
    } else {
        requestBuffer.put((byte) 0);
    }
    if (mapRequest.getEidItem() != null) {
        requestBuffer.put((byte) mapRequest.getEidItem().size());
    } else {
        requestBuffer.put((byte) 0);
    }
    requestBuffer.putLong(NumberUtil.asLong(mapRequest.getNonce()));

    if (mapRequest.getSourceEid() != null && mapRequest.getSourceEid().getEid() != null) {
        LispAddressSerializer.getInstance().serialize(requestBuffer, mapRequest.getSourceEid().getEid());
    } else {
        requestBuffer.putShort((short) 0);
    }
    if (mapRequest.getItrRloc() != null) {
        for (ItrRloc address : mapRequest.getItrRloc()) {
            LispAddressSerializer.getInstance().serialize(requestBuffer, address.getRloc());
        }
    }
    if (mapRequest.getEidItem() != null) {
        for (EidItem record : mapRequest.getEidItem()) {
            requestBuffer.put((byte) 0);
            requestBuffer.put((byte) MaskUtil.getMaskForAddress(record.getEid().getAddress()));
            LispAddressSerializer.getInstance().serialize(requestBuffer, record.getEid());
        }
    }

    if (mapRequest.getMapReply() != null) {
        ByteBuffer replyBuffer = ByteBuffer.allocate(MappingRecordSerializer.getInstance()
                .getSerializationSize(mapRequest.getMapReply().getMappingRecord()));
        MappingRecordSerializer.getInstance().serialize(replyBuffer,
                mapRequest.getMapReply().getMappingRecord());
        ByteBuffer combinedBuffer = ByteBuffer.allocate(requestBuffer.capacity() + replyBuffer.capacity());
        combinedBuffer.put(requestBuffer.array());
        combinedBuffer.put(replyBuffer.array());
        return combinedBuffer;
    }
    return requestBuffer;
}
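When a Map-Reply record is piggybacked, the serializer above concatenates two buffers by allocating requestBuffer.capacity() + replyBuffer.capacity() and copying both backing arrays. This works only because each buffer is a heap buffer allocated to exactly its serialized size, so capacity() equals the number of valid bytes. A minimal sketch of that idiom (class and method names are illustrative):

import java.nio.ByteBuffer;

class BufferConcat {
    // Assumes both arguments are non-direct (heap) buffers allocated to exactly
    // the size of their contents, so array() holds only valid bytes.
    static ByteBuffer concat(ByteBuffer a, ByteBuffer b) {
        ByteBuffer combined = ByteBuffer.allocate(a.capacity() + b.capacity());
        combined.put(a.array());
        combined.put(b.array());
        return combined;
    }
}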
From source file: org.opendaylight.lispflowmapping.lisp.serializer.MapNotifySerializer.java

public MapNotify deserialize(ByteBuffer notifyBuffer) {
    try {
        MapNotifyBuilder builder = new MapNotifyBuilder();
        builder.setMappingRecordItem(new ArrayList<MappingRecordItem>());

        byte typeAndFlags = notifyBuffer.get();
        boolean xtrSiteIdPresent = ByteUtil.extractBit(typeAndFlags, Flags.XTRSITEID);
        builder.setXtrSiteIdPresent(xtrSiteIdPresent);

        notifyBuffer.position(notifyBuffer.position() + Length.RES);
        builder.setMergeEnabled(ByteUtil.extractBit(notifyBuffer.get(), Flags.MERGE_ENABLED));
        byte recordCount = (byte) ByteUtil.getUnsignedByte(notifyBuffer);
        builder.setNonce(notifyBuffer.getLong());
        builder.setKeyId(notifyBuffer.getShort());

        short authenticationLength = notifyBuffer.getShort();
        byte[] authenticationData = new byte[authenticationLength];
        notifyBuffer.get(authenticationData);
        builder.setAuthenticationData(authenticationData);

        if (xtrSiteIdPresent) {
            List<MappingRecordBuilder> mrbs = new ArrayList<MappingRecordBuilder>();
            for (int i = 0; i < recordCount; i++) {
                mrbs.add(MappingRecordSerializer.getInstance().deserializeToBuilder(notifyBuffer));
            }
            byte[] xtrIdBuf = new byte[MapRegisterSerializer.Length.XTRID_SIZE];
            notifyBuffer.get(xtrIdBuf);
            XtrId xtrId = new XtrId(xtrIdBuf);
            byte[] siteIdBuf = new byte[MapRegisterSerializer.Length.SITEID_SIZE];
            notifyBuffer.get(siteIdBuf);
            SiteId siteId = new SiteId(siteIdBuf);
            builder.setXtrId(xtrId);
            builder.setSiteId(siteId);
            for (MappingRecordBuilder mrb : mrbs) {
                mrb.setXtrId(xtrId);
                mrb.setSiteId(siteId);
                builder.getMappingRecordItem()
                        .add(new MappingRecordItemBuilder().setMappingRecord(mrb.build()).build());
            }
        } else {
            for (int i = 0; i < recordCount; i++) {
                builder.getMappingRecordItem().add(new MappingRecordItemBuilder()
                        .setMappingRecord(MappingRecordSerializer.getInstance().deserialize(notifyBuffer))
                        .build());
            }
        }

        notifyBuffer.limit(notifyBuffer.position());
        return builder.build();
    } catch (RuntimeException re) {
        throw new LispSerializationException(
                "Couldn't deserialize Map-Notify (len=" + notifyBuffer.capacity() + ")", re);
    }
}