List of usage examples for java.nio.ByteBuffer.getInt()
public abstract int getInt();
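getInt() is a relative read: it takes the next four bytes at the buffer's current position, composes them into an int according to the buffer's byte order (big-endian by default), and advances the position by four. It throws BufferUnderflowException if fewer than four bytes remain. A minimal round trip:

import java.nio.ByteBuffer;

public class GetIntDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.putInt(42);                       // bytes 0-3
        buf.putInt(-7);                       // bytes 4-7
        buf.flip();                           // position = 0, limit = 8: ready to read
        System.out.println(buf.getInt());     // 42, position advances to 4
        System.out.println(buf.getInt());     // -7, position advances to 8
    }
}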
From source file:com.koda.integ.hbase.blockcache.OffHeapBlockCacheOld.java
/**
 * Read external with codec.
 *
 * @param blockName the block name
 * @return the cacheable
 * @throws IOException Signals that an I/O exception has occurred.
 */
private Cacheable readExternalWithCodec(String blockName) throws IOException {
    if (overflowExtEnabled == false)
        return null;
    // Check if we already have this block in the external storage cache
    try {
        // We use a 16-byte hash for the external storage cache
        byte[] hashed = Utils.hash128(blockName);
        StorageHandle handle = (StorageHandle) extStorageCache.get(hashed);
        if (handle == null)
            return null;
        ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress().getBuffer();
        SerDe serde = extStorageCache.getSerDe();
        @SuppressWarnings("unused")
        Codec codec = extStorageCache.getCompressionCodec();
        buffer.clear();
        StorageHandle newHandle = storage.getData(handle, buffer);
        if (buffer.position() > 0)
            buffer.flip();
        int size = buffer.getInt();
        if (size == 0)
            return null;
        // Skip the key
        int keySize = buffer.getInt();
        buffer.position(8 + keySize);
        boolean inMemory = buffer.get() == (byte) 1;
        //buffer.position(5);
        buffer.limit(size + 4);
        Cacheable obj = (Cacheable) serde.readCompressed(buffer/*, codec*/);
        if (inMemory) {
            permGenCache.put(blockName, obj);
        } else {
            tenGenCache.put(blockName, obj);
        }
        if (newHandle.equals(handle) == false) {
            extStorageCache.put(hashed, newHandle);
        }
        return obj;
    } catch (NativeMemoryException e) {
        throw new IOException(e);
    }
}
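The record layout implied above is [int size][int keySize][key bytes][byte inMemory][payload]: the two getInt() calls read the length prefixes, and position(8 + keySize) jumps over both ints plus the key. A minimal sketch of parsing that kind of length-prefixed record (the layout and names are inferred from this example, not taken from the library's documentation):

// Sketch: assumes a [int size][int keySize][key][byte flag][payload] layout.
static void parseRecord(ByteBuffer buffer) {
    int size = buffer.getInt();                   // total record size
    if (size == 0) return;                        // nothing stored
    int keySize = buffer.getInt();                // length of the key that follows
    buffer.position(8 + keySize);                 // skip the two ints plus the key
    boolean inMemory = buffer.get() == (byte) 1;  // one-byte flag
    buffer.limit(size + 4);                       // bound the payload for the deserializer
    // hand the remaining [position, limit) window to a deserializer
}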
From source file:com.yobidrive.diskmap.buckets.BucketTableManager.java
private void initializeBucketTableFromLastCommittedBucketFile() throws BucketTableManagerException {
    FileInputStream tableStream = null;
    FileChannel fileChannel = null;
    try {
        File latestCommittedFile = getLatestCommitedFile();
        if (latestCommittedFile != null) {
            tableStream = new FileInputStream(latestCommittedFile);
            fileChannel = tableStream.getChannel();
            ByteBuffer buffer = ByteBuffer.allocate(HEADERSIZE);
            fileChannel.position(0L);
            int read = fileChannel.read(buffer);
            if (read < HEADERSIZE) {
                fileChannel.close();
                throw new BucketTableManagerException(
                        "Wrong bucket table header size: " + read + "/" + HEADERSIZE);
            }
            // Check the content of the header. Start with big-endian (Java's default)
            buffer.rewind();
            byteOrder = ByteOrder.BIG_ENDIAN;
            buffer.order(byteOrder);
            int magic = buffer.getInt();
            if (magic == MAGICSTART_BADENDIAN) {
                byteOrder = ByteOrder.LITTLE_ENDIAN;
                buffer.order(byteOrder);
            } else if (magic != MAGICSTART) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Read the number of buckets
            long headerMapSize = buffer.getLong();
            // Read the checkpoint
            NeedlePointer includedCheckpoint = new NeedlePointer();
            includedCheckpoint.getNeedlePointerFromBuffer(buffer);
            // Read the second magic number
            magic = buffer.getInt();
            if (magic != MAGICEND) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Check the number of buckets against the requested map size
            if (headerMapSize != mapSize) {
                // Map size does not match
                fileChannel.close();
                throw new BucketTableManagerException(
                        "Requested map size " + mapSize + " does not match header map size " + headerMapSize);
            }
            // Set the initial checkpoint
            bucketTable.setInitialCheckPoint(includedCheckpoint);
            // Now read all entries
            logger.info("Hot start: loading buckets...");
            for (int i = 0; i < nbBuffers; i++) {
                bucketTable.prepareBufferForReading(i);
                read = fileChannel.read(bucketTable.getBuffer(i));
                if (read < bucketTable.getBuffer(i).limit())
                    throw new BucketTableManagerException("Incomplete bucket table file "
                            + latestCommittedFile.getName() + ", expected " + mapSize + HEADERSIZE);
                //else
                //    logger.info("Hot start: loaded "+(i+1)*entriesPerBuffer+" buckets"+((i<(nbBuffers-1))?"...":"")) ;
            }
            // Check the second magic marker
            buffer = ByteBuffer.allocate(NeedleLogInfo.INFOSIZE);
            buffer.rewind();
            buffer.limit(INTSIZE);
            if (fileChannel.read(buffer) < INTSIZE)
                throw new BucketTableManagerException(
                        "Incomplete bucket table file, missing second magic number "
                                + latestCommittedFile.getName());
            buffer.rewind();
            magic = buffer.getInt();
            if (magic != MAGICSTART) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Now read the clean counters
            while (true) {
                buffer.rewind();
                buffer.limit(NeedleLogInfo.INFOSIZE);
                read = fileChannel.read(buffer);
                if (read > 0 && read < NeedleLogInfo.INFOSIZE)
                    throw new BucketTableManagerException("Incomplete bucket table file, log info too short "
                            + latestCommittedFile.getName() + ", expected " + mapSize + HEADERSIZE);
                if (read <= 0)
                    break;
                else {
                    NeedleLogInfo nli = new NeedleLogInfo(useAverage);
                    buffer.rewind();
                    nli.getNeedleLogInfo(buffer);
                    logInfoPerLogNumber.put(new Integer(nli.getNeedleFileNumber()), nli);
                }
            }
            logger.info("Hot start: loaded " + (nbBuffers * entriesPerBuffer) + " buckets");
        } else {
            // Empty file
            bucketTable.setInitialCheckPoint(new NeedlePointer());
            bucketTable.format();
        }
    } catch (IOException ie) {
        throw new BucketTableManagerException("Failed initializing bucket table", ie);
    } catch (BufferUnderflowException bue) {
        throw new BucketTableManagerException("Bucket table too short", bue);
    } finally {
        if (fileChannel != null) {
            try {
                fileChannel.close();
            } catch (IOException ex) {
                throw new BucketTableManagerException("Error while closing file channel", ex);
            }
        }
    }
}
From source file:org.opendaylight.controller.protocol_plugin.openflow.vendorextension.v6extension.V6Match.java
private int getNetworkMaskPrefixLength(byte[] netMask) {
    ByteBuffer nm = ByteBuffer.wrap(netMask);
    int trailingZeros = Integer.numberOfTrailingZeros(nm.getInt());
    return 32 - trailingZeros;
}
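ByteBuffer defaults to big-endian, so getInt() on a wrapped 4-byte netmask yields the mask in network order, and the trailing-zero count is the number of host bits. For example, 255.255.255.0 becomes 0xFFFFFF00, which has 8 trailing zeros, giving 32 - 8 = 24. The all-zero mask also works, since Integer.numberOfTrailingZeros(0) is 32, giving /0:

// Quick check of the computation above.
byte[] mask = { (byte) 255, (byte) 255, (byte) 255, 0 };
int m = ByteBuffer.wrap(mask).getInt();                      // 0xFFFFFF00
System.out.println(32 - Integer.numberOfTrailingZeros(m));   // 24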
From source file:org.bimserver.collada.ColladaSerializer.java
private void setGeometry(PrintWriter out, IfcProduct ifcProductObject, String material)
        throws RenderEngineException, SerializerException {
    // Mostly just skips IfcOpeningElements, which one would probably not want to end up in the COLLADA file.
    if (ifcProductObject instanceof IfcFeatureElementSubtraction)
        return;
    //
    GeometryInfo geometryInfo = ifcProductObject.getGeometry();
    if (geometryInfo != null && geometryInfo.getTransformation() != null) {
        GeometryData geometryData = geometryInfo.getData();
        ByteBuffer indicesBuffer = ByteBuffer.wrap(geometryData.getIndices());
        indicesBuffer.order(ByteOrder.LITTLE_ENDIAN);
        // TODO: In Blender (3D modeling tool) and Three.js, normals are ignored in favor of vertex order.
        // The incoming geometry seems to be in order 0 1 2 when it needs to be in 1 0 2. Need more test cases.
        // Failing order: (0, 1050, 2800), (0, 1050, 3100), (3580, 1050, 3100)
        // Successful order: (0, 1050, 3100), (0, 1050, 2800), (3580, 1050, 3100)
        List<Integer> list = new ArrayList<Integer>();
        while (indicesBuffer.hasRemaining())
            list.add(indicesBuffer.getInt());
        indicesBuffer.rewind();
        for (int i = 0; i < list.size(); i += 3) {
            Integer first = list.get(i);
            Integer next = list.get(i + 1);
            list.set(i, next);
            list.set(i + 1, first);
        }
        // Positions: the X, Y, and Z components of each (X, Y, Z) vertex.
        ByteBuffer positionsBuffer = ByteBuffer.wrap(geometryData.getVertices());
        positionsBuffer.order(ByteOrder.LITTLE_ENDIAN);
        // Do a pass to find the highest and lowest coordinates for the considered objects.
        while (positionsBuffer.hasRemaining()) {
            float x = positionsBuffer.getFloat();
            float y = positionsBuffer.getFloat();
            float z = positionsBuffer.getFloat();
            // X
            if (x > highestObserved.x())
                highestObserved.x(x);
            else if (x < lowestObserved.x())
                lowestObserved.x(x);
            // Y
            if (y > highestObserved.y())
                highestObserved.y(y);
            else if (y < lowestObserved.y())
                lowestObserved.y(y);
            // Z
            if (z > highestObserved.z())
                highestObserved.z(z);
            else if (z < lowestObserved.z())
                lowestObserved.z(z);
        }
        positionsBuffer.rewind();
        //
        ByteBuffer normalsBuffer = ByteBuffer.wrap(geometryData.getNormals());
        normalsBuffer.order(ByteOrder.LITTLE_ENDIAN);
        // Create a geometry identification number in the form: geom-320450
        long oid = ifcProductObject.getOid();
        String id = String.format("geom-%d", oid);
        // If the material doesn't exist in the converted map, add it.
        if (!converted.containsKey(material))
            converted.put(material, new HashSet<IfcProduct>());
        // Add the current IfcProduct to the appropriate entry in the material map.
        converted.get(material).add(ifcProductObject);
        // Name for the geometry.
        String name = (ifcProductObject.getGlobalId() == null) ? "[NO_GUID]" : ifcProductObject.getGlobalId();
        // Counts.
        int vertexComponentsTotal = positionsBuffer.capacity() / 4,
                normalComponentsTotal = normalsBuffer.capacity() / 4;
        int verticesCount = positionsBuffer.capacity() / 12,
                normalsCount = normalsBuffer.capacity() / 12,
                triangleCount = indicesBuffer.capacity() / 12;
        // Vertex scalars as one long string: 4.05 2 1 55.0 34.01 2
        String stringPositionScalars = byteBufferToFloatingPointSpaceDelimitedString(positionsBuffer);
        // Normal scalars as one long string: 4.05 2 1 55.0 34.01 2
        String stringNormalScalars = byteBufferToFloatingPointSpaceDelimitedString(normalsBuffer);
        //doubleBufferToFloatingPointSpaceDelimitedString(flippedNormalsBuffer);
        // Vertex indices as one long string: 1 0 2 0 3 2 5 4 6
        String stringIndexScalars = listToSpaceDelimitedString(list, intFormat);
        // Write the geometry block for this IfcProduct (i.e. IfcRoof, IfcSlab, etc.).
        out.println(" <geometry id=\"" + id + "\" name=\"" + name + "\">");
        out.println(" <mesh>");
        out.println(" <source id=\"positions-" + oid + "\" name=\"positions-" + oid + "\">");
        out.println(" <float_array id=\"positions-array-" + oid + "\" count=\"" + vertexComponentsTotal + "\">" + stringPositionScalars + "</float_array>");
        out.println(" <technique_common>");
        out.println(" <accessor count=\"" + verticesCount + "\" offset=\"0\" source=\"#positions-array-" + oid + "\" stride=\"3\">");
        out.println(" <param name=\"X\" type=\"float\"></param>");
        out.println(" <param name=\"Y\" type=\"float\"></param>");
        out.println(" <param name=\"Z\" type=\"float\"></param>");
        out.println(" </accessor>");
        out.println(" </technique_common>");
        out.println(" </source>");
        out.println(" <source id=\"normals-" + oid + "\" name=\"normals-" + oid + "\">");
        out.println(" <float_array id=\"normals-array-" + oid + "\" count=\"" + normalComponentsTotal + "\">" + stringNormalScalars + "</float_array>");
        out.println(" <technique_common>");
        out.println(" <accessor count=\"" + normalsCount + "\" offset=\"0\" source=\"#normals-array-" + oid + "\" stride=\"3\">");
        out.println(" <param name=\"X\" type=\"float\"></param>");
        out.println(" <param name=\"Y\" type=\"float\"></param>");
        out.println(" <param name=\"Z\" type=\"float\"></param>");
        out.println(" </accessor>");
        out.println(" </technique_common>");
        out.println(" </source>");
        out.println(" <vertices id=\"vertices-" + oid + "\">");
        out.println(" <input semantic=\"POSITION\" source=\"#positions-" + oid + "\"/>");
        out.println(" <input semantic=\"NORMAL\" source=\"#normals-" + oid + "\"/>");
        out.println(" </vertices>");
        out.println(" <triangles count=\"" + triangleCount + "\" material=\"Material-" + oid + "\">");
        out.println(" <input offset=\"0\" semantic=\"VERTEX\" source=\"#vertices-" + oid + "\"/>");
        out.println(" <p>" + stringIndexScalars + "</p>");
        out.println(" </triangles>");
        out.println(" </mesh>");
        out.println(" </geometry>");
    }
}
From source file:org.midonet.netlink.rtnetlink.Link.java
@Override
public void use(ByteBuffer buf, short id) {
    ByteOrder originalOrder = buf.order();
    try {
        if (!NetlinkMessage.isNested(id)) {
            switch (id) {
            case Attr.IFLA_ADDRESS:
                if (buf.remaining() != 6) {
                    this.mac = null;
                } else {
                    byte[] rhs = new byte[6];
                    buf.get(rhs);
                    this.mac = MAC.fromAddress(rhs);
                }
                break;
            case Attr.IFLA_IFNAME:
                byte[] s = new byte[buf.remaining() - 1];
                buf.get(s);
                this.ifname = new String(s);
                break;
            case Attr.IFLA_MASTER:
                if (buf.remaining() == 4) {
                    this.masterIndex = buf.getInt();
                }
                break;
            case Attr.IFLA_MTU:
                if (buf.remaining() != 4) {
                    this.mtu = 0;
                } else {
                    this.mtu = buf.getInt();
                }
                break;
            case Attr.IFLA_LINK:
                if (buf.remaining() != 4) {
                    this.link = this.ifi.index;
                } else {
                    this.link = buf.getInt();
                }
                break;
            case Attr.IFLA_LINKINFO:
                NetlinkMessage.scanNestedAttribute(buf, this);
                break;
            default:
                break;
            }
        } else {
            switch (NetlinkMessage.unnest(id)) {
            case NestedAttr.LinkInfo.IFLA_INFO_KIND:
                byte[] s = new byte[buf.remaining() - 1];
                buf.get(s);
                // Since the longest length for the field is 7, netlink assigns
                // 7 bytes here, padding to the right with '\0' bytes
                int zeroIdx = ArrayUtils.indexOf(s, (byte) 0);
                if (zeroIdx == ArrayUtils.INDEX_NOT_FOUND) {
                    info.kind = new String(s);
                } else {
                    info.kind = new String(Arrays.copyOf(s, zeroIdx));
                }
                break;
            case NestedAttr.LinkInfo.IFLA_INFO_DATA:
                byte[] data = new byte[buf.remaining() - 1];
                buf.get(data);
                info.data = ByteBuffer.wrap(data);
                break;
            default:
                break;
            }
        }
    } finally {
        buf.order(originalOrder);
    }
}
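Note the defensive pattern here: each fixed-width attribute checks buf.remaining() before calling getInt(), so a malformed netlink attribute falls back to a default instead of throwing BufferUnderflowException, and the caller's byte order is saved and restored in a finally block. A condensed sketch of that discipline (attribute handling is illustrative):

// Sketch: read one optional 4-byte attribute without risking underflow,
// restoring the caller's byte order afterwards.
static int readIntAttr(ByteBuffer buf, int defaultValue) {
    ByteOrder original = buf.order();          // save the caller's byte order
    try {
        return buf.remaining() == 4            // getInt() needs exactly 4 bytes here
                ? buf.getInt()
                : defaultValue;                // malformed attribute: use a default
    } finally {
        buf.order(original);                   // restore, even on an exception
    }
}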
From source file:org.zaproxy.zap.extension.ascanrulesAlpha.GitMetadata.java
/**
 * Gets a Map of relative file paths to SHA1s using raw Git index file data (which is not
 * verified here).
 *
 * @param data the raw binary data from a valid Git index file (versions 2, 3, and 4 are supported)
 * @return a Map of relative file paths to SHA1s using raw Git index file data
 * @todo consider sharing this method between the Git Spider and the SourceCodeDisclosure scanner.
 */
@SuppressWarnings("unused")
public Map<String, String> getIndexSha1s(byte[] data) throws Exception {
    Map<String, String> map = new TreeMap<String, String>();

    // wrap up the data, so we can read it..
    ByteBuffer dataBuffer = ByteBuffer.wrap(data);

    byte[] dircArray = new byte[4];
    dataBuffer.get(dircArray);

    int indexFileVersion = dataBuffer.getInt();
    int indexEntryCount = dataBuffer.getInt();

    if (indexFileVersion != 2 && indexFileVersion != 3 && indexFileVersion != 4) {
        throw new Exception(
                "Only Git Index File versions 2, 3, and 4 are currently supported. Git Index File Version "
                        + indexFileVersion + " was found.");
    }

    // for version 4 (and upwards?), we need to know the previous entry name, so store it
    String previousIndexEntryName = "";
    for (int entryIndex = 0; entryIndex < indexEntryCount; entryIndex++) {
        int entryBytesRead = 0;
        int indexEntryCtime1 = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntryCtime2 = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntryMtime1 = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntryMtime2 = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntryDev = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntryInode = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntryMode = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntryUid = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntryGid = dataBuffer.getInt();
        entryBytesRead += 4;
        int indexEntrySize = dataBuffer.getInt();
        entryBytesRead += 4;

        // the size is unspecified for the entry id, but it seems to be a 40 hex character
        // SHA-1 string stored as 20 bytes, network order
        byte[] indexEntryIdBuffer = new byte[20];
        dataBuffer.get(indexEntryIdBuffer);
        entryBytesRead += 20;
        String indexEntrySha1 = Hex.encodeHexString(indexEntryIdBuffer);

        short indexEntryFlags = dataBuffer.getShort();
        entryBytesRead += 2;

        // mask off all but the least significant 12 bits of the index entry flags to get the
        // length of the name in bytes
        int indexEntryNameByteLength = indexEntryFlags & 4095;

        // mask off bit 14 of the index entry flags to get the extended flag for the entry
        int indexEntryExtendedFlag = ((indexEntryFlags & (1 << 14)) >> 14);

        // check that we parsed out the index entry extended flag correctly.
        // this is more of an assertion than anything. It's already saved my bacon once.
        if (indexEntryExtendedFlag != 0 && indexEntryExtendedFlag != 1) {
            throw new Exception("Error parsing out the extended flag for index entry " + entryIndex
                    + ". We got " + indexEntryExtendedFlag);
        }
        if (indexFileVersion == 2 && indexEntryExtendedFlag != 0) {
            throw new Exception(
                    "Index File Version 2 is supposed to have the extended flag set to 0. For index entry "
                            + entryIndex + ", it is set to " + indexEntryExtendedFlag);
        }

        // specific to version 3 and above, if the extended flag is set for the entry.
        if (indexFileVersion > 2 && indexEntryExtendedFlag == 1) {
            short indexEntryExtendedFlags = dataBuffer.getShort();
            entryBytesRead += 2;
        }

        String indexEntryName = null;
        if (indexFileVersion > 3) {
            // Inflate the (deflated) entry name for this index entry based on the previous
            // entry name, since index file version 4 stores names this way.
            // Get bytes until we find one with the msb NOT set. Count the bytes.
            int n = 0, removeNfromPreviousName = 0;
            byte msbsetmask = (byte) (1 << 7); // 1000 0000
            byte msbunsetmask = (byte) ((~msbsetmask) & 0xFF); // 0111 1111
            while (++n > 0) {
                byte byteRead = dataBuffer.get();
                entryBytesRead++;
                if (n == 1) // zero the msb of the first byte read
                    removeNfromPreviousName = (removeNfromPreviousName << 8)
                            | (0xFF & (byteRead & msbunsetmask));
                else // set the msb of subsequent bytes read
                    removeNfromPreviousName = (removeNfromPreviousName << 8)
                            | (0xFF & (byteRead | msbsetmask));
                if ((byteRead & msbsetmask) == 0)
                    break; // break if the msb is NOT set in the byte
            }
            if (n > 4)
                throw new Exception(
                        "An entry name is never expected to be > 2^32 bytes long. Some file corruption may have occurred, or a parsing error has occurred");

            // now read the (partial) name for the current entry
            int bytesToReadCurrentNameEntry = indexEntryNameByteLength
                    - (previousIndexEntryName.length() - removeNfromPreviousName);
            byte[] indexEntryNameBuffer = new byte[bytesToReadCurrentNameEntry];
            dataBuffer.get(indexEntryNameBuffer);
            entryBytesRead += bytesToReadCurrentNameEntry;

            // build it up
            indexEntryName = previousIndexEntryName.substring(0,
                    previousIndexEntryName.length() - removeNfromPreviousName)
                    + new String(indexEntryNameBuffer);
        } else {
            // indexFileVersion <= 3 (waaaaay simpler logic, but the index file is larger in
            // this version than for v4+)
            byte[] indexEntryNameBuffer = new byte[indexEntryNameByteLength];
            dataBuffer.get(indexEntryNameBuffer);
            entryBytesRead += indexEntryNameByteLength;
            indexEntryName = new String(indexEntryNameBuffer);
        }
        if (log.isDebugEnabled())
            log.debug("Entry " + entryIndex + " has name " + indexEntryName);

        // and store off the index entry name, for the next iteration
        previousIndexEntryName = indexEntryName;

        // skip past the zero byte terminating the string (whose purpose seems completely
        // pointless to me, but hey)
        byte indexEntryNul = dataBuffer.get();
        entryBytesRead++;

        // the padding after the pathname does not exist for versions 4 or later.
        if (indexFileVersion < 4) {
            // read the 0-7 (NUL) bytes needed to keep reading index entries on an 8 byte boundary
            int entryBytesToRead = ((8 - (entryBytesRead % 8)) % 8);
            byte[] indexEntryPadBuffer = new byte[entryBytesToRead];
            dataBuffer.get(indexEntryPadBuffer);
            entryBytesRead += entryBytesToRead;
        }

        // Git does not store entries for directories, but just files/symlinks/Git links, so no
        // need to handle directories here, unlike with SVN, for instance.
        if (indexEntryName != null && indexEntryName.length() > 0) {
            map.put(indexEntryName, indexEntrySha1);
        }
    }
    return map;
}
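The version-4 path above decodes a variable-length integer: bytes are consumed until one arrives with its most significant bit clear. A standalone sketch of the common big-endian MSB-continuation form of that decoding, with 7 payload bits per byte (simplified relative to Git's exact offset encoding):

// Sketch: decode a big-endian, MSB-continuation varint from a ByteBuffer.
static int readVarInt(ByteBuffer buf) {
    int value = 0;
    for (int i = 0; i < 5; i++) {          // a 32-bit value needs at most 5 groups of 7 bits
        byte b = buf.get();
        value = (value << 7) | (b & 0x7F); // low seven bits are payload
        if ((b & 0x80) == 0)               // high bit clear: this was the last byte
            return value;
    }
    throw new IllegalStateException("varint too long for an int");
}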
From source file:org.carbondata.core.util.CarbonUtil.java
/**
 * @param listOfNodeInfo
 * @param filesLocation
 * @param measureCount
 * @param mdKeySize
 * @param fileSize
 * @return
 */
private static List<BlockletInfo> getBlockletDetails(List<BlockletInfo> listOfNodeInfo, String filesLocation,
        int measureCount, int mdKeySize, long fileSize) {
    long offset = fileSize - CarbonCommonConstants.LONG_SIZE_IN_BYTE;
    FileHolder fileHolder = FileFactory.getFileHolder(FileFactory.getFileType(filesLocation));
    offset = fileHolder.readDouble(filesLocation, offset);
    int totalMetaDataLength = (int) (fileSize - CarbonCommonConstants.LONG_SIZE_IN_BYTE - offset);
    ByteBuffer buffer = ByteBuffer.wrap(fileHolder.readByteArray(filesLocation, offset, totalMetaDataLength));
    buffer.rewind();
    while (buffer.hasRemaining()) {
        int[] msrLength = new int[measureCount];
        long[] msrOffset = new long[measureCount];
        BlockletInfo info = new BlockletInfo();
        byte[] startKey = new byte[mdKeySize];
        byte[] endKey = new byte[mdKeySize];
        info.setFileName(filesLocation);
        info.setNumberOfKeys(buffer.getInt());
        info.setKeyLength(buffer.getInt());
        info.setKeyOffset(buffer.getLong());
        buffer.get(startKey);
        buffer.get(endKey);
        info.setStartKey(startKey);
        info.setEndKey(endKey);
        for (int i = 0; i < measureCount; i++) {
            msrLength[i] = buffer.getInt();
            msrOffset[i] = buffer.getLong();
        }
        info.setMeasureLength(msrLength);
        info.setMeasureOffset(msrOffset);
        listOfNodeInfo.add(info);
    }
    fileHolder.finish();
    return listOfNodeInfo;
}
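The method reads a footer pointer from the end of the file, wraps the whole metadata region in one ByteBuffer, and walks it with getInt()/getLong() pairs until hasRemaining() is false. A minimal sketch of that fixed-layout walk (metadataBytes, mdKeySize, and measureCount are assumed inputs; the field layout is the one implied above, not a documented format):

// Sketch: walk a wrapped metadata region of fixed-layout entries.
ByteBuffer buffer = ByteBuffer.wrap(metadataBytes);
while (buffer.hasRemaining()) {
    int numberOfKeys = buffer.getInt();   // 4 bytes
    int keyLength = buffer.getInt();      // 4 bytes
    long keyOffset = buffer.getLong();    // 8 bytes
    byte[] startKey = new byte[mdKeySize];
    buffer.get(startKey);                 // advances the position by mdKeySize
    byte[] endKey = new byte[mdKeySize];
    buffer.get(endKey);
    for (int i = 0; i < measureCount; i++) {
        int measureLength = buffer.getInt();
        long measureOffset = buffer.getLong();
        // ... record the (length, offset) pair for measure i
    }
}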
From source file:org.apache.bookkeeper.client.BookieWriteLedgerTest.java
private void readEntries(LedgerHandle lh, List<byte[]> entries) throws InterruptedException, BKException {
    ls = lh.readEntries(0, numEntriesToWrite - 1);
    int index = 0;
    while (ls.hasMoreElements()) {
        ByteBuffer origbb = ByteBuffer.wrap(entries.get(index++));
        Integer origEntry = origbb.getInt();
        ByteBuffer result = ByteBuffer.wrap(ls.nextElement().getEntry());
        LOG.debug("Length of result: " + result.capacity());
        LOG.debug("Original entry: " + origEntry);
        Integer retrEntry = result.getInt();
        LOG.debug("Retrieved entry: " + retrEntry);
        assertTrue("Checking entry " + index + " for equality", origEntry.equals(retrEntry));
    }
}
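Each entry here was evidently written with an integer in its first four bytes, so wrapping both the original byte[] and the returned payload and comparing their getInt() values verifies the round trip. The write side plausibly looks like this (a sketch; rand and the entries list are assumed from the surrounding test, not shown in this excerpt):

// Sketch: build an entry whose first four bytes are a big-endian int.
ByteBuffer entry = ByteBuffer.allocate(4);
entry.putInt(rand.nextInt(Integer.MAX_VALUE)); // the value compared later by getInt()
entries.add(entry.array());                    // keep a copy for readEntries()
lh.addEntry(entry.array());                    // write it to the ledger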
From source file:de.hpi.fgis.hdrs.Triple.java
public void readFields(ByteBuffer buffer) {
    // read the header
    Slen = buffer.getShort();
    Plen = buffer.getShort();
    Olen = buffer.getInt();
    multiplicity = buffer.getInt();
    // read the data
    this.buffer = buffer.array();
    int size = (int) Slen + (int) Plen + Olen;
    offset = buffer.arrayOffset() + buffer.position();
    buffer.position(buffer.position() + size);
}
From source file:de.hpi.fgis.hdrs.Triple.java
public void readFields(ByteBuffer header, ByteBuffer data) {
    // read the header
    Slen = header.getShort();
    Plen = header.getShort();
    Olen = header.getInt();
    multiplicity = header.getInt();
    // read the data
    this.buffer = data.array();
    int size = (int) Slen + (int) Plen + Olen;
    offset = data.arrayOffset() + data.position();
    data.position(data.position() + size);
}
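Both overloads parse the same 12-byte header (two shorts and two ints) and then, rather than copying the payload, record the backing array plus an offset and advance the buffer's position past it. A sketch of that zero-copy slice-by-position idiom (field names mirror the example above):

// Sketch: consume a fixed header, then reference the payload in place.
short sLen = buffer.getShort();              // 2 bytes
short pLen = buffer.getShort();              // 2 bytes
int oLen = buffer.getInt();                  // 4 bytes
int multiplicity = buffer.getInt();          // 4 bytes

int size = sLen + pLen + oLen;               // payload length
byte[] backing = buffer.array();             // requires a heap (array-backed) buffer
int offset = buffer.arrayOffset() + buffer.position();
buffer.position(buffer.position() + size);   // step over the payload without copying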