List of usage examples for java.nio ByteBuffer position
public final Buffer position(int newPosition)
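Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of what position(int) does: it moves the buffer's cursor used by relative get/put operations without touching the underlying data, which is the basis of the seek-style reads in the examples that follow.

import java.nio.ByteBuffer;

public class PositionDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.putInt(42);              // position is now 4
        buf.putInt(7);               // position is now 8

        buf.position(4);             // jump back to the second int
        int second = buf.getInt();   // reads 7, position is 8 again

        buf.position(0);             // rewind to re-read from the start
        int first = buf.getInt();    // reads 42

        System.out.println(first + ", " + second); // prints "42, 7"
    }
}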
From source file:com.googlecode.mp4parser.boxes.microsoft.XtraBox.java
@Override
public void _parseDetails(ByteBuffer content) {
    int boxSize = content.remaining();
    data = content.slice(); // Keep this in case we fail to parse
    successfulParse = false;
    try {
        tags.clear();
        while (content.remaining() > 0) {
            XtraTag tag = new XtraTag();
            tag.parse(content);
            tags.addElement(tag);
        }
        int calcSize = detailSize();
        if (boxSize != calcSize) {
            throw new RuntimeException("Improperly handled Xtra tag: Calculated sizes don't match ( "
                    + boxSize + "/" + calcSize + ")");
        }
        successfulParse = true;
    } catch (Exception e) {
        successfulParse = false;
        System.err.println("Malformed Xtra Tag detected: " + e.toString());
        e.printStackTrace();
        content.position(content.position() + content.remaining());
    } finally {
        // Just in case we bailed out mid-parse we don't want to leave the byte order in MS land
        content.order(ByteOrder.BIG_ENDIAN);
    }
}
From source file:de.rwhq.btree.LeafNode.java
/**
 * Adds an entry to this LeafNode's rawPage, does not sync!
 *
 * @param key
 * @param value
 */
private void addEntry(final K key, final V value) {
    final ByteBuffer buf = rawPage().bufferForWriting(0);

    int offset = offsetOfKey(key, true);

    if (offset == -1) {
        offset = offsetBehindLastEntry();
    } else {
        // move everything including pos backwards
        System.arraycopy(buf.array(), offset, buf.array(),
                offset + keySerializer.getSerializedLength() + valueSerializer.getSerializedLength(),
                buf.capacity() - (offset + keySerializer.getSerializedLength()
                        + valueSerializer.getSerializedLength()));
    }

    // insert both
    buf.position(offset);
    buf.put(keySerializer.serialize(key));
    buf.put(valueSerializer.serialize(value));

    setNumberOfEntries(getNumberOfEntries() + 1);
}
From source file:com.linkedin.pinot.core.common.datatable.DataTableImplV2.java
/**
 * Construct data table from byte array. (broker side)
 */
public DataTableImplV2(@Nonnull ByteBuffer byteBuffer) throws IOException {
    // Read header.
    _numRows = byteBuffer.getInt();
    _numColumns = byteBuffer.getInt();
    int dictionaryMapStart = byteBuffer.getInt();
    int dictionaryMapLength = byteBuffer.getInt();
    int metadataStart = byteBuffer.getInt();
    int metadataLength = byteBuffer.getInt();
    int dataSchemaStart = byteBuffer.getInt();
    int dataSchemaLength = byteBuffer.getInt();
    int fixedSizeDataStart = byteBuffer.getInt();
    int fixedSizeDataLength = byteBuffer.getInt();
    int variableSizeDataStart = byteBuffer.getInt();
    int variableSizeDataLength = byteBuffer.getInt();

    // Read dictionary.
    if (dictionaryMapLength != 0) {
        byte[] dictionaryMapBytes = new byte[dictionaryMapLength];
        byteBuffer.position(dictionaryMapStart);
        byteBuffer.get(dictionaryMapBytes);
        _dictionaryMap = deserializeDictionaryMap(dictionaryMapBytes);
    } else {
        _dictionaryMap = null;
    }

    // Read metadata.
    byte[] metadataBytes = new byte[metadataLength];
    byteBuffer.position(metadataStart);
    byteBuffer.get(metadataBytes);
    _metadata = deserializeMetadata(metadataBytes);

    // Read data schema.
    if (dataSchemaLength != 0) {
        byte[] schemaBytes = new byte[dataSchemaLength];
        byteBuffer.position(dataSchemaStart);
        byteBuffer.get(schemaBytes);
        _dataSchema = DataSchema.fromBytes(schemaBytes);
        _columnOffsets = new int[_dataSchema.size()];
        _rowSizeInBytes = DataTableUtils.computeColumnOffsets(_dataSchema, _columnOffsets);
    } else {
        _dataSchema = null;
        _columnOffsets = null;
        _rowSizeInBytes = 0;
    }

    // Read fixed size data.
    if (fixedSizeDataLength != 0) {
        _fixedSizeDataBytes = new byte[fixedSizeDataLength];
        byteBuffer.position(fixedSizeDataStart);
        byteBuffer.get(_fixedSizeDataBytes);
        _fixedSizeData = ByteBuffer.wrap(_fixedSizeDataBytes);
    } else {
        _fixedSizeDataBytes = null;
        _fixedSizeData = null;
    }

    // Read variable size data.
    if (variableSizeDataLength != 0) {
        _variableSizeDataBytes = new byte[variableSizeDataLength];
        byteBuffer.position(variableSizeDataStart);
        byteBuffer.get(_variableSizeDataBytes);
        _variableSizeData = ByteBuffer.wrap(_variableSizeDataBytes);
    } else {
        _variableSizeDataBytes = null;
        _variableSizeData = null;
    }
}
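The constructor above illustrates a recurring pattern with position(int): a fixed-size header holds (start, length) pairs for each section, and the reader calls position(start) before each bulk get. Below is a stripped-down, hypothetical sketch of that same read pattern; the two-section layout and names here are illustrative only, not Pinot's actual wire format.

import java.nio.ByteBuffer;

// Hypothetical buffer layout:
// [int aStart][int aLength][int bStart][int bLength][...section data...]
public class OffsetDirectoryReader {
    static byte[] readSection(ByteBuffer buffer, int start, int length) {
        if (length == 0) {
            return null;             // section absent
        }
        byte[] bytes = new byte[length];
        buffer.position(start);      // absolute seek to the section
        buffer.get(bytes);           // relative bulk read advances position by 'length'
        return bytes;
    }

    public static void main(String[] args) {
        byte[] a = {1, 2, 3};
        byte[] b = {9, 8};
        ByteBuffer buf = ByteBuffer.allocate(16 + a.length + b.length);
        buf.putInt(16).putInt(a.length);            // section A: start, length
        buf.putInt(16 + a.length).putInt(b.length); // section B: start, length
        buf.put(a).put(b);

        System.out.println(readSection(buf, buf.getInt(0), buf.getInt(4)).length);  // 3
        System.out.println(readSection(buf, buf.getInt(8), buf.getInt(12)).length); // 2
    }
}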
From source file:com.slytechs.capture.file.editor.AbstractRawIterator.java
/**
 * Searches for a packet record start within the file. If the record header is
 * not found exactly at the specified offset, the search is repeated by
 * starting the match at the offset + 1, incrementing the offset until a match
 * is found or maxSearch has been reached.
 *
 * @param buffer
 *          buffer to scan for a record header match.
 * @param index
 *          offset within the buffer to start the search at. This is the first
 *          byte to search for a record header match.
 * @param maxSearch
 *          a limit on the search. The search will be performed within the
 *          window of index <= search < (index + maxSearch)
 * @return exact offset into the capture file of the start of the next record
 *         header. -1 indicates that no record header was found at the offset
 *         and within the limit set by maxSearch bytes.
 * @throws EOFException
 *           end of file has been reached before the header could be matched.
 *           This indicates that no positive match was made.
 * @throws IOException
 *           any IO errors
 */
public long searchForRecordStart(final ByteBuffer buffer, final int index, final int maxSearch)
        throws EOFException, IOException {

    final int l = index + maxSearch - this.pattern.minLength();
    for (int i = index; i < l; i++) {
        buffer.position(i);
        buffer.mark();

        if (this.pattern.match(buffer) && verifyAdditionalRecords(buffer, 5)) {
            return i;
        }
    }

    return -1;
}
From source file:com.koda.integ.hbase.blockcache.OffHeapBlockCache.java
/**
 * Store external with codec.
 * Format:
 * 0..3     - total record size (-4)
 * 4..7     - size of a key in bytes (16 if use hash128)
 * 8..x     - key data
 * x+1..x+1 - IN_MEMORY flag (1 - in memory, 0 - not)
 * x+2...   - block, serialized and compressed
 *
 * @param blockName the block name
 * @param buf the buf
 * @param inMemory the in memory
 * @throws IOException Signals that an I/O exception has occurred.
 */
private void storeExternalWithCodec(String blockName, Cacheable buf, boolean inMemory) throws IOException {
    // If external storage is disabled - bail out
    if (overflowExtEnabled == false) {
        return;
    }
    byte[] hashed = Utils.hash128(blockName);

    ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress().getBuffer();
    deserializer.set(buf.getDeserializer());

    SerDe serde = extStorageCache.getSerDe();
    Codec codec = extStorageCache.getCompressionCodec();

    buffer.clear();
    buffer.position(4);

    // Save key
    buffer.putInt(hashed.length);
    buffer.put(hashed);
    buffer.put(inMemory ? (byte) 1 : (byte) 0);

    if (buf != null) {
        serde.writeCompressed(buffer, buf, codec);
        int pos = buffer.position();
        buffer.putInt(0, pos - 4);
    }
    buffer.flip();

    StorageHandle handle = storage.storeData(buffer);

    try {
        // WE USE byte array as a key
        extStorageCache.put(hashed, handle.toBytes());
    } catch (Exception e) {
        throw new IOException(e);
    }
}
From source file:com.koda.integ.hbase.storage.FileExtStorage.java
/**
 * Stores multiple objects in one transaction.
 * Format of a buffer:
 * 0..3 - total size of a batch
 * 4..  - batch of blocks
 *
 * @param buf the buf
 * @return the list
 */
public List<StorageHandle> storeDataBatch(ByteBuffer buf) {
    List<StorageHandle> handles = storeDataNoReleaseLock(buf);

    if (handles == null) {
        handles = new ArrayList<StorageHandle>();
        int size = buf.getInt(0);
        buf.position(4);

        while (buf.position() < size + 4) {
            buf.limit(buf.capacity());
            StorageHandle fsh = storeData(buf);
            handles.add(fsh);
        }
    }
    return handles;
}
From source file:com.healthmarketscience.jackcess.Database.java
/**
 * Returns the password mask retrieved from the given header page and
 * format, or {@code null} if this format does not use a password mask.
 */
static byte[] getPasswordMask(ByteBuffer buffer, JetFormat format) {
    // get extra password mask if necessary (the extra password mask is
    // generated from the database creation date stored in the header)
    int pwdMaskPos = format.OFFSET_HEADER_DATE;
    if (pwdMaskPos < 0) {
        return null;
    }

    buffer.position(pwdMaskPos);
    double dateVal = Double.longBitsToDouble(buffer.getLong());

    byte[] pwdMask = new byte[4];
    ByteBuffer.wrap(pwdMask).order(PageChannel.DEFAULT_BYTE_ORDER).putInt((int) dateVal);

    return pwdMask;
}
From source file:morphy.service.SocketConnectionService.java
protected synchronized String readMessage(SocketChannel channel) {
    try {
        ByteBuffer buffer = ByteBuffer.allocate(maxCommunicationSizeBytes);
        int charsRead = -1;
        try {
            charsRead = channel.read(buffer);
        } catch (IOException cce) {
            if (channel.isOpen()) {
                channel.close();
                if (LOG.isInfoEnabled()) {
                    LOG.info("Closed channel " + channel);
                }
            }
        }
        if (charsRead == -1) {
            return null;
        } else if (charsRead > 0) {
            buffer.flip();

            Charset charset = Charset.forName(Morphy.getInstance().getMorphyPreferences()
                    .getString(PreferenceKeys.SocketConnectionServiceCharEncoding));

            SocketChannelUserSession socketChannelUserSession = socketToSession.get(channel.socket());

            byte[] bytes = buffer.array();
            buffer.position(0);

            System.out.println("IN: " + new String(bytes).trim());

            if (looksLikeTimesealInit(bytes)) {
                if (socketChannelUserSession.usingTimeseal == false) { // First time?
                    socketChannelUserSession.usingTimeseal = true;
                    return MSG_TIMESEAL_OK;
                }
            }

            if (socketChannelUserSession.usingTimeseal) {
                /*
                 * Clients may pass multiple Timeseal-encoded messages at once.
                 * We need to pass each separated message to the Timeseal decoder as necessary.
                 */
                byte[] bytesToDecode = Arrays.copyOfRange(bytes, 0, charsRead - 1 /* \n or 10 */);
                byte[][] splitBytes = TimesealCoder.splitBytes(bytesToDecode, (byte) 10);

                buffer = ByteBuffer.allocate(bytesToDecode.length);
                buffer.position(0);
                for (int i = 0; i < splitBytes.length; i++) {
                    byte[] splitBytesToDecode = splitBytes[i];
                    TimesealParseResult parseResult = timesealCoder.decode(splitBytesToDecode);
                    if (parseResult != null) {
                        System.out.println(parseResult.getTimestamp());
                        parseResult.setMessage(parseResult.getMessage() + "\n");
                        System.out.println(parseResult.getMessage());

                        buffer.put(parseResult.getMessage().getBytes(charset));
                    }
                }
                //buffer.position(0);
                buffer.flip();
            }

            CharsetDecoder decoder = charset.newDecoder();
            CharBuffer charBuffer = decoder.decode(buffer);
            String message = charBuffer.toString();
            return message;
            //System.out.println(message);
            //return "";
        } else {
            return "";
        }
    } catch (Throwable t) {
        if (LOG.isErrorEnabled())
            LOG.error("Error reading SocketChannel " + channel.socket().getLocalAddress(), t);
        return null;
    }
}
From source file:com.linkedin.pinot.common.utils.DataTable.java
private void deserializeDataTable(ByteBuffer input) {
    numRows = input.getInt();
    numCols = input.getInt();

    // READ dictionary
    final int dictionaryStart = input.getInt();
    final int dictionaryLength = input.getInt();
    final int metadataStart = input.getInt();
    final int metadataLength = input.getInt();
    final int schemaStart = input.getInt();
    final int schemaLength = input.getInt();
    final int fixedDataStart = input.getInt();
    final int fixedDataLength = input.getInt();
    final int variableDataStart = input.getInt();
    final int variableDataLength = input.getInt();

    // READ DICTIONARY
    byte[] dictionaryBytes = null;
    if (dictionaryLength != 0) {
        dictionaryBytes = new byte[dictionaryLength];
        input.position(dictionaryStart);
        input.get(dictionaryBytes);
        dictionary = (Map<String, Map<Integer, String>>) deserializeDictionary(dictionaryBytes);
    } else {
        dictionary = new HashMap<String, Map<Integer, String>>(1);
    }

    // READ METADATA
    byte[] metadataBytes;
    if (metadataLength != 0) {
        metadataBytes = new byte[metadataLength];
        input.position(metadataStart);
        input.get(metadataBytes);
        metadata = (Map<String, String>) deserializeMetadata(metadataBytes);
    } else {
        metadata = new HashMap<String, String>();
    }

    // READ SCHEMA
    byte[] schemaBytes;
    if (schemaLength != 0) {
        schemaBytes = new byte[schemaLength];
        input.position(schemaStart);
        input.get(schemaBytes);
        schema = DataSchema.fromBytes(schemaBytes);
        columnOffsets = computeColumnOffsets(schema);
    }

    // READ FIXED SIZE DATA BYTES
    if (fixedDataLength != 0) {
        fixedSizeDataBytes = new byte[fixedDataLength];
        input.position(fixedDataStart);
        input.get(fixedSizeDataBytes);
        fixedSizeData = ByteBuffer.wrap(fixedSizeDataBytes);
    }

    // READ VARIABLE SIZE DATA BYTES
    if (variableDataLength != 0) {
        variableSizeDataBytes = new byte[variableDataLength];
        input.position(variableDataStart);
        input.get(variableSizeDataBytes);
        variableSizeData = ByteBuffer.wrap(variableSizeDataBytes);
    }
}
From source file:au.org.ala.delta.intkey.model.IntkeyDatasetFileReader.java
/**
 * Read attributes from the items file
 *
 * @param itemFileHeader
 *            item file header
 * @param itemBinFile
 *            item file data
 * @param c
 *            character that we want attributes for
 * @param taxa
 *            taxa that we want attributes for
 * @return a list of attributes for the supplied character and taxa.
 */
private static List<Attribute> readAttributes(ItemsFileHeader itemFileHeader, BinFile itemBinFile, Character c,
        List<Item> taxa) {
    List<Attribute> retList = new ArrayList<Attribute>();

    int totalNumChars = itemFileHeader.getNChar();
    int totalNumTaxa = itemFileHeader.getNItem();

    seekToRecord(itemBinFile, itemFileHeader.getRpCdat());
    List<Integer> charAttributeDataRecordIndicies = readIntegerList(itemBinFile, totalNumChars);

    // Subtract 1 from the charNo because characters are zero indexed in intkey API
    int charNo = c.getCharacterId();
    int charTaxonDataRecordIndex = charAttributeDataRecordIndicies.get(charNo - 1);

    seekToRecord(itemBinFile, charTaxonDataRecordIndex);

    if (c instanceof MultiStateCharacter) {
        MultiStateCharacter multiStateChar = (MultiStateCharacter) c;

        int bitsPerTaxon = multiStateChar.getStates().length + 1;
        int totalBitsNeeded = bitsPerTaxon * totalNumTaxa;

        int bytesToRead = Double.valueOf(Math.ceil(Double.valueOf(totalBitsNeeded) / Double.valueOf(Byte.SIZE)))
                .intValue();

        byte[] bytes = new byte[bytesToRead];
        itemBinFile.readBytes(bytes);
        boolean[] taxaData = Utils.byteArrayToBooleanArray(bytes);

        for (Item t : taxa) {
            // Taxa numbers are 1 indexed instead of 0 indexed
            int startIndex = (t.getItemNumber() - 1) * bitsPerTaxon;
            int endIndex = startIndex + bitsPerTaxon;

            boolean[] taxonData = Arrays.copyOfRange(taxaData, startIndex, endIndex);

            // Taxon data consists of a bit for each state, indicating the state's presence,
            // followed by a final bit signifying whether or not the character is
            // inapplicable for the taxon.
            boolean inapplicable = taxonData[taxonData.length - 1];

            HashSet<Integer> presentStates = new HashSet<Integer>();
            for (int k = 0; k < taxonData.length - 1; k++) {
                boolean statePresent = taxonData[k];
                if (statePresent) {
                    presentStates.add(k + 1);
                }
            }

            SimpleAttributeData attrData = new SimpleAttributeData(presentStates.isEmpty(), inapplicable);
            MultiStateAttribute msAttr = new MultiStateAttribute(multiStateChar, attrData);
            msAttr.setItem(t);
            msAttr.setPresentStates(presentStates);

            retList.add(msAttr);
        }
    } else if (c instanceof IntegerCharacter) {
        IntegerCharacter intChar = (IntegerCharacter) c;
        int charMinValue = intChar.getMinimumValue();
        int charMaxValue = intChar.getMaximumValue();

        // 1 bit for all values below minimum, 1 bit for each value between
        // minimum and maximum (inclusive),
        // 1 bit for all values above maximum, 1 inapplicability bit.
        int bitsPerTaxon = charMaxValue - charMinValue + 4;
        int totalBitsNeeded = bitsPerTaxon * totalNumTaxa;

        int bytesToRead = Double.valueOf(Math.ceil(Double.valueOf(totalBitsNeeded) / Double.valueOf(Byte.SIZE)))
                .intValue();

        byte[] bytes = new byte[bytesToRead];
        itemBinFile.readBytes(bytes);
        boolean[] taxaData = Utils.byteArrayToBooleanArray(bytes);

        for (Item t : taxa) {
            // Taxa numbers are 1 indexed instead of 0 indexed
            int startIndex = (t.getItemNumber() - 1) * bitsPerTaxon;
            int endIndex = startIndex + bitsPerTaxon;

            boolean[] taxonData = Arrays.copyOfRange(taxaData, startIndex, endIndex);

            boolean inapplicable = taxonData[taxonData.length - 1];

            Set<Integer> presentValues = new HashSet<Integer>();
            for (int k = 0; k < taxonData.length - 1; k++) {
                boolean present = taxonData[k];
                if (present) {
                    presentValues.add(k + charMinValue - 1);
                }
            }

            IntegerAttribute intAttr = new IntegerAttribute(intChar,
                    new SimpleAttributeData(presentValues.isEmpty(), inapplicable));
            intAttr.setItem(t);
            intAttr.setPresentValues(presentValues);

            retList.add(intAttr);
        }
    } else if (c instanceof RealCharacter) {
        // Read NI inapplicability bits
        int bytesToRead = Double.valueOf(Math.ceil(Double.valueOf(totalNumTaxa) / Double.valueOf(Byte.SIZE)))
                .intValue();
        byte[] bytes = new byte[bytesToRead];
        itemBinFile.readBytes(bytes);
        boolean[] taxaInapplicabilityData = Utils.byteArrayToBooleanArray(bytes);

        int recordsSpannedByInapplicabilityData = recordsSpannedByBytes(bytesToRead);

        seekToRecord(itemBinFile, charTaxonDataRecordIndex + recordsSpannedByInapplicabilityData);

        // Read two float values per taxon
        List<Float> taxonData = readFloatList(itemBinFile, totalNumTaxa * 2);

        for (Item t : taxa) {
            int taxonNumber = t.getItemNumber();

            float lowerFloat = taxonData.get((taxonNumber - 1) * 2);
            float upperFloat = taxonData.get(((taxonNumber - 1) * 2) + 1);

            boolean inapplicable = taxaInapplicabilityData[taxonNumber - 1];

            // Character is unknown for the corresponding taxon if lowerFloat > upperFloat
            boolean unknown = lowerFloat > upperFloat;

            RealAttribute realAttr = new RealAttribute((RealCharacter) c,
                    new SimpleAttributeData(unknown, inapplicable));

            if (!unknown) {
                FloatRange range = new FloatRange(lowerFloat, upperFloat);
                realAttr.setPresentRange(range);
            }
            realAttr.setItem(t);

            retList.add(realAttr);
        }
    } else if (c instanceof TextCharacter) {
        TextCharacter textChar = (TextCharacter) c;

        // Read NI inapplicability bits
        int bytesToRead = Double.valueOf(Math.ceil(Double.valueOf(totalNumTaxa) / Double.valueOf(Byte.SIZE)))
                .intValue();
        byte[] bytes = new byte[bytesToRead];
        itemBinFile.readBytes(bytes);
        boolean[] taxaInapplicabilityData = Utils.byteArrayToBooleanArray(bytes);

        int recordsSpannedByInapplicabilityData = recordsSpannedByBytes(bytesToRead);

        seekToRecord(itemBinFile, charTaxonDataRecordIndex + recordsSpannedByInapplicabilityData);

        List<Integer> taxonTextDataOffsets = readIntegerList(itemBinFile, totalNumTaxa + 1);

        int recordsSpannedByOffsets = recordsSpannedByBytes((totalNumTaxa + 1) * Constants.SIZE_INT_IN_BYTES);

        seekToRecord(itemBinFile,
                charTaxonDataRecordIndex + recordsSpannedByInapplicabilityData + recordsSpannedByOffsets);

        ByteBuffer taxonTextData = itemBinFile.readByteBuffer(
                taxonTextDataOffsets.get(taxonTextDataOffsets.size() - 1) - taxonTextDataOffsets.get(0));

        for (Item t : taxa) {
            int taxonNumber = t.getItemNumber();

            int lowerOffset = taxonTextDataOffsets.get(taxonNumber - 1);
            int upperOffset = taxonTextDataOffsets.get((taxonNumber - 1) + 1);
            int textLength = upperOffset - lowerOffset;

            String txt = "";
            if (textLength > 0) {
                byte[] textBytes = new byte[textLength];
                taxonTextData.position(lowerOffset - 1);
                taxonTextData.get(textBytes);

                txt = BinFileEncoding.decode(textBytes);
            }

            boolean inapplicable = taxaInapplicabilityData[taxonNumber - 1];
            boolean unknown = StringUtils.isEmpty(txt);

            TextAttribute txtAttr = new TextAttribute(textChar, new SimpleAttributeData(unknown, inapplicable));
            try {
                txtAttr.setText(txt);
            } catch (DirectiveException e) {
                // The SimpleAttributeData implementation won't throw this Exception.
            }

            txtAttr.setItem(t);

            retList.add(txtAttr);
        }
    }

    return retList;
}