List of usage examples for java.nio ByteOrder LITTLE_ENDIAN
ByteOrder LITTLE_ENDIAN
To view the source code for java.nio ByteOrder LITTLE_ENDIAN, click the Source Link.
From source file:nodomain.freeyourgadget.gadgetbridge.service.devices.pebble.PebbleProtocol.java
/**
 * Builds a packet for the legacy Pebble "extensible notifications" endpoint
 * (ENDPOINT_EXTENSIBLENOTIFS).
 *
 * The packet layout is: a big-endian length/endpoint prefix, followed by a
 * little-endian body containing a fixed header, the notification attributes
 * (title/subtitle/body as Pascal-style strings), and a list of actions
 * (dismiss, and optionally open/mute and canned replies).
 *
 * @param id            notification id echoed back by the watch on actions
 * @param timestamp     notification timestamp (seconds, as written to the wire)
 * @param title         notification title (may be null/empty; then skipped)
 * @param subtitle      notification subtitle (may be null/empty; then skipped)
 * @param body          notification body (may be null/empty; then skipped)
 * @param sourceName    originating app name; appended to the "Mute" label
 * @param hasHandle     true if the phone can act on the notification (enables
 *                      the "open" and "mute" actions)
 * @param cannedReplies optional predefined reply strings; adds a reply action
 * @return the encoded packet, ready to send
 */
private byte[] encodeExtensibleNotification(int id, int timestamp, String title, String subtitle, String body,
        String sourceName, boolean hasHandle, String[] cannedReplies) {
    final short ACTION_LENGTH_MIN = 10;
    String[] parts = { title, subtitle, body };
    // --- Pass 1: compute the total payload length before allocating. ---
    byte actions_count;
    short actions_length;
    String dismiss_string;
    String open_string = "Open on phone";
    String mute_string = "Mute";
    String reply_string = "Reply";
    if (sourceName != null) {
        mute_string += " " + sourceName;
    }
    byte dismiss_action_id;
    // Alarm-clock notifications get only a "Dismiss all" action; everything
    // else with a handle also gets "open" and "mute".
    if (hasHandle && !"ALARMCLOCKRECEIVER".equals(sourceName)) {
        actions_count = 3;
        dismiss_string = "Dismiss";
        dismiss_action_id = 0x02;
        actions_length = (short) (ACTION_LENGTH_MIN * actions_count + dismiss_string.getBytes().length
                + open_string.getBytes().length + mute_string.getBytes().length);
    } else {
        actions_count = 1;
        dismiss_string = "Dismiss all";
        dismiss_action_id = 0x03;
        actions_length = (short) (ACTION_LENGTH_MIN * actions_count + dismiss_string.getBytes().length);
    }
    // Starts at -1 because each reply contributes len+1 (zero terminator) but
    // the last reply is written without a terminator (see encoding below).
    int replies_length = -1;
    if (cannedReplies != null && cannedReplies.length > 0) {
        actions_count++;
        for (String reply : cannedReplies) {
            replies_length += reply.getBytes().length + 1;
        }
        actions_length += ACTION_LENGTH_MIN + reply_string.getBytes().length + replies_length + 3; // 3 = attribute id (byte) + length(short)
    }
    byte attributes_count = 0;
    int length = 21 + 10 + actions_length; // 21 = fixed header, 10 = ? — from original; not derived here
    if (parts != null) {
        for (String s : parts) {
            if (s == null || s.equals("")) {
                continue;
            }
            attributes_count++;
            length += (3 + s.getBytes().length); // 3 = attribute id (byte) + length (short)
        }
    }
    // --- Pass 2: encode. Prefix is big-endian, body is little-endian. ---
    ByteBuffer buf = ByteBuffer.allocate(length + LENGTH_PREFIX);
    buf.order(ByteOrder.BIG_ENDIAN);
    buf.putShort((short) (length));
    buf.putShort(ENDPOINT_EXTENSIBLENOTIFS);
    buf.order(ByteOrder.LITTLE_ENDIAN); // body is little-endian from here on
    buf.put((byte) 0x00); // unknown/reserved (original comment: "?")
    buf.put((byte) 0x01); // add notifications
    buf.putInt(0x00000000); // flags - meaning unknown in original
    buf.putInt(id);
    buf.putInt(0x00000000); // ANCS id
    buf.putInt(timestamp);
    buf.put((byte) 0x01); // layout - meaning unknown in original
    buf.put(attributes_count);
    buf.put(actions_count);
    byte attribute_id = 0;
    // Attributes as Pascal-style strings: id byte, 16-bit length, raw bytes.
    // NOTE: attribute_id increments even for skipped (empty) parts, so ids
    // stay aligned with title=1, subtitle=2, body=3.
    if (parts != null) {
        for (String s : parts) {
            attribute_id++;
            if (s == null || s.equals("")) {
                continue;
            }
            int partlength = s.getBytes().length;
            if (partlength > 255)
                partlength = 255; // truncate over-long attributes
            buf.put(attribute_id);
            buf.putShort((short) partlength);
            buf.put(s.getBytes(), 0, partlength);
        }
    }
    // Dismiss action: id, type 0x04 (dismiss), one attribute (the title).
    buf.put(dismiss_action_id);
    buf.put((byte) 0x04); // dismiss
    buf.put((byte) 0x01); // number attributes
    buf.put((byte) 0x01); // attribute id (title)
    buf.putShort((short) dismiss_string.getBytes().length);
    buf.put(dismiss_string.getBytes());
    // Open and mute actions (generic type 0x02), same single-title shape.
    if (hasHandle && !"ALARMCLOCKRECEIVER".equals(sourceName)) {
        buf.put((byte) 0x01);
        buf.put((byte) 0x02); // generic
        buf.put((byte) 0x01); // number attributes
        buf.put((byte) 0x01); // attribute id (title)
        buf.putShort((short) open_string.getBytes().length);
        buf.put(open_string.getBytes());
        buf.put((byte) 0x04);
        buf.put((byte) 0x02); // generic
        buf.put((byte) 0x01); // number attributes
        buf.put((byte) 0x01); // attribute id (title)
        buf.putShort((short) mute_string.getBytes().length);
        buf.put(mute_string.getBytes());
    }
    // Reply action: title attribute plus a canned-replies attribute (0x08)
    // holding the zero-separated reply strings.
    if (cannedReplies != null && replies_length > 0) {
        buf.put((byte) 0x05);
        buf.put((byte) 0x03); // reply action
        buf.put((byte) 0x02); // number attributes
        buf.put((byte) 0x01); // title
        buf.putShort((short) reply_string.getBytes().length);
        buf.put(reply_string.getBytes());
        buf.put((byte) 0x08); // canned replies
        buf.putShort((short) replies_length);
        for (int i = 0; i < cannedReplies.length - 1; i++) {
            buf.put(cannedReplies[i].getBytes());
            buf.put((byte) 0x00);
        }
        // last one must not be zero terminated, else we get an additional empty reply
        buf.put(cannedReplies[cannedReplies.length - 1].getBytes());
    }
    return buf.array();
}
From source file:trendplot.TrendPlot.java
/**
 * Reads the binary trend file and populates the {@code minList} and
 * {@code maxList} fields, sorted ascending.
 *
 * <p>The file is a flat sequence of records of four little-endian 32-bit
 * floats: time, minimum, mean, maximum. Only min and max of each complete
 * record are kept; time and mean are read (to advance the stream) and
 * discarded. A short or failed read ends the loop, silently dropping any
 * trailing partial record — same behavior as before.
 *
 * <p>Fixes over the original: removed the unused {@code datFile} local,
 * the redundant manual {@code close()} calls inside try-with-resources,
 * the dead {@code catch (EOFException)} (stream reads return -1 at EOF,
 * they do not throw), and the pointless catch-and-rethrow of IOException.
 *
 * @param datFileName path of the binary data file to read
 * @throws FileNotFoundException if the file cannot be opened
 * @throws IOException on any other read failure
 */
private void makeHistogram(String datFileName) throws FileNotFoundException, IOException {
    final int FLOAT_BYTES = Float.SIZE / 8;
    minList = new ArrayList<>();
    maxList = new ArrayList<>();
    try (BufferedInputStream ins = new BufferedInputStream(new FileInputStream(datFileName))) {
        byte[] b = new byte[FLOAT_BYTES];
        boolean eof = false;
        while (!eof) {
            // One record = 4 floats. Sum the byte counts so a short read
            // anywhere in the record marks it incomplete.
            int nread = ins.read(b);
            ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN).getFloat(); // time, unused
            nread += ins.read(b);
            float inmin = ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN).getFloat();
            nread += ins.read(b);
            ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN).getFloat(); // mean, unused
            nread += ins.read(b);
            float inmax = ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN).getFloat();
            if (nread == FLOAT_BYTES * 4) {
                minList.add(inmin);
                maxList.add(inmax);
            } else {
                // End of file (or trailing partial record): stop reading.
                eof = true;
            }
        }
    } // try-with-resources closes the stream on all paths, including exceptions
    Collections.sort(minList);
    Collections.sort(maxList);
}
From source file:ome.io.bioformats.BfPyramidPixelBuffer.java
/**
 * Fetches a tile from the delegate buffer, collapsing (z, c, t) onto the
 * rasterized T axis (z and c are always passed to the delegate as 0), and
 * marks the returned pixel data as little-endian.
 *
 * @throws IOException if the delegate read fails
 */
public synchronized PixelData getTile(Integer z, Integer c, Integer t, Integer x, Integer y, Integer w,
        Integer h) throws IOException {
    checkTileParameters(x, y, w, h);
    // Map the logical plane coordinates onto the rasterized time axis.
    Integer rasterizedT = getRasterizedT(z, c, t);
    PixelData tile = delegate().getTile(0, 0, rasterizedT, x, y, w, h);
    tile.setOrder(ByteOrder.LITTLE_ENDIAN);
    return tile;
}
From source file:edu.vu.isis.ammo.dash.provider.IncidentSyncAdaptor.java
public ArrayList<File> mediaSerialize(Cursor cursor) { logger.debug("::mediaSerialize"); ArrayList<File> paths = new ArrayList<File>(); if (1 > cursor.getCount()) return paths; ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream eos = new DataOutputStream(baos); for (boolean more = cursor.moveToFirst(); more; more = cursor.moveToNext()) { MediaWrapper iw = new MediaWrapper(); iw.setEventId(cursor.getString(cursor.getColumnIndex(MediaTableSchemaBase.EVENT_ID))); iw.setDataType(cursor.getString(cursor.getColumnIndex(MediaTableSchemaBase.DATA_TYPE))); iw.setData(cursor.getString(cursor.getColumnIndex(MediaTableSchemaBase.DATA))); iw.setCreatedDate(cursor.getLong(cursor.getColumnIndex(MediaTableSchemaBase.CREATED_DATE))); iw.setModifiedDate(cursor.getLong(cursor.getColumnIndex(MediaTableSchemaBase.MODIFIED_DATE))); iw.set_ReceivedDate(cursor.getLong(cursor.getColumnIndex(MediaTableSchemaBase._RECEIVED_DATE))); iw.set_Disposition(cursor.getInt(cursor.getColumnIndex(MediaTableSchemaBase._DISPOSITION))); Gson gson = new Gson(); try {/* w w w . j a v a 2s. 
c o m*/ eos.writeBytes(gson.toJson(iw)); eos.writeByte(0); } catch (IOException ex) { ex.printStackTrace(); } // not a reference field name :event id eventId event_id\n try { String fileName = iw.getData(); File dataFile = new File(fileName); int dataSize = (int) dataFile.length(); byte[] buffData = new byte[dataSize]; FileInputStream fileStream = new FileInputStream(dataFile); int ret = 0; for (int position = 0; (ret > -1 && dataSize > position); position += ret) { ret = fileStream.read(buffData, position, dataSize - position); } fileStream.close(); eos.writeBytes("data"); eos.writeByte(0); ByteBuffer dataSizeBuf = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE); dataSizeBuf.order(ByteOrder.LITTLE_ENDIAN); dataSizeBuf.putInt(dataSize); // write the media back out eos.write(dataSizeBuf.array()); eos.write(buffData); eos.write(dataSizeBuf.array()); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } // not a reference field name :created date createdDate created_date\n // not a reference field name :modified date modifiedDate modified_date\n // MediaTableSchemaBase._DISPOSITION; // try { // TODO write to content provider using openFile // if (!applCacheMediaDir.exists() ) applCacheMediaDir.mkdirs(); // File outfile = new File(applCacheMediaDir, Integer.toHexString((int) System.currentTimeMillis())); // BufferedOutputStream bufferedOutput = new BufferedOutputStream(new FileOutputStream(outfile), 8192); // bufferedOutput.write(baos.toByteArray()); // bufferedOutput.flush(); // bufferedOutput.close(); // } catch (FileNotFoundException e) { // e.printStackTrace(); // } catch (IOException e) { // e.printStackTrace(); // } } return paths; }
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReader.java
/**
 * Decodes "Record Type 1" — the fixed 172-byte SPSS .sav file header — from
 * the given stream and stores the results in instance state
 * ({@code dataTable}, {@code isLittleEndian}, {@code OBSUnitsPerCase},
 * {@code isDataSectionCompressed}, {@code caseWeightVariableOBSIndex},
 * {@code spssVersionNumber}, {@code defaultCharSet}).
 *
 * Layout (172 bytes = 60 + 4 + 4 + 4 + 4 + 4 + 8 + 84):
 *   1.1 product info, 1.2 file layout code (detects endianness),
 *   1.3 OBS units per case, 1.4 compression switch, 1.5 case-weight index,
 *   1.6 number of cases, 1.7 compression bias, 1.8 creation date/time/label.
 *
 * @param stream positioned at the start of the header
 * @throws IOException if the header cannot be read or the layout code is unknown
 * @throws IllegalArgumentException if stream is null
 */
void decodeRecordType1(BufferedInputStream stream) throws IOException {
    dbgLog.fine("***** decodeRecordType1(): start *****");
    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }
    // Read the whole fixed-size header into one working buffer, then slice
    // it field by field with a running offset.
    byte[] recordType1 = new byte[LENGTH_RECORDTYPE1];
    // int caseWeightVariableOBSIndex = 0;
    try {
        // NOTE(review): a single read() may return fewer than
        // LENGTH_RECORDTYPE1 bytes; only nbytes == 0 is checked here.
        int nbytes = stream.read(recordType1, 0, LENGTH_RECORDTYPE1);
        //printHexDump(recordType1, "recordType1");
        if (nbytes == 0) {
            throw new IOException("reading recordType1: no byte was read");
        }
        // 1.1 60-byte string identifying the SPSS product/version that wrote
        // this file.
        int offset_start = 0;
        int offset_end = LENGTH_SPSS_PRODUCT_INFO; // 60 bytes
        String productInfo = new String(Arrays.copyOfRange(recordType1, offset_start, offset_end), "US-ASCII");
        dbgLog.fine("productInfo:\n" + productInfo + "\n");
        dataTable.setOriginalFormatVersion(productInfo);
        // Try the "old style" version tag first: "... Release N".
        String spssVersionTag = null;
        String regexpVersionNumber = ".*Release ([0-9]*)";
        Pattern versionTagPattern = Pattern.compile(regexpVersionNumber);
        Matcher matcher = versionTagPattern.matcher(productInfo);
        if (matcher.find()) {
            spssVersionTag = matcher.group(1);
            dbgLog.fine("SPSS Version Number: " + spssVersionTag);
        }
        // Fall back to the "new style" IBM SPSS STATISTICS line, which also
        // carries a platform tag and a minor version.
        if (spssVersionTag == null || spssVersionTag.equals("")) {
            regexpVersionNumber = ".* IBM SPSS STATISTICS.* ([^ ]*) ([0-9][0-9]*)([^ ]*)";
            versionTagPattern = Pattern.compile(regexpVersionNumber);
            matcher = versionTagPattern.matcher(productInfo);
            if (matcher.find()) {
                String spssPlatformTag = matcher.group(1);
                spssVersionTag = matcher.group(2);
                String spssVersionTagMinor = matcher.group(3);
                dbgLog.fine("SPSS Version Number (new style): " + spssVersionTag);
                dbgLog.fine("SPSS Version/Platform Identification (new style:) " + spssPlatformTag + " "
                        + spssVersionTag + spssVersionTagMinor);
                dataTable
                        .setOriginalFormatVersion(spssVersionTag + spssVersionTagMinor + " " + spssPlatformTag);
            }
        }
        if (spssVersionTag != null && !spssVersionTag.equals("")) {
            spssVersionNumber = Integer.valueOf(spssVersionTag).intValue();
            // Starting with SPSS 16 the default encoding is UTF-8; only use
            // it if the user did not explicitly choose an encoding (then
            // getDataLanguageEncoding() is non-null and takes precedence).
            if (spssVersionNumber > 15) {
                if (getDataLanguageEncoding() == null) {
                    defaultCharSet = "UTF-8";
                }
            }
        }
        // 1.2 4-byte file layout code, used to detect the writer's byte
        // order: the value is 2 or 3 when read in the writer's own order.
        offset_start = offset_end;
        offset_end += LENGTH_FILE_LAYOUT_CODE; // 4 byte
        ByteBuffer bb_fileLayout_code = ByteBuffer.wrap(recordType1, offset_start, LENGTH_FILE_LAYOUT_CODE);
        // (variable name typo "byteOderTest" kept as-is from the original)
        ByteBuffer byteOderTest = bb_fileLayout_code.duplicate();
        // First interpret the 4 bytes as a big-endian int (Java default).
        int int2test = byteOderTest.getInt();
        if (int2test == 2 || int2test == 3) {
            dbgLog.fine("integer == " + int2test + ": the byte-oder of the writer is the same "
                    + "as the counterpart of Java: Big Endian");
        } else {
            // Not 2/3 in big-endian: the file was written on a little-endian
            // machine, so all multi-byte non-string fields must be reversed.
            bb_fileLayout_code.order(ByteOrder.LITTLE_ENDIAN);
            int2test = bb_fileLayout_code.getInt();
            if (int2test == 2 || int2test == 3) {
                dbgLog.fine("The sav file was saved on a little endian machine");
                dbgLog.fine("Reveral of the bytes is necessary to decode " + "multi-byte, non-string blocks");
                isLittleEndian = true;
            } else {
                throw new IOException("reading recordType1:unknown file layout code=" + int2test);
            }
        }
        dbgLog.fine("Endian of this platform:" + ByteOrder.nativeOrder().toString());
        // 1.3 4-byte number of OBS units per case (= how many RT2 records,
        // i.e. how many variables).
        offset_start = offset_end;
        offset_end += LENGTH_NUMBER_OF_OBS_UNITS_PER_CASE; // 4 byte
        ByteBuffer bb_OBS_units_per_case = ByteBuffer.wrap(recordType1, offset_start,
                LENGTH_NUMBER_OF_OBS_UNITS_PER_CASE);
        if (isLittleEndian) {
            bb_OBS_units_per_case.order(ByteOrder.LITTLE_ENDIAN);
        }
        OBSUnitsPerCase = bb_OBS_units_per_case.getInt();
        dbgLog.fine("RT1: OBSUnitsPerCase=" + OBSUnitsPerCase);
        // 1.4 4-byte compression switch (0 = data section not compressed).
        offset_start = offset_end;
        offset_end += LENGTH_COMPRESSION_SWITCH; // 4 byte
        ByteBuffer bb_compression_switch = ByteBuffer.wrap(recordType1, offset_start,
                LENGTH_COMPRESSION_SWITCH);
        if (isLittleEndian) {
            bb_compression_switch.order(ByteOrder.LITTLE_ENDIAN);
        }
        int compression_switch = bb_compression_switch.getInt();
        if (compression_switch == 0) {
            // data section is not compressed
            isDataSectionCompressed = false;
            dbgLog.fine("data section is not compressed");
        } else {
            dbgLog.fine("data section is compressed:" + compression_switch);
        }
        // 1.5 4-byte case-weight variable index.
        // Warning: this variable index starts from 1, not 0. It is used
        // later to mark the weight variable.
        offset_start = offset_end;
        offset_end += LENGTH_CASE_WEIGHT_VARIABLE_INDEX; // 4 byte
        ByteBuffer bb_Case_Weight_Variable_Index = ByteBuffer.wrap(recordType1, offset_start,
                LENGTH_CASE_WEIGHT_VARIABLE_INDEX);
        if (isLittleEndian) {
            bb_Case_Weight_Variable_Index.order(ByteOrder.LITTLE_ENDIAN);
        }
        caseWeightVariableOBSIndex = bb_Case_Weight_Variable_Index.getInt();
        // 1.6 4-byte number of cases (-1 means unknown, rejected below).
        offset_start = offset_end;
        offset_end += LENGTH_NUMBER_OF_CASES; // 4 byte
        ByteBuffer bb_Number_Of_Cases = ByteBuffer.wrap(recordType1, offset_start, LENGTH_NUMBER_OF_CASES);
        if (isLittleEndian) {
            bb_Number_Of_Cases.order(ByteOrder.LITTLE_ENDIAN);
        }
        int numberOfCases = bb_Number_Of_Cases.getInt();
        if (numberOfCases < 0) {
            // -1 if numberOfCases is unknown
            throw new RuntimeException("number of cases is not recorded in the header");
        } else {
            dbgLog.fine("RT1: number of cases is recorded= " + numberOfCases);
            dataTable.setCaseQuantity(new Long(numberOfCases));
        }
        // 1.7 8-byte compression bias — a double, not a long. 100 is the
        // expected value; it is only logged, not used further here.
        offset_start = offset_end;
        offset_end += LENGTH_COMPRESSION_BIAS; // 8 byte
        ByteBuffer bb_compression_bias = ByteBuffer
                .wrap(Arrays.copyOfRange(recordType1, offset_start, offset_end));
        if (isLittleEndian) {
            bb_compression_bias.order(ByteOrder.LITTLE_ENDIAN);
        }
        Double compressionBias = bb_compression_bias.getDouble();
        if (compressionBias == 100d) {
            // 100 is expected
            dbgLog.fine("compressionBias is 100 as expected");
        } else {
            dbgLog.fine("compression bias is not 100: " + compressionBias);
        }
        // 1.8 84-byte file creation info: date + time + 64-byte label.
        offset_start = offset_end;
        offset_end += LENGTH_FILE_CREATION_INFO; // 84 bytes
        String fileCreationInfo = getNullStrippedString(
                new String(Arrays.copyOfRange(recordType1, offset_start, offset_end), "US-ASCII"));
        dbgLog.fine("fileCreationInfo:\n" + fileCreationInfo + "\n");
        String fileCreationDate = fileCreationInfo.substring(0, length_file_creation_date);
        int dateEnd = length_file_creation_date + length_file_creation_time;
        String fileCreationTime = fileCreationInfo.substring(length_file_creation_date, (dateEnd));
        // NOTE(review): the end index here is length_file_creation_label, not
        // dateEnd + length_file_creation_label — looks suspicious; confirm
        // against the field-length constants before changing.
        String fileCreationNote = fileCreationInfo.substring(dateEnd, length_file_creation_label);
        dbgLog.fine("fileDate=" + fileCreationDate);
        dbgLog.fine("fileTime=" + fileCreationTime);
        dbgLog.fine("fileNote" + fileCreationNote);
    } catch (IOException ex) {
        throw ex;
    }
    dbgLog.fine("decodeRecordType1(): end");
}
From source file:org.bimserver.GeometryGenerator.java
private byte[] floatArrayToByteArray(float[] vertices) { if (vertices == null) { return null; }// w w w . ja va 2s . com ByteBuffer buffer = ByteBuffer.wrap(new byte[vertices.length * 4]); buffer.order(ByteOrder.LITTLE_ENDIAN); FloatBuffer asFloatBuffer = buffer.asFloatBuffer(); for (float f : vertices) { asFloatBuffer.put(f); } return buffer.array(); }
From source file:au.org.ala.layers.intersect.Grid.java
/**
 * Reads the .gri raster file and returns its values as floats, taking every
 * Nth point to produce a smaller grid. Grid max and min values may be
 * skipped by the sampling. Does not use previously cached data; the result
 * is stored in the {@code grid_data} field as a side effect.
 *
 * If subgrids are present, the raster is instead sampled on a fixed
 * 1000x1000 lattice over the grid extent via {@code getValues3}.
 *
 * Values equal to {@code nodatavalue} become NaN; all others are multiplied
 * by {@code rescale}.
 *
 * @param sampleEveryNthPoint sampling stride; 1 returns the full grid
 * @return sampled (and rescaled) grid values; on I/O error the array may be
 *         partially filled (errors are logged, not thrown)
 */
public float[] getGrid(int sampleEveryNthPoint) {
    int maxArrayLength = Integer.MAX_VALUE - 10;
    if (subgrids != null) {
        // Sample a fixed 1000x1000 lattice of points across the extent and
        // delegate to the subgrid reader.
        int size = 1000;
        double[][] points = new double[size * size][2];
        int pos = 0;
        for (int i = 0; i < 1000; i++) {
            for (int j = 0; j < 1000; j++) {
                points[pos][0] = xmin + (xmax - xmin) * j / (double) size;
                points[pos][1] = ymax - (ymax - ymin) * i / (double) size;
                pos++;
            }
        }
        return getValues3(points, 64);
    }
    // NOTE(review): only nrows is divided by the stride, ncols is not — so
    // the output holds every Nth value in row-major order rather than every
    // Nth row/column; confirm this is the intended sampling.
    int length = (nrows / sampleEveryNthPoint) * (ncols);
    float[] ret = new float[length];
    RandomAccessFile afile = null;
    File f2 = new File(filename + ".GRI");
    try {
        // read of random access file can throw an exception; prefer the
        // upper-case extension when it exists.
        if (!f2.exists()) {
            afile = new RandomAccessFile(filename + ".gri", "r");
        } else {
            afile = new RandomAccessFile(filename + ".GRI", "r");
        }
        // Chunked read buffer, capped and rounded up to a multiple of 8
        // (the largest element size handled below).
        int sz = (int) Math.min(afile.length() / sampleEveryNthPoint / sampleEveryNthPoint, maxArrayLength);
        sz += 8 - sz % 8;
        byte[] b = new byte[sz];
        long i = 0; // running element index across all chunks
        long max = 0; // exclusive bound of valid elements read so far
        int len;
        while ((len = afile.read(b)) > 0) {
            // Wrap the whole buffer each chunk; 'max' is advanced only by
            // the bytes actually read, so stale trailing bytes from a short
            // final read are never consumed.
            ByteBuffer bb = ByteBuffer.wrap(b);
            if (byteorderLSB) {
                bb.order(ByteOrder.LITTLE_ENDIAN);
            }
            if (datatype.equalsIgnoreCase("UBYTE")) {
                max += len;
                max = Math.min(max, ret.length * (long) sampleEveryNthPoint);
                for (; i < max; i++) {
                    ret[(int) (i / sampleEveryNthPoint)] = bb.get();
                    // shift signed byte into unsigned 0..255 range
                    if (ret[(int) (i / sampleEveryNthPoint)] < 0) {
                        ret[(int) (i / sampleEveryNthPoint)] += 256;
                    }
                }
            } else if (datatype.equalsIgnoreCase("BYTE")) {
                max += len;
                max = Math.min(max, ret.length * (long) sampleEveryNthPoint);
                for (; i < max; i++) {
                    ret[(int) (i / sampleEveryNthPoint)] = bb.get();
                }
            } else if (datatype.equalsIgnoreCase("SHORT")) {
                max += len / 2;
                max = Math.min(max, ret.length * (long) sampleEveryNthPoint);
                for (; i < max; i++) {
                    ret[(int) (i / sampleEveryNthPoint)] = bb.getShort();
                }
            } else if (datatype.equalsIgnoreCase("INT")) {
                max += len / 4;
                max = Math.min(max, ret.length * (long) sampleEveryNthPoint);
                for (; i < max; i++) {
                    ret[(int) (i / sampleEveryNthPoint)] = bb.getInt();
                }
            } else if (datatype.equalsIgnoreCase("LONG")) {
                max += len / 8;
                max = Math.min(max, ret.length * (long) sampleEveryNthPoint);
                for (; i < max; i++) {
                    ret[(int) (i / sampleEveryNthPoint)] = bb.getLong();
                }
            } else if (datatype.equalsIgnoreCase("FLOAT")) {
                max += len / 4;
                max = Math.min(max, ret.length * (long) sampleEveryNthPoint);
                for (; i < max; i++) {
                    ret[(int) (i / sampleEveryNthPoint)] = bb.getFloat();
                }
            } else if (datatype.equalsIgnoreCase("DOUBLE")) {
                max += len / 8;
                max = Math.min(max, ret.length * (long) sampleEveryNthPoint);
                for (; i < max; i++) {
                    ret[(int) (i / (long) sampleEveryNthPoint)] = (float) bb.getDouble();
                }
            } else {
                // should not happen; catch anyway by filling with NaN
                max += len / 4;
                for (; i < max; i++) {
                    ret[(int) (i / (long) sampleEveryNthPoint)] = Float.NaN;
                }
            }
        }
        // Replace no-data values with NaN; rescale everything else.
        for (i = 0; i < length; i++) {
            if ((float) ret[(int) i] == (float) nodatavalue) {
                ret[(int) i] = Float.NaN;
            } else {
                ret[(int) i] *= rescale;
            }
        }
    } catch (Exception e) {
        logger.error("An error has occurred - probably a file error", e);
    } finally {
        if (afile != null) {
            try {
                afile.close();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
    }
    grid_data = ret;
    return ret;
}
From source file:org.bimserver.GeometryGenerator.java
/**
 * Serializes an int array into its little-endian byte representation
 * (4 bytes per element).
 *
 * @param indices values to serialize; may be null
 * @return the packed bytes, or null when the input is null
 */
private byte[] intArrayToByteArray(int[] indices) {
    if (indices == null) {
        return null;
    }
    ByteBuffer out = ByteBuffer.allocate(indices.length * 4).order(ByteOrder.LITTLE_ENDIAN);
    for (int index : indices) {
        out.putInt(index);
    }
    return out.array();
}
From source file:org.openpilot_nonag.uavtalk.UAVTalk.java
/**
 * Sends a single object (or an object-less frame such as an ACK/NACK/request)
 * over the telemetry link as one little-endian UAVTalk packet:
 * sync byte, type, 16-bit length, 32-bit object id, 16-bit instance id,
 * optional payload, and a 1-byte CRC over everything before it.
 *
 * @param type   transaction type (TYPE_OBJ_REQ, TYPE_ACK, TYPE_NACK, or a
 *               data-carrying type)
 * @param objId  object identifier written into the header
 * @param instId instance identifier (low 16 bits used)
 * @param obj    object to serialize; may be null for payload-less types
 *               (e.g. NACK)
 * @return true on success; false when the payload is too long or packing
 *         fails (txErrors is incremented in those cases)
 * @throws IOException if writing to the output stream fails
 */
private boolean transmitSingleObject(int type, long objId, long instId, UAVObject obj) throws IOException {
    int length = 0;
    assert (objMngr != null && outStream != null);
    // IMPORTANT : obj can be null (when type is NACK for example)
    // Determine data length: request/ack/nack frames carry no payload.
    if (type == TYPE_OBJ_REQ || type == TYPE_ACK || type == TYPE_NACK) {
        length = 0;
    } else {
        length = obj.getNumBytes();
    }
    ByteBuffer bbuf = ByteBuffer.allocate(MAX_PACKET_LENGTH);
    bbuf.order(ByteOrder.LITTLE_ENDIAN);
    // Setup type and object id fields
    bbuf.put((byte) (SYNC_VAL & 0xff));
    bbuf.put((byte) (type & 0xff));
    bbuf.putShort((short) (length + HEADER_LENGTH));
    bbuf.putInt((int) objId);
    bbuf.putShort((short) (instId & 0xffff));
    // Reject over-long payloads before packing (header bytes above are
    // discarded along with the local buffer).
    if (length >= MAX_PAYLOAD_LENGTH) {
        ++stats.txErrors;
        return false;
    }
    // Copy payload (if any); pack() returning 0 or throwing counts as a
    // transmit error.
    if (length > 0)
        try {
            if (obj.pack(bbuf) == 0) {
                ++stats.txErrors;
                return false;
            }
        } catch (Exception e) {
            ++stats.txErrors;
            // TODO Auto-generated catch block
            e.printStackTrace();
            return false;
        }
    // Append the 1-byte CRC computed over all bytes written so far.
    bbuf.put((byte) (updateCRC(0, bbuf.array(), bbuf.position()) & 0xff));
    int packlen = bbuf.position();
    bbuf.position(0);
    byte[] dst = new byte[packlen];
    bbuf.get(dst, 0, packlen);
    outStream.write(dst);
    // Update stats (position == packlen again after the bulk get above).
    ++stats.txObjects;
    stats.txBytes += bbuf.position();
    stats.txObjectBytes += length;
    // Done
    return true;
}
From source file:nodomain.freeyourgadget.gadgetbridge.service.devices.pebble.PebbleProtocol.java
/**
 * Builds a BlobDB command packet for the Pebble (ENDPOINT_BLOBDB).
 *
 * Layout: big-endian length/endpoint prefix, then a little-endian body of
 * command, random 16-bit token, database id, key length, key bytes (UUID
 * keys are written big-endian), and — when present — a 16-bit blob length
 * followed by the blob.
 *
 * @param key     UUID or String identifying the record; any other type is
 *                rejected with a warning
 * @param command BlobDB command byte (insert/delete/...)
 * @param db      target database id
 * @param blob    payload bytes, or null for key-only commands
 * @return the encoded packet, or null for an unknown key type or a key
 *         longer than 255 bytes
 */
private byte[] encodeBlobdb(Object key, byte command, byte db, byte[] blob) {
    // 5 = command (1) + token (2) + db (1) + key length (1)
    int length = 5;
    int key_length;
    if (key instanceof UUID) {
        key_length = LENGTH_UUID;
    } else if (key instanceof String) {
        key_length = ((String) key).getBytes().length;
    } else {
        LOG.warn("unknown key type");
        return null;
    }
    // key length is encoded in a single byte below
    if (key_length > 255) {
        LOG.warn("key is too long");
        return null;
    }
    length += key_length;
    if (blob != null) {
        length += blob.length + 2; // 2 = 16-bit blob length field
    }
    ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + length);
    // Prefix is big-endian, body little-endian.
    buf.order(ByteOrder.BIG_ENDIAN);
    buf.putShort((short) length);
    buf.putShort(ENDPOINT_BLOBDB);
    buf.order(ByteOrder.LITTLE_ENDIAN);
    buf.put(command);
    buf.putShort((short) mRandom.nextInt()); // token
    buf.put(db);
    buf.put((byte) key_length);
    if (key instanceof UUID) {
        // UUIDs are written big-endian (most-significant bits first),
        // then the order is switched back for the rest of the body.
        UUID uuid = (UUID) key;
        buf.order(ByteOrder.BIG_ENDIAN);
        buf.putLong(uuid.getMostSignificantBits());
        buf.putLong(uuid.getLeastSignificantBits());
        buf.order(ByteOrder.LITTLE_ENDIAN);
    } else {
        buf.put(((String) key).getBytes());
    }
    if (blob != null) {
        buf.putShort((short) blob.length);
        buf.put(blob);
    }
    return buf.array();
}