List of usage examples for android.hardware SensorManager getRotationMatrixFromVector
public static void getRotationMatrixFromVector(float[] R, float[] rotationVector)
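Before the collected examples, here is a minimal standalone sketch of the typical call pattern: register a listener for Sensor.TYPE_ROTATION_VECTOR, convert the event's rotation vector into a 9-element rotation matrix with getRotationMatrixFromVector, and derive azimuth/pitch/roll with getOrientation. The class and field names below are illustrative only and are not taken from any of the projects listed.

import android.app.Activity;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;

// Illustrative sketch: shows the usual getRotationMatrixFromVector +
// getOrientation call pattern; names are hypothetical.
public class RotationVectorDemoActivity extends Activity implements SensorEventListener {

    private SensorManager sensorManager;
    private final float[] rotationMatrix = new float[9];    // 3x3 rotation matrix
    private final float[] orientationAngles = new float[3]; // azimuth, pitch, roll (radians)

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
    }

    @Override
    protected void onResume() {
        super.onResume();
        Sensor rotationVector = sensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
        if (rotationVector != null) {
            sensorManager.registerListener(this, rotationVector, SensorManager.SENSOR_DELAY_UI);
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        sensorManager.unregisterListener(this);
    }

    @Override
    public void onSensorChanged(SensorEvent event) {
        if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
            // Convert the rotation vector into a rotation matrix...
            SensorManager.getRotationMatrixFromVector(rotationMatrix, event.values);
            // ...and then into azimuth/pitch/roll, in radians.
            SensorManager.getOrientation(rotationMatrix, orientationAngles);
            float azimuthDegrees = (float) Math.toDegrees(orientationAngles[0]);
            // Use azimuthDegrees as needed, e.g. to rotate a compass view.
        }
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
        // Not used in this sketch.
    }
}

The examples that follow show the same call pattern embedded in sensor-fusion filters, display-rotation handling, and logging code.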
From source file:com.bolatu.gezkoncsvlogger.GyroOrientation.ImuOCfQuaternion.java
/**
 * Calculate the fused orientation.
 */
private void calculateFusedOrientation() {
    float oneMinusCoeff = (1.0f - filterCoefficient);

    // Apply the complementary filter. We multiply each rotation by its
    // coefficient (a scalar weight)...

    // Scale our quaternion for the gyroscope
    quatGyro = quatGyro.multiply(filterCoefficient);

    // Scale our quaternion for the accel/mag
    quatAccelMag = quatAccelMag.multiply(oneMinusCoeff);

    // ...and then add the two quaternions together.
    // output[0] = alpha * output[0] + (1 - alpha) * input[0];
    quatGyro = quatGyro.add(quatAccelMag);

    // Now we get a structure we can pass to get a rotation matrix, and then
    // an orientation vector from Android.
    qvFusedOrientation[0] = (float) quatGyro.getVectorPart()[0];
    qvFusedOrientation[1] = (float) quatGyro.getVectorPart()[1];
    qvFusedOrientation[2] = (float) quatGyro.getVectorPart()[2];
    qvFusedOrientation[3] = (float) quatGyro.getScalarPart();

    // We need a rotation matrix so we can get the orientation vector...
    // Getting Euler angles from a quaternion is not trivial, so this is the
    // easiest way, but perhaps not the fastest way of doing this.
    SensorManager.getRotationMatrixFromVector(rmFusedOrientation, qvFusedOrientation);

    // Get the fused orientation
    SensorManager.getOrientation(rmFusedOrientation, vFusedOrientation);
}
From source file:com.bolatu.gezkoncsvlogger.GyroOrientation.ImuOKfQuaternion.java
/**
 * Calculate the fused orientation.
 */
private void calculateFusedOrientation() {
    qvOrientationGyroscope[0] = (float) quatGyro.getVectorPart()[0];
    qvOrientationGyroscope[1] = (float) quatGyro.getVectorPart()[1];
    qvOrientationGyroscope[2] = (float) quatGyro.getVectorPart()[2];
    qvOrientationGyroscope[3] = (float) quatGyro.getScalarPart();

    // Apply the Kalman filter... Note that the prediction and correction
    // inputs could be swapped, but the filter is much more stable in this
    // configuration.
    kalmanFilter.predict(qvOrientationGyroscope);
    kalmanFilter.correct(qvOrientationAccelMag);

    // Apply the new gyroscope delta rotation to the new Kalman filter
    // rotation estimation.
    quatGyro = new Quaternion(kalmanFilter.getStateEstimation()[3],
            Arrays.copyOfRange(kalmanFilter.getStateEstimation(), 0, 3));

    // Now we get a structure we can pass to get a rotation matrix, and then
    // an orientation vector from Android.
    qvFusedOrientation[0] = (float) kalmanFilter.getStateEstimation()[0];
    qvFusedOrientation[1] = (float) kalmanFilter.getStateEstimation()[1];
    qvFusedOrientation[2] = (float) kalmanFilter.getStateEstimation()[2];
    qvFusedOrientation[3] = (float) kalmanFilter.getStateEstimation()[3];

    // We need a rotation matrix so we can get the orientation vector...
    // Getting Euler angles from a quaternion is not trivial, so this is the
    // easiest way, but perhaps not the fastest way of doing this.
    SensorManager.getRotationMatrixFromVector(rmFusedOrientation, qvFusedOrientation);

    // Get the fused orientation
    SensorManager.getOrientation(rmFusedOrientation, vFusedOrientation);
}
From source file:com.kircherelectronics.accelerationexplorer.filter.ImuLaKfQuaternion.java
/**
 * Calculate the fused orientation.
 */
private void calculateFusedOrientation() {
    vectorGyro[0] = (float) quatGyro.getVectorPart()[0];
    vectorGyro[1] = (float) quatGyro.getVectorPart()[1];
    vectorGyro[2] = (float) quatGyro.getVectorPart()[2];
    vectorGyro[3] = (float) quatGyro.getScalarPart();

    // Apply the Kalman filter... Note that the prediction and correction
    // inputs could be swapped, but the filter is much more stable in this
    // configuration.
    kalmanFilter.predict(vectorGyro);
    kalmanFilter.correct(vectorAccelMag);

    // Apply the new gyroscope delta rotation to the new Kalman filter
    // rotation estimation.
    quatGyro = new Quaternion(kalmanFilter.getStateEstimation()[3],
            Arrays.copyOfRange(kalmanFilter.getStateEstimation(), 0, 3));

    // Now we get a structure we can pass to get a rotation matrix, and then
    // an orientation vector from Android.
    fusedVector[0] = (float) kalmanFilter.getStateEstimation()[0];
    fusedVector[1] = (float) kalmanFilter.getStateEstimation()[1];
    fusedVector[2] = (float) kalmanFilter.getStateEstimation()[2];
    fusedVector[3] = (float) kalmanFilter.getStateEstimation()[3];

    // We need a rotation matrix so we can get the orientation vector...
    // Getting Euler angles from a quaternion is not trivial, so this is the
    // easiest way, but perhaps not the fastest way of doing this.
    SensorManager.getRotationMatrixFromVector(fusedMatrix, fusedVector);

    // Get the fused orientation
    SensorManager.getOrientation(fusedMatrix, fusedOrientation);
}
From source file:com.kircherelectronics.accelerationexplorer.filter.ImuLaCfQuaternion.java
/**
 * Calculate the fused orientation.
 */
private void calculateFusedOrientation() {
    float oneMinusCoeff = (1.0f - filterCoefficient);

    // Apply the complementary filter. We multiply each rotation by its
    // coefficient (a scalar weight)...

    // Scale our quaternion for the gyroscope
    quatGyro = quatGyro.multiply(filterCoefficient);

    // Scale our quaternion for the accel/mag
    quatAccelMag = quatAccelMag.multiply(oneMinusCoeff);

    // ...and then add the two quaternions together.
    // output[0] = alpha * output[0] + (1 - alpha) * input[0];
    quatGyro = quatGyro.add(quatAccelMag);

    // Now we get a structure we can pass to get a rotation matrix, and then
    // an orientation vector from Android.
    fusedVector[0] = (float) quatGyro.getVectorPart()[0];
    fusedVector[1] = (float) quatGyro.getVectorPart()[1];
    fusedVector[2] = (float) quatGyro.getVectorPart()[2];
    fusedVector[3] = (float) quatGyro.getScalarPart();

    // We need a rotation matrix so we can get the orientation vector...
    // Getting Euler angles from a quaternion is not trivial, so this is the
    // easiest way, but perhaps not the fastest way of doing this.
    SensorManager.getRotationMatrixFromVector(fusedMatrix, fusedVector);

    // Get the fused orientation
    SensorManager.getOrientation(fusedMatrix, fusedOrientation);
}
From source file:com.shadowmaps.example.GpsTestActivity.java
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
@Override
public void onSensorChanged(SensorEvent event) {
    double orientation = Double.NaN;
    double tilt = Double.NaN;

    switch (event.sensor.getType()) {
        case Sensor.TYPE_ROTATION_VECTOR:
            // Modern rotation vector sensors
            if (!mTruncateVector) {
                try {
                    SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
                } catch (IllegalArgumentException e) {
                    // On some Samsung devices, an exception is thrown if this vector > 4 (see #39)
                    // Truncate the array, since we can deal with only the first four values
                    Log.e(TAG, "Samsung device error? Will truncate vectors - " + e);
                    mTruncateVector = true;
                    // Do the truncation here the first time the exception occurs
                    getRotationMatrixFromTruncatedVector(event.values);
                }
            } else {
                // Truncate the array to avoid the exception on some devices (see #39)
                getRotationMatrixFromTruncatedVector(event.values);
            }

            int rot = getWindowManager().getDefaultDisplay().getRotation();
            switch (rot) {
                case Surface.ROTATION_0:
                    // No orientation change, use default coordinate system
                    SensorManager.getOrientation(mRotationMatrix, mValues);
                    // Log.d(TAG, "Rotation-0");
                    break;
                case Surface.ROTATION_90:
                    // Log.d(TAG, "Rotation-90");
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_Y,
                            SensorManager.AXIS_MINUS_X, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                case Surface.ROTATION_180:
                    // Log.d(TAG, "Rotation-180");
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_X,
                            SensorManager.AXIS_MINUS_Y, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                case Surface.ROTATION_270:
                    // Log.d(TAG, "Rotation-270");
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_Y,
                            SensorManager.AXIS_X, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                default:
                    // This shouldn't happen - assume default orientation
                    SensorManager.getOrientation(mRotationMatrix, mValues);
                    // Log.d(TAG, "Rotation-Unknown");
                    break;
            }

            orientation = Math.toDegrees(mValues[0]); // azimuth
            tilt = Math.toDegrees(mValues[1]);
            break;
        case Sensor.TYPE_ORIENTATION:
            // Legacy orientation sensors
            orientation = event.values[0];
            break;
        default:
            // A sensor we're not using, so return
            return;
    }

    // Correct for true north, if preference is set
    if (mFaceTrueNorth && mGeomagneticField != null) {
        orientation += mGeomagneticField.getDeclination();
    }

    for (GpsTestListener listener : mGpsTestListeners) {
        listener.onOrientationChanged(orientation, tilt);
    }
}
From source file:com.android.gpstest.GpsTestActivity.java
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
@Override
public void onSensorChanged(SensorEvent event) {
    double orientation = Double.NaN;
    double tilt = Double.NaN;

    switch (event.sensor.getType()) {
        case Sensor.TYPE_ROTATION_VECTOR:
            // Modern rotation vector sensors
            if (!mTruncateVector) {
                try {
                    SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
                } catch (IllegalArgumentException e) {
                    // On some Samsung devices, an exception is thrown if this vector > 4 (see #39)
                    // Truncate the array, since we can deal with only the first four values
                    Log.e(TAG, "Samsung device error? Will truncate vectors - " + e);
                    mTruncateVector = true;
                    // Do the truncation here the first time the exception occurs
                    getRotationMatrixFromTruncatedVector(event.values);
                }
            } else {
                // Truncate the array to avoid the exception on some devices (see #39)
                getRotationMatrixFromTruncatedVector(event.values);
            }

            int rot = getWindowManager().getDefaultDisplay().getRotation();
            switch (rot) {
                case Surface.ROTATION_0:
                    // No orientation change, use default coordinate system
                    SensorManager.getOrientation(mRotationMatrix, mValues);
                    // Log.d(TAG, "Rotation-0");
                    break;
                case Surface.ROTATION_90:
                    // Log.d(TAG, "Rotation-90");
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_Y,
                            SensorManager.AXIS_MINUS_X, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                case Surface.ROTATION_180:
                    // Log.d(TAG, "Rotation-180");
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_X,
                            SensorManager.AXIS_MINUS_Y, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                case Surface.ROTATION_270:
                    // Log.d(TAG, "Rotation-270");
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_Y,
                            SensorManager.AXIS_X, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                default:
                    // This shouldn't happen - assume default orientation
                    SensorManager.getOrientation(mRotationMatrix, mValues);
                    // Log.d(TAG, "Rotation-Unknown");
                    break;
            }

            orientation = Math.toDegrees(mValues[0]); // azimuth
            tilt = Math.toDegrees(mValues[1]);
            break;
        case Sensor.TYPE_ORIENTATION:
            // Legacy orientation sensors
            orientation = event.values[0];
            break;
        default:
            // A sensor we're not using, so return
            return;
    }

    // Correct for true north, if preference is set
    if (mFaceTrueNorth && mGeomagneticField != null) {
        orientation += mGeomagneticField.getDeclination();
        // Make sure value is between 0-360
        orientation = MathUtils.mod((float) orientation, 360.0f);
    }

    for (GpsTestListener listener : mGpsTestListeners) {
        listener.onOrientationChanged(orientation, tilt);
    }
}
From source file:com.thousandthoughts.tutorials.SensorFusionActivity.java
public void gyroFunction(SensorEvent event) {
    // don't start until first accelerometer/magnetometer orientation has been acquired
    if (accMagOrientation == null)
        return;

    // initialisation of the gyroscope based rotation matrix
    if (initState) {
        float[] initMatrix = new float[9];
        initMatrix = getRotationMatrixFromOrientation(accMagOrientation);
        float[] test = new float[3];
        SensorManager.getOrientation(initMatrix, test);
        gyroMatrix = matrixMultiplication(gyroMatrix, initMatrix);
        initState = false;
    }

    // copy the new gyro values into the gyro array
    // convert the raw gyro data into a rotation vector
    float[] deltaVector = new float[4];
    if (timestamp != 0) {
        final float dT = (event.timestamp - timestamp) * NS2S;
        System.arraycopy(event.values, 0, gyro, 0, 3);
        getRotationVectorFromGyro(gyro, deltaVector, dT / 2.0f);
    }

    // measurement done, save current time for next interval
    timestamp = event.timestamp;

    // convert rotation vector into rotation matrix
    float[] deltaMatrix = new float[9];
    SensorManager.getRotationMatrixFromVector(deltaMatrix, deltaVector);

    // apply the new rotation interval on the gyroscope based rotation matrix
    gyroMatrix = matrixMultiplication(gyroMatrix, deltaMatrix);

    // get the gyroscope based orientation from the rotation matrix
    SensorManager.getOrientation(gyroMatrix, gyroOrientation);
}
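The example above calls a helper, getRotationVectorFromGyro(...), that is not included in this listing. A minimal sketch of such a helper, assuming it follows the standard axis-angle-to-quaternion construction (normalize the angular-rate vector, integrate over the supplied half timestep, and emit a unit quaternion in x, y, z, w order, which getRotationMatrixFromVector accepts); the EPSILON constant is an assumed threshold, not taken from the listing:

private static final float EPSILON = 0.000000001f;

// Hedged sketch: converts a gyroscope rate sample (rad/s) into a normalized
// rotation-vector quaternion over the given (half) time interval.
private void getRotationVectorFromGyro(float[] gyroValues, float[] deltaRotationVector,
        float timeFactor) {
    float[] normValues = new float[3];

    // Calculate the angular speed of the sample
    float omegaMagnitude = (float) Math.sqrt(gyroValues[0] * gyroValues[0]
            + gyroValues[1] * gyroValues[1] + gyroValues[2] * gyroValues[2]);

    // Normalize the rotation axis if the magnitude is large enough
    if (omegaMagnitude > EPSILON) {
        normValues[0] = gyroValues[0] / omegaMagnitude;
        normValues[1] = gyroValues[1] / omegaMagnitude;
        normValues[2] = gyroValues[2] / omegaMagnitude;
    }

    // Integrate around this axis: the caller passes dT / 2, so this is theta/2
    float thetaOverTwo = omegaMagnitude * timeFactor;
    float sinThetaOverTwo = (float) Math.sin(thetaOverTwo);
    float cosThetaOverTwo = (float) Math.cos(thetaOverTwo);
    deltaRotationVector[0] = sinThetaOverTwo * normValues[0];
    deltaRotationVector[1] = sinThetaOverTwo * normValues[1];
    deltaRotationVector[2] = sinThetaOverTwo * normValues[2];
    deltaRotationVector[3] = cosThetaOverTwo;
}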
From source file:com.android.gpstest.GpsTestActivity.java
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
private void getRotationMatrixFromTruncatedVector(float[] vector) {
    System.arraycopy(vector, 0, mTruncatedRotationVector, 0, 4);
    SensorManager.getRotationMatrixFromVector(mRotationMatrix, mTruncatedRotationVector);
}
From source file:com.woofy.haifa.MapActivity.java
@Override
public void onSensorChanged(SensorEvent event) {
    if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR && rotation_on) {
        SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
        float[] orientation = new float[3];
        SensorManager.getOrientation(mRotationMatrix, orientation);
        bearing = (float) (Math.toDegrees(orientation[0]) + mDeclination);
        // updateCamera(bearing); TODO
    }
}
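The mDeclination field used above is not computed in this snippet; it is presumably the magnetic declination for the current location, added so the sensor azimuth (relative to magnetic north) yields a true-north bearing. A hedged sketch of how such a value is commonly derived from a GPS fix via the Android GeomagneticField API (the updateDeclination method name is hypothetical; only mDeclination comes from the snippet):

// Illustrative sketch: derive magnetic declination (degrees) from a location fix
// so that sensor azimuth can be corrected from magnetic north to true north.
private void updateDeclination(android.location.Location location) {
    android.hardware.GeomagneticField geoField = new android.hardware.GeomagneticField(
            (float) location.getLatitude(),
            (float) location.getLongitude(),
            (float) location.getAltitude(),
            System.currentTimeMillis());
    mDeclination = geoField.getDeclination(); // degrees east of true north
}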
From source file:com.example.sensingapp.SensingApp.java
public void recordSensingInfo(SensorData senData) {
    String sRecordLine;
    String sTimeField;
    Date dtCurDate;
    int i;
    long lStartTime = 0;
    long lCurrentTime = 0;
    SimpleDateFormat spdRecordTime, spdCurDateTime;
    final String DATE_FORMAT = "yyyyMMddHHmmss";
    final String DATE_FORMAT_S = "yyMMddHHmmssSSS"; //"yyyyMMddHHmmssSSS"

    int nSensorReadingType = SENSOR_EVENT_NULL;
    int nSensorDataType;

    if (m_blnRecordStatus == false) { //Stopped
        return;
    }

    dtCurDate = new Date();

    // Timestamp for the record
    spdRecordTime = new SimpleDateFormat(DATE_FORMAT_S);
    sTimeField = spdRecordTime.format(dtCurDate);

    nSensorDataType = senData.getSensorDataType();

    if (nSensorDataType == DATA_TYPE_SENSOR) {
        SensorEvent event;
        event = senData.getSensorEvent();

        synchronized (this) {
            switch (event.sensor.getType()) {
                case Sensor.TYPE_ACCELEROMETER: //X, Y, Z
                    if (m_blnAcclEnabled) {
                        m_sAccl = Float.toString(event.values[0]) + "," + Float.toString(event.values[1]) + ","
                                + Float.toString(event.values[2]) + ",";
                        nSensorReadingType = SENSOR_EVENT_ACCL;
                    }

                    // if (m_blnOrientEnabled) {
                    //     m_arrfAcclValues = event.values.clone();
                    //     if (calculateOrientation()) {
                    //         //Azimuth (rotation around z-axis); Pitch (rotation around x-axis), Roll (rotation around y-axis)
                    //         m_sOrient = Float.toString(m_arrfOrientValues[0]) + "," +
                    //                 Float.toString(m_arrfOrientValues[1]) + "," +
                    //                 Float.toString(m_arrfOrientValues[2]) + ",";
                    //         nSensorReadingType = SENSOR_EVENT_ORIENT;
                    //     }
                    // }
                    break;

                case Sensor.TYPE_LINEAR_ACCELERATION: //X,Y,Z
                    if (m_blnLinearAcclEnabled) {
                        m_sLinearAccl = Float.toString(event.values[0]) + "," + Float.toString(event.values[1]) + ","
                                + Float.toString(event.values[2]) + ",";
                        nSensorReadingType = SENSOR_EVENT_LINEAR_ACCL;
                    }
                    break;

                case Sensor.TYPE_GRAVITY: //X,Y,Z
                    if (m_blnGravityEnabled) {
                        m_sGravity = Float.toString(event.values[0]) + "," + Float.toString(event.values[1]) + ","
                                + Float.toString(event.values[2]) + ",";
                        nSensorReadingType = SENSOR_EVENT_GRAVITY;
                    }
                    break;

                case Sensor.TYPE_GYROSCOPE: //X,Y,Z
                    m_sGyro = Float.toString(event.values[0]) + "," + Float.toString(event.values[1]) + ","
                            + Float.toString(event.values[2]) + ",";
                    nSensorReadingType = SENSOR_EVENT_GYRO;
                    break;

                case Sensor.TYPE_MAGNETIC_FIELD:
                    // Values are in micro-Tesla (uT) and measure the ambient magnetic field
                    if (m_blnMagnetEnabled) {
                        m_sMagnet = Float.toString(event.values[0]) + "," + Float.toString(event.values[1]) + ","
                                + Float.toString(event.values[2]) + ",";
                        nSensorReadingType = SENSOR_EVENT_MAGNET;
                    }

                    // if (m_blnOrientEnabled) {
                    //     m_arrfMagnetValues = event.values.clone();
                    //     if (calculateOrientation()) {
                    //         //Azimuth (rotation around z-axis); Pitch (rotation around x-axis), Roll (rotation around y-axis)
                    //         m_sOrient = Float.toString(m_arrfOrientValues[0]) + "," +
                    //                 Float.toString(m_arrfOrientValues[1]) + "," +
                    //                 Float.toString(m_arrfOrientValues[2]) + ",";
                    //         if (nSensorReadingType != SENSOR_EVENT_MAGNET) {
                    //             nSensorReadingType = SENSOR_EVENT_ORIENT;
                    //         }
                    //     }
                    // }
                    break;

                case Sensor.TYPE_ROTATION_VECTOR: //Added on 20150910
                    if (m_blnOrientEnabled) {
                        float[] arrfRotVal = new float[3];
                        float[] arrfR = new float[9];
                        float[] arrfValues = new float[3];

                        try {
                            System.arraycopy(event.values, 0, arrfRotVal, 0, event.values.length);
                        } catch (IllegalArgumentException e) {
                            //Hardcode the size to handle a bug on Samsung devices
                            System.arraycopy(event.values, 0, arrfRotVal, 0, 3);
                        }

                        SensorManager.getRotationMatrixFromVector(arrfR, arrfRotVal);
                        SensorManager.getOrientation(arrfR, arrfValues);

                        m_arrfOrientValues[0] = (float) Math.toDegrees(arrfValues[0]);
                        m_arrfOrientValues[1] = (float) Math.toDegrees(arrfValues[1]);
                        m_arrfOrientValues[2] = (float) Math.toDegrees(arrfValues[2]);

                        if (m_arrfOrientValues[0] < 0) {
                            m_arrfOrientValues[0] = m_arrfOrientValues[0] + 360; // Make Azimuth 0 ~ 360
                        }

                        //Azimuth (rotation around z-axis); Pitch (rotation around x-axis), Roll (rotation around y-axis)
                        m_sOrient = Float.toString(m_arrfOrientValues[0]) + "," + Float.toString(m_arrfOrientValues[1])
                                + "," + Float.toString(m_arrfOrientValues[2]) + ",";

                        //m_tvGpsUp.setText(m_sOrient); //Show orientation

                        nSensorReadingType = SENSOR_EVENT_ORIENT;
                    }
                    break;

                case Sensor.TYPE_LIGHT:
                    // Ambient light level in SI lux units
                    m_sLight = Float.toString(event.values[0]) + ",";
                    nSensorReadingType = SENSOR_EVENT_LIGHT;
                    break;

                case Sensor.TYPE_PRESSURE:
                    // Atmospheric pressure in hPa (millibar)
                    m_sBarometer = Float.toString(event.values[0]) + ",";
                    nSensorReadingType = SENSOR_EVENT_BAROMETER;
                    break;
            }
        }
    } else if (nSensorDataType == DATA_TYPE_GPS) {
        Location locationGps;
        locationGps = senData.getGpsLocation();

        if (locationGps != null) {
            m_location = new Location(locationGps);

            //Change from double to float
            m_sGPS = Float.valueOf((float) (locationGps.getLatitude())).toString() + ","
                    + Float.valueOf((float) (locationGps.getLongitude())).toString() + ",";

            if (locationGps.hasAltitude()) {
                m_sGPS = m_sGPS + Float.valueOf((float) (locationGps.getAltitude())).toString() + ",";

                GeomagneticField geoField = new GeomagneticField(
                        Double.valueOf(locationGps.getLatitude()).floatValue(),
                        Double.valueOf(locationGps.getLongitude()).floatValue(),
                        Double.valueOf(locationGps.getAltitude()).floatValue(), System.currentTimeMillis());

                // Append Declination, in Degree
                m_sGPS = m_sGPS + Float.valueOf((float) (geoField.getDeclination())).toString() + ","
                        + Float.valueOf((float) (geoField.getInclination())).toString() + ",";
            } else {
                m_sGPS = m_sGPS + ",,,";
                //m_sGPS = m_sGPS + ",";
            }

            //New add 201408270009
            if (locationGps.hasSpeed()) {
                m_sGPS = m_sGPS + Float.valueOf((float) (locationGps.getSpeed())).toString() + ",";
            } else {
                m_sGPS = m_sGPS + ",";
            }

            if (locationGps.hasBearing()) {
                m_sGPS = m_sGPS + Float.valueOf((float) (locationGps.getBearing())).toString() + ",";
            } else {
                m_sGPS = m_sGPS + ",";
            }

            nSensorReadingType = SENSOR_EVENT_GPS;

            m_blnGpsUp = true;
            show_screen5_GpsUp();
        } else {
            m_blnGpsUp = false;
            show_screen5_GpsUp();
        }
    } else if (nSensorDataType == DATA_TYPE_MIC) {
        double fSoundLevelDb;
        fSoundLevelDb = senData.getSoundLevelDb();
        m_sSouldLevel = new BigDecimal(fSoundLevelDb).setScale(0, BigDecimal.ROUND_HALF_UP) + ",";
        nSensorReadingType = SENSOR_EVENT_MIC;
    } else if (nSensorDataType == DATA_TYPE_CELLULAR) {
        int nCellId;
        nCellId = senData.getCellId();
        m_sCellId = Integer.valueOf(nCellId).toString() + ",";
        nSensorReadingType = SENSOR_EVENT_CELLULAR;
    } else if (nSensorDataType == DATA_TYPE_WIFI) {
        List<WifiData> lstWifiData = senData.getListWifiData();
        int nWifiCnt = Math.min(WIFI_COUNT, lstWifiData.size());

        m_sWifi = "";
        for (i = 0; i < nWifiCnt; i++) {
            //m_sWifi = m_sWifi + lstWifiData.get(i).getSSID() + "," + lstWifiData.get(i).getBSSID() + "," + lstWifiData.get(i).getSignalLevel() + ",";
            m_sWifi = m_sWifi + lstWifiData.get(i).getBSSID() + "," + lstWifiData.get(i).getSignalLevel() + ",";
        }
        for (i = 1; i <= WIFI_COUNT - nWifiCnt; i++) {
            //m_sWifi = m_sWifi + ",,,";
            m_sWifi = m_sWifi + ",,";
        }

        nSensorReadingType = SENSOR_EVENT_WIFI;
    }

    if (nSensorReadingType == SENSOR_EVENT_NULL) {
        return;
    }

    sRecordLine = sTimeField + ",";

    if (m_blnNoLabel == false) {
        sRecordLine = sRecordLine + m_sCurrentLabel + ",";
    }

    sRecordLine = sRecordLine + Integer.valueOf(nSensorReadingType) + ",";

    //New: Every field always there
    //Field in each line:
    /*
     * 1) Timestamp
     * 2) Label
     * 3) SensingEventType
     * 4-6) Accl
     * 7-9) Linear Accl
     * 10-12) Gravity
     * 13-15) Gyro
     * 16-18) Orientation
     * 19-21) Magnet
     * 22) Light
     * 23) Barometer
     * 24) Sound Level (Decibel)
     * 25) Cell ID
     * 26-32) GPS (Lat, Long, Alt, Declination, Inclination, Speed, Bearing)
     * 33-72) WiFi (<BSSID, Level>)
     */

    // sRecordLine = sRecordLine + m_sAccl + m_sGyro + m_sOrient + m_sMagnet +
    //         m_sLight + m_sBarometer + m_sSouldLevel + m_sCellId + m_sGPS + m_sWifi;

    sRecordLine = sRecordLine + m_sAccl + m_sLinearAccl + m_sGravity + m_sGyro + m_sOrient + m_sMagnet + m_sLight
            + m_sBarometer + m_sSouldLevel + m_sCellId + m_sGPS + m_sWifi;

    ////////////////////////////
    // String sAngle = calculateRot(m_sAccl, m_sGravity);
    // String sarrAngle[] = sAngle.split(",");
    // String sShow = sarrAngle[0] + "\n" + sarrAngle[1];

    // String sShow = "";
    // if (m_sGravity.length() > 3) {
    //     String sarrAngle[] = m_sGravity.split(",");
    //     double fX = Double.valueOf(sarrAngle[0]).doubleValue();
    //     double fY = Double.valueOf(sarrAngle[1]).doubleValue();
    //     double fZ = Double.valueOf(sarrAngle[2]).doubleValue();
    //     double fTotal = Math.sqrt(fX*fX + fY*fY + fZ*fZ);
    //     double fAngleZ = Math.acos(fZ/fTotal)/Math.PI*180;
    //     double fAngleY = 90 - Math.acos(fY/fTotal)/Math.PI*180;
    //     double fAngleX = 90 - Math.acos(fX/fTotal)/Math.PI*180;
    //     sShow = "X: " + fAngleX + "\n";
    //     sShow = sShow + "Y: " + fAngleY + "\n";
    //     sShow = sShow + "Z: " + fAngleZ;
    // }

    // if (m_sGravity.length() > 3) {
    //     String sarrAngle[] = m_sGravity.split(",");
    //     double fX = Double.valueOf(sarrAngle[0]).doubleValue();
    //     double fY = Double.valueOf(sarrAngle[1]).doubleValue();
    //     double fZ = Double.valueOf(sarrAngle[2]).doubleValue();
    //     int nSymbol = 0;
    //     if (fX < 0) {
    //         sShow = sShow + "- X" + "\n";
    //     } else if (fX > 0) {
    //         sShow = sShow + "+ X" + "\n";
    //     }
    //     if (fY < 0) {
    //         sShow = sShow + "- Y" + "\n";
    //     } else if (fY > 0) {
    //         sShow = sShow + "+ Y" + "\n";
    //     }
    //     if (fZ < 0) {
    //         sShow = sShow + "- Z";
    //     } else if (fZ > 0) {
    //         sShow = sShow + "+ Z";
    //     }
    // }
    //
    // if (m_sGyro.length() > 3) {
    //     String sarrAngle[] = m_sGyro.split(",");
    //     double fX = Double.valueOf(sarrAngle[0]).doubleValue();
    //     double fY = Double.valueOf(sarrAngle[1]).doubleValue();
    //     double fZ = Double.valueOf(sarrAngle[2]).doubleValue();
    //     int nSymbol = 0;
    //     if (fX < 0) {
    //         nSymbol = -1;
    //     } else if (fX > 0) {
    //         nSymbol = 1;
    //     }
    //     if (fY < 0) {
    //         nSymbol = nSymbol + (-1);
    //     } else if (fY > 0) {
    //         nSymbol = nSymbol + 1;
    //     }
    //     if (fZ < 0) {
    //         nSymbol = nSymbol + (-1);
    //     } else if (fZ > 0) {
    //         nSymbol = nSymbol + 1;
    //     }
    //     if (nSymbol < 0) {
    //         nSymbol = -1;
    //     } else if (nSymbol > 0) {
    //         nSymbol = 1;
    //     }
    //     sShow = sShow + "\n\n" + nSymbol + "";
    // }

    // m_tvSensingInfo.setText(sShow);
    ////////////////////////////

    sRecordLine = sRecordLine + System.getProperty("line.separator");

    if (m_fwSensorRecord != null) {
        //Write information into file
        //Compose information into recordLine
        try {
            m_fwSensorRecord.write(sRecordLine);
        } catch (IOException e) {
        }
    }
}