List of usage examples for android.hardware SensorManager getOrientation
public static float[] getOrientation(float[] R, float[] values)
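Before the per-project examples, here is a minimal sketch of the common usage pattern: build a rotation matrix from cached accelerometer and magnetometer readings, then pass it to getOrientation to obtain azimuth, pitch, and roll in radians. The field and method names (lastAccel, lastMagnet, computeAzimuthPitchRoll) are illustrative only and do not come from any of the source files below.

    // Assumes a SensorEventListener elsewhere keeps these up to date
    private float[] lastAccel;   // latest TYPE_ACCELEROMETER values
    private float[] lastMagnet;  // latest TYPE_MAGNETIC_FIELD values

    private float[] computeAzimuthPitchRoll() {
        if (lastAccel == null || lastMagnet == null) {
            return null; // not enough sensor data yet
        }
        float[] rotationMatrix = new float[9];
        // Build the rotation matrix from gravity and geomagnetic readings
        if (!SensorManager.getRotationMatrix(rotationMatrix, null, lastAccel, lastMagnet)) {
            return null; // readings unusable (e.g. device in free fall)
        }
        float[] orientation = new float[3];
        // orientation[0] = azimuth, [1] = pitch, [2] = roll, all in radians
        SensorManager.getOrientation(rotationMatrix, orientation);
        // Convert to degrees for display
        return new float[] {
                (float) Math.toDegrees(orientation[0]),
                (float) Math.toDegrees(orientation[1]),
                (float) Math.toDegrees(orientation[2])
        };
    }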
From source file:com.javadog.cgeowear.WearService.java
/**
 * Handles compass rotation.
 */
@Override
public void onSensorChanged(SensorEvent event) {
    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
        gravity = event.values.clone();
    } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
        geomagnetic = event.values.clone();
    }

    if (gravity != null && geomagnetic != null) {
        float[] R = new float[9];
        float[] I = new float[9];
        boolean success = SensorManager.getRotationMatrix(R, I, gravity, geomagnetic);

        if (success) {
            float[] orientation = new float[3];
            SensorManager.getOrientation(R, orientation);

            float azimuth = (float) Math.toDegrees(orientation[0]);
            float pitch = (float) Math.toDegrees(orientation[1]);

            if (currentLocation != null) {
                float smoothedLatitude = smoothSensorValues(oldLatitude,
                        (float) currentLocation.getLatitude(), 1 / 3f);
                float smoothedLongitude = smoothSensorValues(oldLongitude,
                        (float) currentLocation.getLongitude(), 1 / 3f);
                float smoothedAltitude = smoothSensorValues(oldAltitude,
                        (float) currentLocation.getAltitude(), 1 / 3f);

                GeomagneticField geomagneticField = new GeomagneticField(smoothedLatitude,
                        smoothedLongitude, smoothedAltitude, System.currentTimeMillis());
                azimuth += geomagneticField.getDeclination();

                float bearing = currentLocation.bearingTo(geocacheLocation);
                float direction = smoothSensorValues(oldDirection, -(azimuth - bearing), 1 / 5f);

                // If the user puts the phone in a pocket upside-down, invert the compass
                if (pitch > 0) {
                    direction += 180f;
                }

                // Set old values to current values (for smoothing)
                oldDirection = direction;
                oldLatitude = smoothedLatitude;
                oldLongitude = smoothedLongitude;
                oldAltitude = smoothedAltitude;

                // Send direction update to Android Wear if the update interval has passed
                long currentTime = System.currentTimeMillis();
                if ((currentTime - prevTime) > DIRECTION_UPDATE_SPEED) {
                    wearInterface.sendDirectionUpdate(direction);
                    prevTime = currentTime;
                }
            }
        }
    }
}
From source file:com.bolatu.gezkoncsvlogger.GyroOrientation.ImuOKfQuaternion.java
/**
 * Calculate the fused orientation.
 */
private void calculateFusedOrientation() {
    qvOrientationGyroscope[0] = (float) quatGyro.getVectorPart()[0];
    qvOrientationGyroscope[1] = (float) quatGyro.getVectorPart()[1];
    qvOrientationGyroscope[2] = (float) quatGyro.getVectorPart()[2];
    qvOrientationGyroscope[3] = (float) quatGyro.getScalarPart();

    // Apply the Kalman filter. Note that the prediction and correction
    // inputs could be swapped, but the filter is much more stable in this
    // configuration.
    kalmanFilter.predict(qvOrientationGyroscope);
    kalmanFilter.correct(qvOrientationAccelMag);

    // Apply the new gyroscope delta rotation to the new Kalman filter
    // rotation estimation.
    quatGyro = new Quaternion(kalmanFilter.getStateEstimation()[3],
            Arrays.copyOfRange(kalmanFilter.getStateEstimation(), 0, 3));

    // Now we get a structure we can pass to get a rotation matrix, and then
    // an orientation vector from Android.
    qvFusedOrientation[0] = (float) kalmanFilter.getStateEstimation()[0];
    qvFusedOrientation[1] = (float) kalmanFilter.getStateEstimation()[1];
    qvFusedOrientation[2] = (float) kalmanFilter.getStateEstimation()[2];
    qvFusedOrientation[3] = (float) kalmanFilter.getStateEstimation()[3];

    // We need a rotation matrix so we can get the orientation vector.
    // Getting Euler angles from a quaternion is not trivial, so this is the
    // easiest way, but perhaps not the fastest way of doing this.
    SensorManager.getRotationMatrixFromVector(rmFusedOrientation, qvFusedOrientation);

    // Get the fused orientation
    SensorManager.getOrientation(rmFusedOrientation, vFusedOrientation);
}
From source file:com.kircherelectronics.accelerationexplorer.filter.ImuLaKfQuaternion.java
/**
 * Calculate the fused orientation.
 */
private void calculateFusedOrientation() {
    vectorGyro[0] = (float) quatGyro.getVectorPart()[0];
    vectorGyro[1] = (float) quatGyro.getVectorPart()[1];
    vectorGyro[2] = (float) quatGyro.getVectorPart()[2];
    vectorGyro[3] = (float) quatGyro.getScalarPart();

    // Apply the Kalman filter. Note that the prediction and correction
    // inputs could be swapped, but the filter is much more stable in this
    // configuration.
    kalmanFilter.predict(vectorGyro);
    kalmanFilter.correct(vectorAccelMag);

    // Apply the new gyroscope delta rotation to the new Kalman filter
    // rotation estimation.
    quatGyro = new Quaternion(kalmanFilter.getStateEstimation()[3],
            Arrays.copyOfRange(kalmanFilter.getStateEstimation(), 0, 3));

    // Now we get a structure we can pass to get a rotation matrix, and then
    // an orientation vector from Android.
    fusedVector[0] = (float) kalmanFilter.getStateEstimation()[0];
    fusedVector[1] = (float) kalmanFilter.getStateEstimation()[1];
    fusedVector[2] = (float) kalmanFilter.getStateEstimation()[2];
    fusedVector[3] = (float) kalmanFilter.getStateEstimation()[3];

    // We need a rotation matrix so we can get the orientation vector.
    // Getting Euler angles from a quaternion is not trivial, so this is the
    // easiest way, but perhaps not the fastest way of doing this.
    SensorManager.getRotationMatrixFromVector(fusedMatrix, fusedVector);

    // Get the fused orientation
    SensorManager.getOrientation(fusedMatrix, fusedOrientation);
}
From source file:com.kircherelectronics.accelerationexplorer.filter.ImuLaCfQuaternion.java
/**
 * Calculate the fused orientation.
 */
private void calculateFusedOrientation() {
    float oneMinusCoeff = (1.0f - filterCoefficient);

    // Apply the complementary filter.
    // We multiply each rotation by its coefficient (scalar matrices)...

    // Scale our quaternion for the gyroscope
    quatGyro = quatGyro.multiply(filterCoefficient);

    // Scale our quaternion for the accel/mag
    quatAccelMag = quatAccelMag.multiply(oneMinusCoeff);

    // ...and then add the two quaternions together.
    // output[0] = alpha * output[0] + (1 - alpha) * input[0];
    quatGyro = quatGyro.add(quatAccelMag);

    // Now we get a structure we can pass to get a rotation matrix, and then
    // an orientation vector from Android.
    fusedVector[0] = (float) quatGyro.getVectorPart()[0];
    fusedVector[1] = (float) quatGyro.getVectorPart()[1];
    fusedVector[2] = (float) quatGyro.getVectorPart()[2];
    fusedVector[3] = (float) quatGyro.getScalarPart();

    // We need a rotation matrix so we can get the orientation vector.
    // Getting Euler angles from a quaternion is not trivial, so this is the
    // easiest way, but perhaps not the fastest way of doing this.
    SensorManager.getRotationMatrixFromVector(fusedMatrix, fusedVector);

    // Get the fused orientation
    SensorManager.getOrientation(fusedMatrix, fusedOrientation);
}
From source file:com.thalmic.android.sample.helloworld.HelloWorldActivity.java
public void calculateAngles(float[] result, float[] rVector, float[] accVector, float[] magVector) {
    // Calculate the rotation matrix from the accelerometer and magnetometer readings
    SensorManager.getRotationMatrix(rMatrix, null, accVector, magVector);
    SensorManager.getQuaternionFromVector(quaternion, rVector);

    roll = Math.atan2(2.0f * (quaternion[0] * quaternion[1] + quaternion[2] * quaternion[3]),
            1.0f - 2.0f * (quaternion[1] * quaternion[1] + quaternion[2] * quaternion[2]));
    pitch = Math.asin(2.0f * (quaternion[0] * quaternion[2] - quaternion[3] * quaternion[1]));
    yaw = Math.atan2(2.0f * (quaternion[0] * quaternion[3] + quaternion[1] * quaternion[2]),
            1.0f - 2.0f * (quaternion[2] * quaternion[2] + quaternion[3] * quaternion[3]));

    rollW = ((roll + Math.PI) / (Math.PI * 2.0) * SCALE);
    pitchW = ((pitch + Math.PI / 2.0) / Math.PI * SCALE);
    yawW = ((yaw + Math.PI) / (Math.PI * 2.0) * SCALE);

    // Calculate the Euler angles now
    SensorManager.getOrientation(rMatrix, result);

    // Now we can convert them to degrees
    convertToDegrees(result);
}
From source file:com.shadowmaps.example.GpsTestActivity.java
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
@Override
public void onSensorChanged(SensorEvent event) {
    double orientation = Double.NaN;
    double tilt = Double.NaN;

    switch (event.sensor.getType()) {
    case Sensor.TYPE_ROTATION_VECTOR:
        // Modern rotation vector sensors
        if (!mTruncateVector) {
            try {
                SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
            } catch (IllegalArgumentException e) {
                // On some Samsung devices, an exception is thrown if this vector > 4 (see #39)
                // Truncate the array, since we can deal with only the first four values
                Log.e(TAG, "Samsung device error? Will truncate vectors - " + e);
                mTruncateVector = true;
                // Do the truncation here the first time the exception occurs
                getRotationMatrixFromTruncatedVector(event.values);
            }
        } else {
            // Truncate the array to avoid the exception on some devices (see #39)
            getRotationMatrixFromTruncatedVector(event.values);
        }

        int rot = getWindowManager().getDefaultDisplay().getRotation();
        switch (rot) {
        case Surface.ROTATION_0:
            // No orientation change, use default coordinate system
            SensorManager.getOrientation(mRotationMatrix, mValues);
            break;
        case Surface.ROTATION_90:
            SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_Y,
                    SensorManager.AXIS_MINUS_X, mRemappedMatrix);
            SensorManager.getOrientation(mRemappedMatrix, mValues);
            break;
        case Surface.ROTATION_180:
            SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_X,
                    SensorManager.AXIS_MINUS_Y, mRemappedMatrix);
            SensorManager.getOrientation(mRemappedMatrix, mValues);
            break;
        case Surface.ROTATION_270:
            SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_Y,
                    SensorManager.AXIS_X, mRemappedMatrix);
            SensorManager.getOrientation(mRemappedMatrix, mValues);
            break;
        default:
            // This shouldn't happen - assume default orientation
            SensorManager.getOrientation(mRotationMatrix, mValues);
            break;
        }

        orientation = Math.toDegrees(mValues[0]); // azimuth
        tilt = Math.toDegrees(mValues[1]);
        break;
    case Sensor.TYPE_ORIENTATION:
        // Legacy orientation sensors
        orientation = event.values[0];
        break;
    default:
        // A sensor we're not using, so return
        return;
    }

    // Correct for true north, if preference is set
    if (mFaceTrueNorth && mGeomagneticField != null) {
        orientation += mGeomagneticField.getDeclination();
    }

    for (GpsTestListener listener : mGpsTestListeners) {
        listener.onOrientationChanged(orientation, tilt);
    }
}
From source file:com.thousandthoughts.tutorials.SensorFusionActivity.java
public void calculateAccMagOrientation() {
    if (SensorManager.getRotationMatrix(rotationMatrix, null, accel, magnet)) {
        SensorManager.getOrientation(rotationMatrix, accMagOrientation);
    }
}
From source file:com.thousandthoughts.tutorials.SensorFusionActivity.java
public void gyroFunction(SensorEvent event) {
    // don't start until the first accelerometer/magnetometer orientation has been acquired
    if (accMagOrientation == null)
        return;

    // initialisation of the gyroscope-based rotation matrix
    if (initState) {
        float[] initMatrix = getRotationMatrixFromOrientation(accMagOrientation);
        float[] test = new float[3];
        SensorManager.getOrientation(initMatrix, test);
        gyroMatrix = matrixMultiplication(gyroMatrix, initMatrix);
        initState = false;
    }

    // copy the new gyro values into the gyro array and
    // convert the raw gyro data into a rotation vector
    float[] deltaVector = new float[4];
    if (timestamp != 0) {
        final float dT = (event.timestamp - timestamp) * NS2S;
        System.arraycopy(event.values, 0, gyro, 0, 3);
        getRotationVectorFromGyro(gyro, deltaVector, dT / 2.0f);
    }

    // measurement done, save current time for the next interval
    timestamp = event.timestamp;

    // convert the rotation vector into a rotation matrix
    float[] deltaMatrix = new float[9];
    SensorManager.getRotationMatrixFromVector(deltaMatrix, deltaVector);

    // apply the new rotation interval to the gyroscope-based rotation matrix
    gyroMatrix = matrixMultiplication(gyroMatrix, deltaMatrix);

    // get the gyroscope-based orientation from the rotation matrix
    SensorManager.getOrientation(gyroMatrix, gyroOrientation);
}
From source file:com.android.gpstest.GpsTestActivity.java
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
@Override
public void onSensorChanged(SensorEvent event) {
    double orientation = Double.NaN;
    double tilt = Double.NaN;

    switch (event.sensor.getType()) {
    case Sensor.TYPE_ROTATION_VECTOR:
        // Modern rotation vector sensors
        if (!mTruncateVector) {
            try {
                SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
            } catch (IllegalArgumentException e) {
                // On some Samsung devices, an exception is thrown if this vector > 4 (see #39)
                // Truncate the array, since we can deal with only the first four values
                Log.e(TAG, "Samsung device error? Will truncate vectors - " + e);
                mTruncateVector = true;
                // Do the truncation here the first time the exception occurs
                getRotationMatrixFromTruncatedVector(event.values);
            }
        } else {
            // Truncate the array to avoid the exception on some devices (see #39)
            getRotationMatrixFromTruncatedVector(event.values);
        }

        int rot = getWindowManager().getDefaultDisplay().getRotation();
        switch (rot) {
        case Surface.ROTATION_0:
            // No orientation change, use default coordinate system
            SensorManager.getOrientation(mRotationMatrix, mValues);
            break;
        case Surface.ROTATION_90:
            SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_Y,
                    SensorManager.AXIS_MINUS_X, mRemappedMatrix);
            SensorManager.getOrientation(mRemappedMatrix, mValues);
            break;
        case Surface.ROTATION_180:
            SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_X,
                    SensorManager.AXIS_MINUS_Y, mRemappedMatrix);
            SensorManager.getOrientation(mRemappedMatrix, mValues);
            break;
        case Surface.ROTATION_270:
            SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_Y,
                    SensorManager.AXIS_X, mRemappedMatrix);
            SensorManager.getOrientation(mRemappedMatrix, mValues);
            break;
        default:
            // This shouldn't happen - assume default orientation
            SensorManager.getOrientation(mRotationMatrix, mValues);
            break;
        }

        orientation = Math.toDegrees(mValues[0]); // azimuth
        tilt = Math.toDegrees(mValues[1]);
        break;
    case Sensor.TYPE_ORIENTATION:
        // Legacy orientation sensors
        orientation = event.values[0];
        break;
    default:
        // A sensor we're not using, so return
        return;
    }

    // Correct for true north, if preference is set
    if (mFaceTrueNorth && mGeomagneticField != null) {
        orientation += mGeomagneticField.getDeclination();
        // Make sure the value is between 0-360
        orientation = MathUtils.mod((float) orientation, 360.0f);
    }

    for (GpsTestListener listener : mGpsTestListeners) {
        listener.onOrientationChanged(orientation, tilt);
    }
}
From source file:com.example.zoetablet.BasicFragmentActivity.java
public float[] calculateOrientation() {
    float[] values = new float[3];
    float[] R = new float[9];
    float[] outR = new float[9];

    SensorManager.getRotationMatrix(R, null, aValues, mValues);
    SensorManager.remapCoordinateSystem(R, SensorManager.AXIS_X, SensorManager.AXIS_Y, outR);
    SensorManager.getOrientation(outR, values);

    // Convert from radians to degrees.
    values[0] = (float) Math.toDegrees(values[0]);
    values[1] = (float) Math.toDegrees(values[1]);
    values[2] = (float) Math.toDegrees(values[2]);

    if (values[0] != 0.0 && values[1] != 0.0) {
        // Remember the last good reading
        compassV0 = values[0];
        compassV1 = values[1];
        compassV2 = values[2];
    }

    if (values[0] == 0.0 && values[1] == 0.0) {
        // If the sensor is reporting zeros, keep the last good value
        values[0] = compassV0;
        values[1] = compassV1;
        values[2] = compassV2;
    }

    return values;
}