List of usage examples for android.opengl Matrix rotateM
public static void rotateM(float[] m, int mOffset, float a, float x, float y, float z)
From source file: Main.java
public static void rotateModel(float[] mModelMatrix, Float x, Float y, Float z, boolean rotateAroundCenter, Float width, Float length, Float height) { // translation for rotating the model around its center if (rotateAroundCenter) { Matrix.translateM(mModelMatrix, 0, length / 2f, width / 2f, height / 2f); }/*from www.j av a 2 s .co m*/ if (x != null) { Matrix.rotateM(mModelMatrix, 0, x, 1.0f, 0.0f, 0.0f); } if (y != null) { Matrix.rotateM(mModelMatrix, 0, y, 0.0f, 1.0f, 0.0f); } if (z != null) { Matrix.rotateM(mModelMatrix, 0, z, 0.0f, 0.0f, 1.0f); } // translation back to the origin if (rotateAroundCenter) { Matrix.translateM(mModelMatrix, 0, -length / 2, -width / 2f, -height / 2f); } }
From source file: Main.java
public static void sensorRotation2Matrix(float[] gravity, float[] geomagnetic, int rotation, float[] output) { switch (rotation) { case Surface.ROTATION_0: case Surface.ROTATION_180: /* Notice: not supported for ROTATION_180! */ SensorManager.getRotationMatrix(output, null, gravity, geomagnetic); break;// ww w . ja va 2 s . c o m case Surface.ROTATION_90: SensorManager.getRotationMatrix(mTmp, null, gravity, geomagnetic); SensorManager.remapCoordinateSystem(mTmp, SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X, output); break; case Surface.ROTATION_270: SensorManager.getRotationMatrix(mTmp, null, gravity, geomagnetic); SensorManager.remapCoordinateSystem(mTmp, SensorManager.AXIS_MINUS_Y, SensorManager.AXIS_X, output); break; } Matrix.rotateM(output, 0, 90.0F, 1.0F, 0.0F, 0.0F); }
From source file: Main.java
/**
 * Converts a rotation-vector SensorEvent into a rotation matrix, remapped for the current
 * display rotation and tilted 90 degrees about X into the rendering frame.
 *
 * NOTE(review): ROTATION_180 is handled the same as ROTATION_0 and is explicitly not
 * supported, per the original author's comment.
 *
 * @param event    a TYPE_ROTATION_VECTOR sensor event.
 * @param rotation one of the Surface.ROTATION_* constants.
 * @param output   16-element array receiving the resulting matrix.
 */
public static void sensorRotationVector2Matrix(SensorEvent event, int rotation, float[] output) {
    float[] values = event.values;
    if (rotation == Surface.ROTATION_90) {
        SensorManager.getRotationMatrixFromVector(mTmp, values);
        SensorManager.remapCoordinateSystem(mTmp, SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X, output);
    } else if (rotation == Surface.ROTATION_270) {
        SensorManager.getRotationMatrixFromVector(mTmp, values);
        SensorManager.remapCoordinateSystem(mTmp, SensorManager.AXIS_MINUS_Y, SensorManager.AXIS_X, output);
    } else if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) {
        // Notice: not supported for ROTATION_180!
        SensorManager.getRotationMatrixFromVector(output, values);
    }
    // Tilt from the sensor frame into the rendering frame.
    Matrix.rotateM(output, 0, 90.0F, 1.0F, 0.0F, 0.0F);
}
From source file: Main.java
public static void sensorRotationVector2Matrix(SensorEvent event, int rotation, float[] output) { if (!sIsTruncated) { try {//from www . j av a2s .co m SensorManager.getRotationMatrixFromVector(sUIThreadTmp, event.values); } catch (Exception e) { // On some Samsung devices, SensorManager#getRotationMatrixFromVector throws an exception // if the rotation vector has more than 4 elements. Since only the four first elements are used, // we can truncate the vector without losing precision. Log.e(TAG, "maybe Samsung bug, will truncate vector"); sIsTruncated = true; } } if (sIsTruncated) { System.arraycopy(event.values, 0, sTruncatedVector, 0, 4); SensorManager.getRotationMatrixFromVector(sUIThreadTmp, sTruncatedVector); } float[] values = event.values; switch (rotation) { case Surface.ROTATION_0: case Surface.ROTATION_180: /* Notice: not supported for ROTATION_180! */ SensorManager.getRotationMatrixFromVector(output, values); break; case Surface.ROTATION_90: SensorManager.getRotationMatrixFromVector(sUIThreadTmp, values); SensorManager.remapCoordinateSystem(sUIThreadTmp, SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X, output); break; case Surface.ROTATION_270: SensorManager.getRotationMatrixFromVector(sUIThreadTmp, values); SensorManager.remapCoordinateSystem(sUIThreadTmp, SensorManager.AXIS_MINUS_Y, SensorManager.AXIS_X, output); break; } Matrix.rotateM(output, 0, 90.0F, 1.0F, 0.0F, 0.0F); }
From source file: com.kentdisplays.synccardboarddemo.Page.java
/**
 * Sets up the drawing object data for use in an OpenGL ES context.
 *
 * Each path from the page is turned into a thin quad (two triangles, six vertices)
 * oriented along the stroke, then all vertex/normal/color data is packed into direct
 * native-order FloatBuffers and the model matrix is positioned according to
 * {@code direction}.
 *
 * @param is        InputStream to the page to load the path data from.
 * @param glProgram handle of the compiled GL program used to draw this page.
 * @param direction which wall the page is placed on: 0 = front, 1 = left,
 *                  2 = behind, 3 = right.
 */
public Page(InputStream is, int glProgram, int direction) {
    this.mModel = new float[16];
    this.mGlProgram = glProgram;

    // Calculate the coordinates from the given path.
    ArrayList<Path> paths = pathsFromSamplePageInputStream(is);
    float finalCoords[] = {};
    float finalNormals[] = {};
    float finalColors[] = {};
    mNumberOfPaths = paths.size();
    for (int i = 0; i < mNumberOfPaths; i++) {
        Path path = paths.get(i);
        // Normalize raw path coordinates into clip-like [-1, 1] range.
        // 13942 x 20280 is presumably the digitizer's native resolution — TODO confirm.
        float x1 = (path.x1 / 13942 * 2) - 1;
        float y1 = (path.y1 / 20280 * 2) - 1;
        float x2 = (path.x2 / 13942 * 2) - 1;
        float y2 = (path.y2 / 20280 * 2) - 1;
        float width = path.width / 3000;
        width = width < 0.013f ? 0.013f : width; // Width should be at least 0.013

        // Length of the stroke segment; used to find its direction angle.
        float distance = (float) Math.sqrt(Math.pow(x2 - x1, 2) + Math.pow(y2 - y1, 2));
        // NOTE(review): distance can be 0 for degenerate (zero-length) segments, which
        // would make angle NaN — verify that input paths never contain such segments.
        float angle = (float) Math.PI / 2 - (float) Math.asin((x2 - x1) / distance);
        // Half-width offsets perpendicular to the stroke direction.
        float xdiff = (width / 2) * (float) Math.sin(angle);
        float ydiff = (width / 2) * (float) Math.cos(angle);

        // Two triangles forming the stroke quad, at z = 1.
        float coords[] = {
                x1 - xdiff, y1 - ydiff, 1.0f, // top left
                x2 - xdiff, y2 - ydiff, 1.0f, // bottom left
                x1 + xdiff, y1 + ydiff, 1.0f, // top right
                x2 - xdiff, y2 - ydiff, 1.0f, // bottom left
                x2 + xdiff, y2 + ydiff, 1.0f, // bottom right
                x1 + xdiff, y1 + ydiff, 1.0f, // top right
        };
        // All normals face +Z (the quad is flat).
        float normals[] = {
                0.0f, 0.0f, 1.0f,
                0.0f, 0.0f, 1.0f,
                0.0f, 0.0f, 1.0f,
                0.0f, 0.0f, 1.0f,
                0.0f, 0.0f, 1.0f,
                0.0f, 0.0f, 1.0f,
        };
        // Uniform light-blue RGBA color for every vertex.
        float colors[] = {
                0.2f, 0.709803922f, 0.898039216f, 1.0f,
                0.2f, 0.709803922f, 0.898039216f, 1.0f,
                0.2f, 0.709803922f, 0.898039216f, 1.0f,
                0.2f, 0.709803922f, 0.898039216f, 1.0f,
                0.2f, 0.709803922f, 0.898039216f, 1.0f,
                0.2f, 0.709803922f, 0.898039216f, 1.0f,
        };
        finalCoords = Floats.concat(finalCoords, coords);
        finalNormals = Floats.concat(finalNormals, normals);
        finalColors = Floats.concat(finalColors, colors);
    }

    // Pack vertex data into direct, native-order buffers (4 bytes per float).
    ByteBuffer bbVertices = ByteBuffer.allocateDirect(finalCoords.length * 4);
    bbVertices.order(ByteOrder.nativeOrder());
    mPageVertices = bbVertices.asFloatBuffer();
    mPageVertices.put(finalCoords);
    mPageVertices.position(0);

    ByteBuffer bbNormals = ByteBuffer.allocateDirect(finalNormals.length * 4);
    bbNormals.order(ByteOrder.nativeOrder());
    mPageNormals = bbNormals.asFloatBuffer();
    mPageNormals.put(finalNormals);
    mPageNormals.position(0);

    ByteBuffer bbColors = ByteBuffer.allocateDirect(finalColors.length * 4);
    bbColors.order(ByteOrder.nativeOrder());
    mPageColors = bbColors.asFloatBuffer();
    mPageColors.put(finalColors);
    mPageColors.position(0);

    // Correctly place the page in the world.
    Matrix.setIdentityM(mModel, 0);
    switch (direction) {
        case 0:
            Matrix.translateM(mModel, 0, 0, 0, -mDistance); //Front.
            break;
        case 1:
            Matrix.translateM(mModel, 0, -mDistance, 0, 0); // Left.
            Matrix.rotateM(mModel, 0, 90, 0, 1f, 0);
            break;
        case 2:
            Matrix.translateM(mModel, 0, 0, 0, mDistance); // Behind.
            Matrix.rotateM(mModel, 0, 180, 0, 1f, 0);
            break;
        case 3:
            Matrix.translateM(mModel, 0, mDistance, 0, 0); // Right.
            Matrix.rotateM(mModel, 0, 270, 0, 1f, 0);
            break;
    }
}
From source file: com.projecttango.examples.java.openglar.OpenGlAugmentedRealityActivity.java
/**
 * Here is where you would set up your rendering logic. We're replacing it with a
 * minimalistic, dummy example, using a standard GLSurfaceView and a basic renderer,
 * for illustration purposes only.
 *
 * Creates the renderer with a pre-render callback that (on the GL thread) configures
 * the projection matrix from the camera intrinsics, connects the Tango color camera to
 * the renderer's texture, updates the view matrix from the latest camera pose, and
 * animates the Earth/Moon transforms; then initializes the Earth/Moon starting poses.
 */
private void setupRenderer() {
    mSurfaceView.setEGLContextClientVersion(2);
    mRenderer = new OpenGlAugmentedRealityRenderer(this,
            new OpenGlAugmentedRealityRenderer.RenderCallback() {
                // Timestamp of the last RGB frame rendered; used to compute deltaTime
                // for the Earth/Moon rotation animation.
                private double lastRenderedTimeStamp;

                @Override
                public void preRender() {
                    // This is the work that you would do on your main OpenGL render thread.
                    try {
                        // Synchronize against concurrently disconnecting the service triggered
                        // from the UI thread.
                        synchronized (OpenGlAugmentedRealityActivity.this) {
                            // We need to be careful not to run any Tango-dependent code in the
                            // OpenGL thread unless we know the Tango Service is properly
                            // set up and connected.
                            if (!mIsConnected) {
                                return;
                            }

                            // Set up scene camera projection to match RGB camera intrinsics.
                            if (!mRenderer.isProjectionMatrixConfigured()) {
                                TangoCameraIntrinsics intrinsics = TangoSupport
                                        .getCameraIntrinsicsBasedOnDisplayRotation(
                                                TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
                                                mDisplayRotation);
                                mRenderer.setProjectionMatrix(
                                        projectionMatrixFromCameraIntrinsics(intrinsics));
                            }

                            // Connect the Tango SDK to the OpenGL texture ID where we are
                            // going to render the camera.
                            // NOTE: This must be done after both the texture is generated
                            // and the Tango Service is connected.
                            if (mConnectedTextureIdGlThread != mRenderer.getTextureId()) {
                                mTango.connectTextureId(TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
                                        mRenderer.getTextureId());
                                mConnectedTextureIdGlThread = mRenderer.getTextureId();
                                Log.d(TAG, "connected to texture id: " + mRenderer.getTextureId());
                            }

                            // If there is a new RGB camera frame available, update the texture
                            // and scene camera pose.
                            if (mIsFrameAvailableTangoThread.compareAndSet(true, false)) {
                                // {@code mRgbTimestampGlThread} contains the exact timestamp at
                                // which the rendered RGB frame was acquired.
                                mRgbTimestampGlThread =
                                        mTango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);

                                // Get the transform from color camera to Start of Service
                                // at the timestamp of the RGB image in OpenGL coordinates.
                                //
                                // When drift correction mode is enabled in config file, we need
                                // to query the device with respect to Area Description pose in
                                // order to use the drift-corrected pose.
                                //
                                // Note that if you don't want to use the drift corrected pose,
                                // the normal device with respect to start of service pose is
                                // still available.
                                TangoSupport.TangoMatrixTransformData transform =
                                        TangoSupport.getMatrixTransformAtTime(
                                                mRgbTimestampGlThread,
                                                TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION,
                                                TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,
                                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL,
                                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL,
                                                mDisplayRotation);
                                if (transform.statusCode == TangoPoseData.POSE_VALID) {
                                    mRenderer.updateViewMatrix(transform.matrix);
                                    double deltaTime = mRgbTimestampGlThread - lastRenderedTimeStamp;
                                    lastRenderedTimeStamp = mRgbTimestampGlThread;

                                    // Set the earth rotation around itself.
                                    // 360 / 10: one full revolution every 10 seconds.
                                    float[] openGlTEarth = new float[16];
                                    Matrix.rotateM(mEarthMoonCenterTEarth, 0,
                                            (float) deltaTime * 360 / 10, 0, 1, 0);
                                    Matrix.multiplyMM(openGlTEarth, 0, mOpenGLTEarthMoonCenter, 0,
                                            mEarthMoonCenterTEarth, 0);

                                    // Set moon rotation around the earth and moon center.
                                    // 360 / 50: one full orbit every 50 seconds.
                                    float[] openGlTMoon = new float[16];
                                    Matrix.rotateM(mEarthMoonCenterTMoonRotation, 0,
                                            (float) deltaTime * 360 / 50, 0, 1, 0);
                                    float[] mEarthTMoon = new float[16];
                                    Matrix.multiplyMM(mEarthTMoon, 0, mEarthMoonCenterTMoonRotation, 0,
                                            mEarthMoonCenterTTranslation, 0);
                                    Matrix.multiplyMM(openGlTMoon, 0, mOpenGLTEarthMoonCenter, 0,
                                            mEarthTMoon, 0);

                                    mRenderer.setEarthTransform(openGlTEarth);
                                    mRenderer.setMoonTransform(openGlTMoon);
                                } else {
                                    // When the pose status is not valid, it indicates tracking
                                    // has been lost. In this case, we simply stop rendering.
                                    //
                                    // This is also the place to display UI to suggest that the
                                    // user walk to recover tracking.
                                    Log.w(TAG, "Could not get a valid transform at time "
                                            + mRgbTimestampGlThread);
                                }
                            }
                        }
                        // Avoid crashing the application due to unhandled exceptions.
                    } catch (TangoErrorException e) {
                        Log.e(TAG, "Tango API call error within the OpenGL render thread", e);
                    } catch (Throwable t) {
                        Log.e(TAG, "Exception on the OpenGL thread", t);
                    }
                }
            });

    // Set the starting position and orientation of the Earth and Moon with respect to the
    // OpenGL frame.
    Matrix.setIdentityM(mOpenGLTEarthMoonCenter, 0);
    Matrix.translateM(mOpenGLTEarthMoonCenter, 0, 0, 0, -1f);
    Matrix.setIdentityM(mEarthMoonCenterTEarth, 0);
    Matrix.setIdentityM(mEarthMoonCenterTMoonRotation, 0);
    Matrix.setIdentityM(mEarthMoonCenterTTranslation, 0);
    Matrix.translateM(mEarthMoonCenterTTranslation, 0, 0.5f, 0, 0);

    mSurfaceView.setRenderer(mRenderer);
}
From source file: com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java
private void updateMiniCubePosition(float pitch, float yaw, float roll) { float[] temp_modelMiniCube = new float[16]; float[] temp_mRotate = new float[16]; Matrix.setIdentityM(temp_modelMiniCube, 0); Matrix.translateM(temp_modelMiniCube, 0, -handPos[0] / (float) 50.0, -handPos[2] / (float) 50.0, -handPos[1] / (float) 50.0); Matrix.setIdentityM(temp_mRotate, 0); Matrix.rotateM(temp_mRotate, 0, 45, 1, 0, 0); //This rotates the cube //quaternionToMatrix(temp_mRotate, x, y, z, w); Matrix.multiplyMM(modelMiniCube, 0, temp_mRotate, 0, temp_modelMiniCube, 0); }
From source file: com.projecttango.examples.java.modelcorrespondence.ModelCorrespondenceActivity.java
/** * Calculate the transform needed to place the model in the upper left corner of the camera, * and rotate it to show the next point to make the correspondence. *//*from www . j ava 2 s. c o m*/ private float[] calculateModelTransformFixedToCam(int mDisplayRotation) { // Translate to the upper left corner and ahead of the cam if the device is in landscape // mode or to the upper center if it is in portrait mode. float[] rgbTHouse = new float[16]; Matrix.setIdentityM(rgbTHouse, 0); if (mDisplayRotation == Surface.ROTATION_0 || mDisplayRotation == Surface.ROTATION_180) { Matrix.translateM(rgbTHouse, 0, 0f, 1.2f, -4); } else { Matrix.translateM(rgbTHouse, 0, -1.5f, 0.3f, -4); } // Rotate it 180 degrees around the Z axis to show the front of the house as default // orientation. Matrix.rotateM(rgbTHouse, 0, 180, 0, 0, 1); // Rotate it around the X axis so it looks better as seen from above. Matrix.rotateM(rgbTHouse, 0, 70, 1, 0, 0); // Rotate it around the Z axis to show the next correspondence point to be added. Matrix.rotateM(rgbTHouse, 0, -mModelZRotation, 0, 0, 1); // Scale it to a proper size. Matrix.scaleM(rgbTHouse, 0, 0.03f, 0.03f, 0.03f); return rgbTHouse; }