List of usage examples for android.opengl.Matrix.setIdentityM
public static void setIdentityM(float[] sm, int smOffset)
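A minimal sketch of the basic call, using only the documented API: setIdentityM overwrites 16 floats starting at smOffset with a column-major identity matrix, so the array must hold at least smOffset + 16 elements.

    // Minimal sketch: fill a 16-float array with the identity matrix.
    float[] m = new float[16];
    Matrix.setIdentityM(m, 0);
    // m is now column-major identity: m[0], m[5], m[10], m[15] are 1.0f, everything else 0.0f.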
From source file:com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java
private void updateMiniCubePosition(float x, float y, float z, float w) {
    //Matrix.setIdentityM(modelMiniCube, 0);
    //We normalize the distance as well
    //Matrix.translateM(modelMiniCube, 0, handPos[0]/(float)50.0, handPos[1]/(float)50.0, handPos[2]/(float)50.0);
    //This accounts for the reversing
    //Matrix.translateM(modelMiniCube, 0, -handPos[0]/(float)50.0, -handPos[2]/(float)50.0, -handPos[1]/(float)50.0);
    float[] temp_modelMiniCube = new float[16];
    float[] temp_mRotate = new float[16];
    Matrix.setIdentityM(temp_modelMiniCube, 0);
    Matrix.translateM(temp_modelMiniCube, 0, -handPos[0] / (float) 50.0, -handPos[2] / (float) 50.0,
            -handPos[1] / (float) 50.0);
    //Matrix.setIdentityM(temp_mRotate, 0);
    //Matrix.rotateM(temp_mRotate, 0, 45, 1, 0, 0); //This rotates the cube
    quaternionToMatrix(temp_mRotate, -x, -y, -z, w);
    Matrix.multiplyMM(modelMiniCube, 0, temp_mRotate, 0, temp_modelMiniCube, 0);
    Log.i("Armo", String.format("%f", -handPos[1] / (float) 50));
    // Offset because this is the cube center; we want the wall position.
    if (-handPos[1] / (float) 50 < -WALL_DIST + 1) {
        sendArmoRequest(true);
        Log.i("Armo", "Sending_lock");
    } else {
        sendArmoRequest(false);
        Log.i("Armo", "Sending_unlock");
    }
}
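The example above calls a project-specific quaternionToMatrix helper that is not part of android.opengl.Matrix and whose body is not shown. A plausible sketch, assuming (x, y, z, w) is a unit quaternion and the output is a column-major 4x4 rotation as android.opengl.Matrix expects:

    // Hypothetical sketch of the quaternionToMatrix helper (not shown in the source).
    // Assumes a unit quaternion (x, y, z, w); writes a column-major rotation matrix.
    private static void quaternionToMatrix(float[] m, float x, float y, float z, float w) {
        Matrix.setIdentityM(m, 0);
        m[0] = 1f - 2f * (y * y + z * z);
        m[1] = 2f * (x * y + z * w);
        m[2] = 2f * (x * z - y * w);
        m[4] = 2f * (x * y - z * w);
        m[5] = 1f - 2f * (x * x + z * z);
        m[6] = 2f * (y * z + x * w);
        m[8] = 2f * (x * z + y * w);
        m[9] = 2f * (y * z - x * w);
        m[10] = 1f - 2f * (x * x + y * y);
    }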
From source file:com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java
private void updateMiniCubePosition(float pitch, float yaw, float roll) {
    float[] temp_modelMiniCube = new float[16];
    float[] temp_mRotate = new float[16];
    Matrix.setIdentityM(temp_modelMiniCube, 0);
    Matrix.translateM(temp_modelMiniCube, 0, -handPos[0] / (float) 50.0, -handPos[2] / (float) 50.0,
            -handPos[1] / (float) 50.0);
    Matrix.setIdentityM(temp_mRotate, 0);
    Matrix.rotateM(temp_mRotate, 0, 45, 1, 0, 0); // This rotates the cube
    //quaternionToMatrix(temp_mRotate, x, y, z, w);
    Matrix.multiplyMM(modelMiniCube, 0, temp_mRotate, 0, temp_modelMiniCube, 0);
}
From source file:com.projecttango.examples.java.openglar.OpenGlAugmentedRealityActivity.java
/**
 * Here is where you would set up your rendering logic. We're replacing it with a minimalistic,
 * dummy example, using a standard GLSurfaceView and a basic renderer, for illustration purposes
 * only.
 */
private void setupRenderer() {
    mSurfaceView.setEGLContextClientVersion(2);
    mRenderer = new OpenGlAugmentedRealityRenderer(this,
            new OpenGlAugmentedRealityRenderer.RenderCallback() {
                private double lastRenderedTimeStamp;

                @Override
                public void preRender() {
                    // This is the work that you would do on your main OpenGL render thread.
                    try {
                        // Synchronize against concurrently disconnecting the service triggered
                        // from the UI thread.
                        synchronized (OpenGlAugmentedRealityActivity.this) {
                            // We need to be careful not to run any Tango-dependent code in the
                            // OpenGL thread unless we know the Tango Service is properly
                            // set up and connected.
                            if (!mIsConnected) {
                                return;
                            }

                            // Set up scene camera projection to match RGB camera intrinsics.
                            if (!mRenderer.isProjectionMatrixConfigured()) {
                                TangoCameraIntrinsics intrinsics =
                                        TangoSupport.getCameraIntrinsicsBasedOnDisplayRotation(
                                                TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
                                                mDisplayRotation);
                                mRenderer.setProjectionMatrix(
                                        projectionMatrixFromCameraIntrinsics(intrinsics));
                            }

                            // Connect the Tango SDK to the OpenGL texture ID where we are
                            // going to render the camera.
                            // NOTE: This must be done after both the texture is generated
                            // and the Tango Service is connected.
                            if (mConnectedTextureIdGlThread != mRenderer.getTextureId()) {
                                mTango.connectTextureId(TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
                                        mRenderer.getTextureId());
                                mConnectedTextureIdGlThread = mRenderer.getTextureId();
                                Log.d(TAG, "connected to texture id: " + mRenderer.getTextureId());
                            }

                            // If there is a new RGB camera frame available, update the texture
                            // and scene camera pose.
                            if (mIsFrameAvailableTangoThread.compareAndSet(true, false)) {
                                // {@code mRgbTimestampGlThread} contains the exact timestamp at
                                // which the rendered RGB frame was acquired.
                                mRgbTimestampGlThread =
                                        mTango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);

                                // Get the transform from color camera to Start of Service
                                // at the timestamp of the RGB image in OpenGL coordinates.
                                //
                                // When drift correction mode is enabled in the config file, we
                                // need to query the device with respect to the Area Description
                                // pose in order to use the drift-corrected pose.
                                //
                                // Note that if you don't want to use the drift-corrected pose,
                                // the normal device with respect to start of service pose is
                                // still available.
                                TangoSupport.TangoMatrixTransformData transform =
                                        TangoSupport.getMatrixTransformAtTime(
                                                mRgbTimestampGlThread,
                                                TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION,
                                                TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,
                                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL,
                                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL,
                                                mDisplayRotation);
                                if (transform.statusCode == TangoPoseData.POSE_VALID) {
                                    mRenderer.updateViewMatrix(transform.matrix);
                                    double deltaTime = mRgbTimestampGlThread - lastRenderedTimeStamp;
                                    lastRenderedTimeStamp = mRgbTimestampGlThread;

                                    // Set the earth rotation around itself.
                                    float[] openGlTEarth = new float[16];
                                    Matrix.rotateM(mEarthMoonCenterTEarth, 0,
                                            (float) deltaTime * 360 / 10, 0, 1, 0);
                                    Matrix.multiplyMM(openGlTEarth, 0, mOpenGLTEarthMoonCenter, 0,
                                            mEarthMoonCenterTEarth, 0);

                                    // Set moon rotation around the earth and moon center.
                                    float[] openGlTMoon = new float[16];
                                    Matrix.rotateM(mEarthMoonCenterTMoonRotation, 0,
                                            (float) deltaTime * 360 / 50, 0, 1, 0);
                                    float[] mEarthTMoon = new float[16];
                                    Matrix.multiplyMM(mEarthTMoon, 0, mEarthMoonCenterTMoonRotation, 0,
                                            mEarthMoonCenterTTranslation, 0);
                                    Matrix.multiplyMM(openGlTMoon, 0, mOpenGLTEarthMoonCenter, 0,
                                            mEarthTMoon, 0);

                                    mRenderer.setEarthTransform(openGlTEarth);
                                    mRenderer.setMoonTransform(openGlTMoon);
                                } else {
                                    // When the pose status is not valid, it indicates tracking
                                    // has been lost. In this case, we simply stop rendering.
                                    //
                                    // This is also the place to display UI to suggest that the
                                    // user walk to recover tracking.
                                    Log.w(TAG, "Could not get a valid transform at time "
                                            + mRgbTimestampGlThread);
                                }
                            }
                        }
                        // Avoid crashing the application due to unhandled exceptions.
                    } catch (TangoErrorException e) {
                        Log.e(TAG, "Tango API call error within the OpenGL render thread", e);
                    } catch (Throwable t) {
                        Log.e(TAG, "Exception on the OpenGL thread", t);
                    }
                }
            });

    // Set the starting position and orientation of the Earth and Moon with respect to the
    // OpenGL frame.
    Matrix.setIdentityM(mOpenGLTEarthMoonCenter, 0);
    Matrix.translateM(mOpenGLTEarthMoonCenter, 0, 0, 0, -1f);
    Matrix.setIdentityM(mEarthMoonCenterTEarth, 0);
    Matrix.setIdentityM(mEarthMoonCenterTMoonRotation, 0);
    Matrix.setIdentityM(mEarthMoonCenterTTranslation, 0);
    Matrix.translateM(mEarthMoonCenterTTranslation, 0, 0.5f, 0, 0);
    mSurfaceView.setRenderer(mRenderer);
}
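The projectionMatrixFromCameraIntrinsics helper referenced above is not shown in this listing. A hedged sketch of one common way to build such a projection from pinhole intrinsics with Matrix.frustumM, assuming fx/fy are focal lengths in pixels, (cx, cy) is the principal point measured from the top-left, and near/far are chosen clip planes:

    // Hypothetical sketch; the actual helper in the sample may differ in convention.
    private static float[] projectionFromIntrinsics(double fx, double fy, double cx, double cy,
                                                    double width, double height,
                                                    float near, float far) {
        float xScale = (float) (near / fx);
        float yScale = (float) (near / fy);
        float[] m = new float[16];
        Matrix.frustumM(m, 0,
                (float) (-cx * xScale),             // left
                (float) ((width - cx) * xScale),    // right
                (float) (-(height - cy) * yScale),  // bottom
                (float) (cy * yScale),              // top
                near, far);
        return m;
    }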
From source file:com.projecttango.examples.java.occlusion.OcclusionActivity.java
/**
 * Calculates a transformation matrix based on a point, a normal and the up gravity vector.
 * The coordinate frame of the target transformation will be a right-handed system with Z+ in
 * the direction of the normal and Y+ up.
 */
private float[] matrixFromPointNormalUp(double[] point, double[] normal, float[] up) {
    float[] zAxis = new float[] { (float) normal[0], (float) normal[1], (float) normal[2] };
    normalize(zAxis);
    float[] xAxis = crossProduct(up, zAxis);
    normalize(xAxis);
    float[] yAxis = crossProduct(zAxis, xAxis);
    normalize(yAxis);
    float[] m = new float[16];
    Matrix.setIdentityM(m, 0);
    m[0] = xAxis[0];
    m[1] = xAxis[1];
    m[2] = xAxis[2];
    m[4] = yAxis[0];
    m[5] = yAxis[1];
    m[6] = yAxis[2];
    m[8] = zAxis[0];
    m[9] = zAxis[1];
    m[10] = zAxis[2];
    m[12] = (float) point[0];
    m[13] = (float) point[1];
    m[14] = (float) point[2];
    return m;
}
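The normalize and crossProduct helpers the method relies on are project code, not part of android.opengl.Matrix. A minimal sketch of what they might look like:

    // Hypothetical sketches of the vector helpers assumed above.
    private static void normalize(float[] v) {
        float len = Matrix.length(v[0], v[1], v[2]);
        if (len != 0f) {
            v[0] /= len;
            v[1] /= len;
            v[2] /= len;
        }
    }

    private static float[] crossProduct(float[] a, float[] b) {
        return new float[] {
                a[1] * b[2] - a[2] * b[1],
                a[2] * b[0] - a[0] * b[2],
                a[0] * b[1] - a[1] * b[0]
        };
    }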
From source file:com.projecttango.examples.java.modelcorrespondence.ModelCorrespondenceActivity.java
/**
 * Calculate the transform needed to place the model in the upper left corner of the camera,
 * and rotate it to show the next point to make the correspondence.
 */
private float[] calculateModelTransformFixedToCam(int mDisplayRotation) {
    // Translate to the upper left corner and ahead of the cam if the device is in landscape
    // mode, or to the upper center if it is in portrait mode.
    float[] rgbTHouse = new float[16];
    Matrix.setIdentityM(rgbTHouse, 0);
    if (mDisplayRotation == Surface.ROTATION_0 || mDisplayRotation == Surface.ROTATION_180) {
        Matrix.translateM(rgbTHouse, 0, 0f, 1.2f, -4);
    } else {
        Matrix.translateM(rgbTHouse, 0, -1.5f, 0.3f, -4);
    }
    // Rotate it 180 degrees around the Z axis to show the front of the house as the default
    // orientation.
    Matrix.rotateM(rgbTHouse, 0, 180, 0, 0, 1);
    // Rotate it around the X axis so it looks better as seen from above.
    Matrix.rotateM(rgbTHouse, 0, 70, 1, 0, 0);
    // Rotate it around the Z axis to show the next correspondence point to be added.
    Matrix.rotateM(rgbTHouse, 0, -mModelZRotation, 0, 0, 1);
    // Scale it to a proper size.
    Matrix.scaleM(rgbTHouse, 0, 0.03f, 0.03f, 0.03f);
    return rgbTHouse;
}
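Note that translateM, rotateM and scaleM post-multiply the matrix in place, so the call written last is the transform applied first to a model-space point. A minimal usage sketch with hypothetical values:

    // Apply the resulting column-major matrix to a homogeneous model-space point.
    float[] houseLocal = { 1f, 0f, 0f, 1f };
    float[] houseInCam = new float[4];
    float[] rgbTHouse = calculateModelTransformFixedToCam(Surface.ROTATION_0);
    Matrix.multiplyMV(houseInCam, 0, rgbTHouse, 0, houseLocal, 0);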
From source file:com.tumblr.cardboard.Tumblr3DActivity.java
/**
 * Loads a bitmap into OpenGL.
 *
 * @param texIndex the desired texture index
 * @param bitmap   the bitmap to put into OpenGL
 */
private void loadTextureInternal(int texIndex, Bitmap bitmap, boolean recycle) {
    GLES20.glGenTextures(1, mTextureIds, texIndex);
    Log.d(TAG, "loading texture: " + texIndex + " -> " + mTextureIds[texIndex]);
    if (mTextureIds[texIndex] != INVALID_TEXTURE && bitmap != null && !bitmap.isRecycled()) {
        // Set the active texture unit
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + texIndex);

        Matrix.setIdentityM(mImageRect[texIndex], 0);
        Matrix.scaleM(mImageRect[texIndex], 0, 1f, (float) bitmap.getHeight() / bitmap.getWidth(), 1f);

        // Bind to the texture in OpenGL
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[texIndex]);

        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        // Set filtering
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

        // Load the bitmap into the bound texture.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

        mRectTextureIds[texIndex] = mTextureIds[texIndex];
    } else {
        Log.w(TAG, "Failed to load: " + texIndex);
    }
    if (mTextureIds[texIndex] == INVALID_TEXTURE) {
        Log.e(TAG, "Error loading texture.");
    }
}
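Here setIdentityM resets the per-texture mImageRect matrix before scaleM bakes in the bitmap's aspect ratio. How that matrix is consumed is not shown in this listing; a hypothetical sketch of folding it into a model matrix at draw time, where viewProjection is an assumed matrix provided elsewhere:

    // Hypothetical draw-time composition; names other than mImageRect/texIndex are assumptions.
    float[] model = new float[16];
    Matrix.setIdentityM(model, 0);
    Matrix.translateM(model, 0, 0f, 0f, -2f);   // place the photo quad in front of the camera
    float[] scaledModel = new float[16];
    Matrix.multiplyMM(scaledModel, 0, model, 0, mImageRect[texIndex], 0);
    float[] mvp = new float[16];
    Matrix.multiplyMM(mvp, 0, viewProjection, 0, scaledModel, 0);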
From source file:com.tumblr.cardboard.Tumblr3DActivity.java
private void updateTexture(int texIndex, Bitmap bitmap) {
    if (mTextureIds[texIndex] != INVALID_TEXTURE && bitmap != null && !bitmap.isRecycled()) {
        // Set the active texture unit
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + texIndex);

        Matrix.setIdentityM(mImageRect[texIndex], 0);
        Matrix.scaleM(mImageRect[texIndex], 0, 1f, (float) bitmap.getHeight() / bitmap.getWidth(), 1f);

        // Bind to the texture in OpenGL
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[texIndex]);

        // Load the bitmap into the bound texture.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
    } else {
        Log.w(TAG, "Failed to update: " + texIndex + " val: " + mTextureIds[texIndex]);
    }
}