List of usage examples for android.opengl.Matrix.multiplyMV
public static native void multiplyMV(float[] resultVec, int resultVecOffset, float[] lhsMat, int lhsMatOffset, float[] rhsVec, int rhsVecOffset);
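multiplyMV multiplies a four-element column vector (rhsVec) by a 4x4 column-major matrix (lhsMat) and writes the four-element result into resultVec, starting at the given offsets. A minimal sketch of transforming a point by a model matrix (the variable names here are illustrative, not taken from any of the examples below):

float[] model = new float[16];
Matrix.setIdentityM(model, 0);
Matrix.translateM(model, 0, 1f, 2f, 3f);   // translate by (1, 2, 3)

float[] point = { 0f, 0f, 0f, 1f };        // w = 1 for a position, 0 for a direction
float[] result = new float[4];
Matrix.multiplyMV(result, 0, model, 0, point, 0);
// result is now { 1f, 2f, 3f, 1f }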
From source file: Main.java

public static float[] multMatrixVector(float[] m1, float[] m2) {
    float[] m3 = new float[4];
    float[] m1i = new float[16];
    // Transpose m1 first, so the call below computes transpose(m1) * m2.
    Matrix.transposeM(m1i, 0, m1, 0);
    Matrix.multiplyMV(m3, 0, m1i, 0, m2, 0);
    return m3;
}
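Because the helper transposes m1 before calling multiplyMV, it computes transpose(m1) * m2 rather than m1 * m2; for an orthonormal rotation matrix the transpose equals the inverse, which appears to be the intent behind the m1i name. A hypothetical usage sketch under that assumption:

float[] rotation = new float[16];
Matrix.setRotateM(rotation, 0, 90f, 0f, 1f, 0f);   // 90 degrees about the Y axis
float[] worldDir = { 1f, 0f, 0f, 0f };             // w = 0: a direction, not a point
// Applies the inverse rotation, since multMatrixVector transposes before multiplying.
float[] localDir = multMatrixVector(rotation, worldDir);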
From source file: com.kentdisplays.synccardboarddemo.MainActivity.java

/**
 * Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
 * a parameter.
 *
 * @param transform The transformations to apply to render this eye.
 */
@Override
public void onDrawEye(EyeTransform transform) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    GLES20.glClearColor(0f, 0f, 0f, 1.00f); // Dark background so text shows up well

    mPositionParam = GLES20.glGetAttribLocation(mGlProgram, "a_Position");
    mNormalParam = GLES20.glGetAttribLocation(mGlProgram, "a_Normal");
    mColorParam = GLES20.glGetAttribLocation(mGlProgram, "a_Color");

    GLES20.glEnableVertexAttribArray(mPositionParam);
    GLES20.glEnableVertexAttribArray(mNormalParam);
    GLES20.glEnableVertexAttribArray(mColorParam);
    checkGLError("mColorParam");

    // Apply the eye transformation to the camera.
    Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);

    // Set the position of the light
    Matrix.multiplyMV(mLightPosInEyeSpace, 0, mView, 0, mLightPosInWorldSpace, 0);
    GLES20.glUniform3f(mLightPosParam, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1],
            mLightPosInEyeSpace[2]);

    // Draw the pages.
    for (Page page : mPages) {
        page.draw(transform.getPerspective(), mView);
        checkGLError("Drawing page");
    }

    // Set mModelView for the floor, so we draw floor in the correct location
    Matrix.multiplyMM(mModelView, 0, mView, 0, mModelFloor, 0);
    Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0, mModelView, 0);
    drawFloor(transform.getPerspective());
}
From source file: com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java

/**
 * Draws a frame for an eye.
 *
 * @param eye The eye to render. Includes all required transformations.
 */
@Override
public void onDrawEye(Eye eye) {
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    checkGLError("colorParam");

    // Apply the eye transformation to the camera.
    Matrix.multiplyMM(view, 0, eye.getEyeView(), 0, camera, 0);

    // Set the position of the light
    Matrix.multiplyMV(lightPosInEyeSpace, 0, view, 0, LIGHT_POS_IN_WORLD_SPACE, 0);

    // Build the ModelView and ModelViewProjection matrices
    // for calculating cube position and light.
    float[] perspective = eye.getPerspective(Z_NEAR, Z_FAR);
    Matrix.multiplyMM(modelView, 0, view, 0, modelCube, 0);
    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
    drawCube();

    float[] mRotationCube = new float[16];
    // Not sure if we can just reuse modelView like this
    Matrix.multiplyMM(modelView, 0, view, 0, modelMiniCube, 0);
    //Matrix.multiplyMM(mRotationCube, 0, view, 0, modelView, 0); // Not working
    //float[] ident = new Matrix(eye.getPerspective(Z_NEAR, Z_FAR));
    //Matrix.setIdentityM(ident, 0); // Cheating so that we have relative hand movement
    // end not working
    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
    //Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, mRotationCube, 0); // doesn't work :(
    //Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelMiniCube, 0);
    drawMiniCube();

    // Set modelView for the floor, so we draw floor in the correct location
    Matrix.multiplyMM(modelView, 0, view, 0, modelFloor, 0);
    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
    drawFloor();
}
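On the "not sure if we can just reuse modelView" comment above: Matrix.multiplyMM overwrites all 16 elements of the result array, so reusing modelView (and modelViewProjection) across consecutive draw calls is fine, as long as the result array is not also passed as one of the inputs; per the android.opengl.Matrix documentation, the result is undefined when the result elements overlap the lhs or rhs elements.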
From source file: com.tumblr.cardboard.Tumblr3DActivity.java

/**
 * Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
 * a parameter.
 *
 * @param eye The transformations to apply to render this eye.
 */
@Override
public void onDrawEye(Eye eye) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    mPositionParam = GLES20.glGetAttribLocation(mGlProgram, "a_Position");
    mNormalParam = GLES20.glGetAttribLocation(mGlProgram, "a_Normal");
    mColorParam = GLES20.glGetAttribLocation(mGlProgram, "a_Color");
    mRectTextureCoordinateParam = GLES20.glGetAttribLocation(mGlProgram, "a_TexCoordinate");

    GLES20.glEnableVertexAttribArray(mPositionParam);
    GLES20.glEnableVertexAttribArray(mNormalParam);
    GLES20.glEnableVertexAttribArray(mColorParam);
    checkGLError("mColorParam");

    // Apply the eye transformation to the camera.
    Matrix.multiplyMM(mView, 0, eye.getEyeView(), 0, mCamera, 0);

    // Set the position of the light
    Matrix.multiplyMV(mLightPosInEyeSpace, 0, mView, 0, mLightPosInWorldSpace, 0);
    GLES20.glUniform3f(mLightPosParam, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1],
            mLightPosInEyeSpace[2]);

    // Set mModelView for the floor, so we draw floor in the correct location
    Matrix.multiplyMM(mModelView, 0, mView, 0, mModelFloor, 0);
    Matrix.multiplyMM(mModelViewProjection, 0, eye.getPerspective(Z_NEAR, Z_FAR), 0, mModelView, 0);
    drawFloor(eye.getPerspective(Z_NEAR, Z_FAR));

    // Build the ModelView and ModelViewProjection matrices
    // for calculating rect position and light.
    for (int i = 0; i < mModelRect.length; i++) {
        Matrix.multiplyMM(mModelView, 0, mView, 0, mModelRect[i], 0);
        Matrix.multiplyMM(mModelViewProjection, 0, eye.getPerspective(Z_NEAR, Z_FAR), 0, mModelView, 0);
        drawRect(i);
    }
}
From source file: com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java

/**
 * Find a new random position for the object.
 *
 * <p>We'll rotate it around the Y-axis so it's out of sight, and then up or down by a little bit.
 */
private void hideObject() {
    float[] rotationMatrix = new float[16];
    float[] posVec = new float[4];

    // First rotate in XZ plane, between 90 and 270 deg away, and scale so that we vary
    // the object's distance from the user.
    float angleXZ = (float) Math.random() * 180 + 90;
    Matrix.setRotateM(rotationMatrix, 0, angleXZ, 0f, 1f, 0f);
    float oldObjectDistance = objectDistance;
    objectDistance =
            (float) Math.random() * (MAX_MODEL_DISTANCE - MIN_MODEL_DISTANCE) + MIN_MODEL_DISTANCE;
    float objectScalingFactor = objectDistance / oldObjectDistance;
    Matrix.scaleM(rotationMatrix, 0, objectScalingFactor, objectScalingFactor, objectScalingFactor);
    // The rhsVec offset of 12 points at the translation column of the column-major model
    // matrix, so this rotates and rescales the cube's current position.
    Matrix.multiplyMV(posVec, 0, rotationMatrix, 0, modelCube, 12);

    float angleY = (float) Math.random() * 80 - 40; // Angle in Y plane, between -40 and 40.
    angleY = (float) Math.toRadians(angleY);
    float newY = (float) Math.tan(angleY) * objectDistance;

    modelPosition[0] = posVec[0];
    modelPosition[1] = newY;
    modelPosition[2] = posVec[2];

    updateModelPosition();
}
From source file: com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java

/**
 * Check if user is looking at object by calculating where the object is in eye-space.
 *
 * @return true if the user is looking at the object.
 */
private boolean isLookingAtObject() {
    float[] initVec = { 0, 0, 0, 1.0f };
    float[] objPositionVec = new float[4];

    // Convert object space to camera space. Use the headView from onNewFrame.
    Matrix.multiplyMM(modelView, 0, headView, 0, modelCube, 0);
    Matrix.multiplyMV(objPositionVec, 0, modelView, 0, initVec, 0);

    // Angles of the object relative to the camera's forward (-Z) axis.
    float pitch = (float) Math.atan2(objPositionVec[1], -objPositionVec[2]);
    float yaw = (float) Math.atan2(objPositionVec[0], -objPositionVec[2]);

    return Math.abs(pitch) < PITCH_LIMIT && Math.abs(yaw) < YAW_LIMIT;
}
From source file: com.tumblr.cardboard.Tumblr3DActivity.java

private boolean isLookingAtObject(float[] initVec, float[] objPositionVec, int texIndex) {
    // Convert object space to camera space. Use the headView from onNewFrame.
    Matrix.multiplyMM(mModelView, 0, mHeadView, 0, mModelRect[texIndex], 0);
    Matrix.multiplyMV(objPositionVec, 0, mModelView, 0, initVec, 0);

    float pitch = (float) Math.atan2(objPositionVec[1], -objPositionVec[2]);
    float yaw = (float) Math.atan2(objPositionVec[0], -objPositionVec[2]);

    Log.v(TAG, "Object position: X: " + objPositionVec[0] + " Y: " + objPositionVec[1]
            + " Z: " + objPositionVec[2]);
    Log.v(TAG, "Object Pitch: " + pitch + " Yaw: " + yaw);

    return (Math.abs(pitch) < PITCH_LIMIT) && (Math.abs(yaw) < YAW_LIMIT);
}