com.sveder.cardboardpassthrough.MainActivity.java Source code


Introduction

Here is the source code for com.sveder.cardboardpassthrough.MainActivity.java
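The activity renders the phone camera's preview as a stereoscopic pass-through (drawing each frame onto an external OES texture for both eyes), runs continuous speech recognition toggled by the volume keys, and polls a JSON messages endpoint every few seconds, surfacing results as overlay toasts.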

Source

/*
 * Copyright 2014 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.sveder.cardboardpassthrough;

import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.hardware.Camera;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Vibrator;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

import com.google.vrtoolkit.cardboard.*;

import org.apache.http.client.HttpResponseException;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicResponseHandler;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Timer;
import java.util.TimerTask;

/**
 * A Cardboard sample application.
 */
public class MainActivity extends CardboardActivity
        implements CardboardView.StereoRenderer, OnFrameAvailableListener {

    private static final String TASKS_URL = "http://192.168.1.78:3000/api/v1/messages";
    private SharedPreferences mPreferences;
    private String lMessage = "";
    private TextView resultadoTV;
    // Speech recognition state
    private SpeechRecognizer speechRecognizer;
    private Handler handler;
    private Timer timer;
    private boolean activado = false; // true while voice recognition is active
    listener voiceRecognizer; // the recognition loop Runnable, defined below
    int status; // 1 = keep restarting recognition, 0 = stop the loop
    private static final String TAG = "MainActivity";
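    // 0x8D65 is GL_TEXTURE_EXTERNAL_OES, the texture target used for camera
    // frames (same value as GLES11Ext.GL_TEXTURE_EXTERNAL_OES); GLES20 itself
    // does not expose this constant.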
    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
    private Camera camera;

    // GLSL for the camera pass-through: the vertex shader forwards the quad's
    // clip-space position and texture coordinate; the fragment shader samples
    // the camera frame through a samplerExternalOES.
    private final String vertexShaderCode = "attribute vec4 position;\n"
            + "attribute vec2 inputTextureCoordinate;\n"
            + "varying vec2 textureCoordinate;\n"
            + "void main() {\n"
            + "  gl_Position = position;\n"
            + "  textureCoordinate = inputTextureCoordinate;\n"
            + "}\n";

    private final String fragmentShaderCode = "#extension GL_OES_EGL_image_external : require\n"
            + "precision mediump float;\n"
            + "varying vec2 textureCoordinate;\n"
            + "uniform samplerExternalOES s_texture;\n"
            + "void main(void) {\n"
            + "  gl_FragColor = texture2D(s_texture, textureCoordinate);\n"
            //+ "  gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n" // debug: solid red
            + "}\n";

    private FloatBuffer vertexBuffer, textureVerticesBuffer, vertexBuffer2;
    private ShortBuffer drawListBuffer, buf2;
    private int mProgram;
    private int mPositionHandle, mPositionHandle2;
    private int mColorHandle;
    private int mTextureCoordHandle;

    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 2;
    static float squareVertices[] = { // full-screen quad, in counterclockwise order:
            -1.0f, -1.0f, // 0. left-bottom
            1.0f, -1.0f, // 1. right-bottom
            -1.0f, 1.0f, // 2. left-top
            1.0f, 1.0f, // 3. right-top
    };

    private short drawOrder[] = { 0, 2, 1, 1, 2, 3 }; // order to draw vertices
    private short drawOrder2[] = { 2, 0, 3, 3, 0, 1 }; // alternate winding (unused)

    static float textureVertices[] = { 0.0f, 1.0f, // A. left-bottom
            1.0f, 1.0f, // B. right-bottom
            0.0f, 0.0f, // C. left-top
            1.0f, 0.0f // D. right-top
    };

    private final int vertexStride = COORDS_PER_VERTEX * 4; // bytes per vertex: 2 floats x 4 bytes each

    private ByteBuffer indexBuffer; // Buffer for index-array

    private int texture;

    private CardboardOverlayView mOverlayView;

    private CardboardView cardboardView;
    private SurfaceTexture surface;
    private float[] mView;
    private float[] mCamera;

    public void setresultadoTV(String a) {
        resultadoTV.setText(a);
    }

    public void startCamera(int texture) {
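        // Wrap the GL texture in a SurfaceTexture and hand it to the camera as
        // the preview target; each decoded frame fires onFrameAvailable below.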
        surface = new SurfaceTexture(texture);
        surface.setOnFrameAvailableListener(this);

        camera = Camera.open();

        try {
            camera.setPreviewTexture(surface);
            camera.startPreview();
        } catch (IOException ioe) {
            Log.w(TAG, "Camera preview launch failed", ioe);
        }
    }

    static private int createTexture() {
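        // Allocate a texture on the GL_TEXTURE_EXTERNAL_OES target (the type
        // SurfaceTexture requires), with linear filtering and edge clamping.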
        int[] texture = new int[1];

        GLES20.glGenTextures(1, texture, 0);
        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture[0]);
        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);

        return texture[0];
    }

    /**
     * Compiles an OpenGL ES shader from GLSL source.
     * @param type The type of shader we will be creating.
     * @param code The GLSL source code of the shader.
     * @return The handle of the compiled shader.
     */
    private int loadGLShader(int type, String code) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, code);
        GLES20.glCompileShader(shader);

        // Get the compilation status.
        final int[] compileStatus = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

        // If the compilation failed, delete the shader.
        if (compileStatus[0] == 0) {
            Log.e(TAG, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }

        if (shader == 0) {
            throw new RuntimeException("Error creating shader.");
        }

        return shader;
    }

    /**
     * Checks if we've had an error inside of OpenGL ES, and if so, logs and throws it.
     * @param func The name of the calling operation, used to label the error.
     */
    private static void checkGLError(String func) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, func + ": glError " + error);
            throw new RuntimeException(func + ": glError " + error);
        }
    }

    /**
     * Sets the view to our CardboardView and initializes the transformation matrices we will use
     * to render our scene.
     * @param savedInstanceState The saved instance state, passed through to the superclass.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mPreferences = getSharedPreferences("CurrentUser", MODE_PRIVATE);

        setContentView(R.layout.common_ui);
        cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
        cardboardView.setRenderer(this);
        setCardboardView(cardboardView);

        //        mModelCube = new float[16];
        mCamera = new float[16];
        mView = new float[16];
        resultadoTV = (TextView) findViewById(R.id.TVresult);
        //        mModelViewProjection = new float[16];
        //        mModelView = new float[16];
        //        mModelFloor = new float[16];
        //        mHeadView = new float[16];
        //        mVibrator = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE);
        //
        //
        mOverlayView = (CardboardOverlayView) findViewById(R.id.overlay);
        mOverlayView.show3DToast("Hola, bienvenido a New Hearing :D");

        voiceRecognizer = new listener();
        handler = new Handler(Looper.getMainLooper());

        callMessageTask();

        // Works
        //startRecognitionActivity();
        // Works
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
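        // The volume keys double as controls: volume-up starts the recognition
        // loop by posting the listener Runnable to the main-thread Handler;
        // volume-down sets status to 0 so the loop stops re-posting itself.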
        if (keyCode == KeyEvent.KEYCODE_VOLUME_UP && !activado) {
            activado = true;
            status = 1;
            mOverlayView.show3DToast("Activating voice recognition");
            handler.post(voiceRecognizer);
        } else if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN && activado) {
            status = 0;
            activado = false;
            mOverlayView.show3DToast("Deactivating voice recognition");
        } else {
            mOverlayView.show3DToast("You must activate recognition first");
        }
        return true;
    }

    class listener implements RecognitionListener, Runnable {
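        // A combined Runnable and RecognitionListener: each run() creates a
        // fresh SpeechRecognizer and starts listening, and the callbacks below
        // re-post this Runnable while status != 0, so recognition effectively
        // runs in a loop until the user deactivates it.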
        @Override
        public void run() {
            speechRecognizer = SpeechRecognizer.createSpeechRecognizer(getApplicationContext());
            speechRecognizer.setRecognitionListener(this);

            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            //... all the intent stuff ...
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
            intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);

            speechRecognizer.startListening(intent);
        }

        public void onReadyForSpeech(Bundle params) {
        }

        public void onBeginningOfSpeech() {
            mOverlayView.show3DToast("Reconociendo...");
        }

        public void onRmsChanged(float rmsdB) {
        }

        public void onBufferReceived(byte[] buffer) {
        }

        public void onEndOfSpeech() {
        }

        public void onError(int error) {
            // Keep the loop alive unless recognition was deactivated.
            if (status == 0) {
                handler.removeCallbacks(this);
            } else {
                handler.postDelayed(this, 8000);
            }
            if ((error == SpeechRecognizer.ERROR_NO_MATCH) || (error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT)) {
                mOverlayView.show3DToast("We could not recognize anything");
            } else {
                mOverlayView.show3DToast("Something went wrong; please restart recognition");
            }
        }

        public void onResults(Bundle results) {
            if (status == 0) {
                handler.removeCallbacks(this);
            } else {
                handler.postDelayed(this, 5000);
            }
            ArrayList<String> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            if (data != null && !data.isEmpty()) {
                mOverlayView.show3DToast(data.get(0));
            }
        }

        public void onPartialResults(Bundle partialResults) {
        }

        public void onEvent(int eventType, Bundle params) {
        }
    }

    private void callMessageTask() {
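        // Poll the messages endpoint every 5 seconds. The TimerTask fires on a
        // background thread, so the MessageTask is posted back to the main
        // thread through the Handler before it executes.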
        final Handler handler = new Handler();
        Timer timer = new Timer();
        TimerTask doAsynchronousTask = new TimerTask() {
            @Override
            public void run() {
                handler.post(new Runnable() {
                    public void run() {
                        try {
                            MessageTask getMessageTask = new MessageTask(MainActivity.this);
                            getMessageTask.setMessageLoading(null);
                            getMessageTask.execute(TASKS_URL);
                        } catch (Exception e) {
                            Log.e(TAG, "MessageTask failed", e);
                        }
                    }
                });
            }
        };
        timer.schedule(doAsynchronousTask, 0, 5000); // execute every 5000 ms

    }

    private class MessageTask extends UrlJsonAsyncTask {
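        // POSTs the stored authentication token to TASKS_URL as JSON and parses
        // the JSON response; onPostExecute shows the returned message as a
        // CustomToast whenever it differs from the one shown last.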
        public MessageTask(Context context) {
            super(context);
        }

        protected JSONObject doInBackground(String... urls) {
            DefaultHttpClient client = new DefaultHttpClient();
            HttpPost post = new HttpPost(urls[0]);
            JSONObject holder = new JSONObject();
            JSONObject userObj = new JSONObject();
            String response = null;
            JSONObject json = new JSONObject();
            try {
                try {
                    // setup the returned values in case
                    // something goes wrong
                    json.put("success", false);
                    json.put("info", "Something went wrong. Retry!");
                    // add the stored authentication token to the params
                    userObj.put("authentication_token", mPreferences.getString("AuthToken", null));
                    holder.put("user", userObj);
                    StringEntity se = new StringEntity(holder.toString());
                    post.setEntity(se);

                    // setup the request headers
                    post.setHeader("Accept", "application/json");
                    post.setHeader("Content-Type", "application/json");

                    ResponseHandler<String> responseHandler = new BasicResponseHandler();
                    response = client.execute(post, responseHandler);
                    json = new JSONObject(response);

                } catch (HttpResponseException e) {
                    e.printStackTrace();
                    Log.e("ClientProtocol", "" + e);
                    json.put("info", "Error");
                } catch (IOException e) {
                    e.printStackTrace();
                    Log.e("IO", "" + e);
                }
            } catch (JSONException e) {
                e.printStackTrace();
                Log.e("JSON", "" + e);
            }

            return json;
        }

        @Override
        protected void onPreExecute() {
        }

        @Override
        protected void onPostExecute(JSONObject json) {
            try {
                JSONObject jsonTasks = json.getJSONObject("data").getJSONObject("message");
                if (!lMessage.equals(jsonTasks.getString("message"))) {
                    CustomToast toast = new CustomToast(getApplicationContext(), Toast.LENGTH_LONG);
                    int offsetX = 100;
                    int offsetY = 250;
                    toast.setGravity(Gravity.LEFT | Gravity.TOP, offsetX, offsetY);
                    toast.show(jsonTasks.getString("message"));
                }
                lMessage = jsonTasks.getString("message");
            } catch (Exception e) {
                Log.e(TAG, "Could not parse message response", e);
            } finally {
                super.onPostExecute(json);
            }
        }
    }

    /*private void startRecognitionThread(final int code) {
        TimerTask tarea = new TimerTask() {
            @Override
            public void run() {
                if (code == 0) {
                    timer.cancel(); // this ends the thread
                } else {
                    handler.post(new Runnable() {
                        public void run() {
                            startRecognitionActivity();
                        }
                    });
                }
            }
        };
    
        timer = new Timer();
        timer.schedule(tarea, 100, 100); // spawns a thread
    }
    
    private void startRecognitionActivity() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "com.sveder.cardboardpassthrough");
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 5);
    
        speechRecognizer.startListening(intent);
    }*/
    @Override
    public void onCardboardTrigger() {
        //startRecognitionActivity();
    }

    @Override
    public void onRendererShutdown() {
        Log.i(TAG, "onRendererShutdown");
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        Log.i(TAG, "onSurfaceChanged");
    }

    /**
     * Creates the buffers we use to store information about the 3D world. OpenGL doesn't use Java
     * arrays, but rather needs data in a format it can understand. Hence we use ByteBuffers.
     * @param config The EGL configuration used when creating the surface.
     */
    @Override
    public void onSurfaceCreated(EGLConfig config) {
        Log.i(TAG, "onSurfaceCreated");
        GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well

        ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4);
        bb.order(ByteOrder.nativeOrder());
        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(squareVertices);
        vertexBuffer.position(0);

        ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
        dlb.order(ByteOrder.nativeOrder());
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);

        ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
        bb2.order(ByteOrder.nativeOrder());
        textureVerticesBuffer = bb2.asFloatBuffer();
        textureVerticesBuffer.put(textureVertices);
        textureVerticesBuffer.position(0);

        int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);

        mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
        GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);

        texture = createTexture();
        startCamera(texture);

        //        ByteBuffer bbVertices = ByteBuffer.allocateDirect(DATA.CUBE_COORDS.length * 4);
        //        bbVertices.order(ByteOrder.nativeOrder());
        //        mCubeVertices = bbVertices.asFloatBuffer();
        //        mCubeVertices.put(DATA.CUBE_COORDS);
        //        mCubeVertices.position(0);
        //
        //        ByteBuffer bbColors = ByteBuffer.allocateDirect(DATA.CUBE_COLORS.length * 4);
        //        bbColors.order(ByteOrder.nativeOrder());
        //        mCubeColors = bbColors.asFloatBuffer();
        //        mCubeColors.put(DATA.CUBE_COLORS);
        //        mCubeColors.position(0);
        //
        //        ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(DATA.CUBE_FOUND_COLORS.length * 4);
        //        bbFoundColors.order(ByteOrder.nativeOrder());
        //        mCubeFoundColors = bbFoundColors.asFloatBuffer();
        //        mCubeFoundColors.put(DATA.CUBE_FOUND_COLORS);
        //        mCubeFoundColors.position(0);
        //
        //        ByteBuffer bbNormals = ByteBuffer.allocateDirect(DATA.CUBE_NORMALS.length * 4);
        //        bbNormals.order(ByteOrder.nativeOrder());
        //        mCubeNormals = bbNormals.asFloatBuffer();
        //        mCubeNormals.put(DATA.CUBE_NORMALS);
        //        mCubeNormals.position(0);
        //
        //        // make a floor
        //        ByteBuffer bbFloorVertices = ByteBuffer.allocateDirect(DATA.FLOOR_COORDS.length * 4);
        //        bbFloorVertices.order(ByteOrder.nativeOrder());
        //        mFloorVertices = bbFloorVertices.asFloatBuffer();
        //        mFloorVertices.put(DATA.FLOOR_COORDS);
        //        mFloorVertices.position(0);
        //
        //        ByteBuffer bbFloorNormals = ByteBuffer.allocateDirect(DATA.FLOOR_NORMALS.length * 4);
        //        bbFloorNormals.order(ByteOrder.nativeOrder());
        //        mFloorNormals = bbFloorNormals.asFloatBuffer();
        //        mFloorNormals.put(DATA.FLOOR_NORMALS);
        //        mFloorNormals.position(0);
        //
        //        ByteBuffer bbFloorColors = ByteBuffer.allocateDirect(DATA.FLOOR_COLORS.length * 4);
        //        bbFloorColors.order(ByteOrder.nativeOrder());
        //        mFloorColors = bbFloorColors.asFloatBuffer();
        //        mFloorColors.put(DATA.FLOOR_COLORS);
        //        mFloorColors.position(0);
        //
        //        int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, R.raw.light_vertex);
        //        int gridShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, R.raw.grid_fragment);
        //
        //        mGlProgram = GLES20.glCreateProgram();
        //        GLES20.glAttachShader(mGlProgram, vertexShader);
        //        GLES20.glAttachShader(mGlProgram, gridShader);
        //        GLES20.glLinkProgram(mGlProgram);
        //
        //        GLES20.glEnable(GLES20.GL_DEPTH_TEST);
        //
        //        // Object first appears directly in front of user
        //        Matrix.setIdentityM(mModelCube, 0);
        //        Matrix.translateM(mModelCube, 0, 0, 0, -mObjectDistance);
        //
        //        Matrix.setIdentityM(mModelFloor, 0);
        //        Matrix.translateM(mModelFloor, 0, 0, -mFloorDepth, 0); // Floor appears below user
        //
        //        checkGLError("onSurfaceCreated");
    }

    //    /**
    //     * Converts a raw text file into a string.
    //     * @param resId The resource ID of the raw text file about to be turned into a shader.
    //     * @return
    //     */
    //    private String readRawTextFile(int resId) {
    //        InputStream inputStream = getResources().openRawResource(resId);
    //        try {
    //            BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
    //            StringBuilder sb = new StringBuilder();
    //            String line;
    //            while ((line = reader.readLine()) != null) {
    //                sb.append(line).append("\n");
    //            }
    //            reader.close();
    //            return sb.toString();
    //        } catch (IOException e) {
    //            e.printStackTrace();
    //        }
    //        return "";
    //    }
    //

    /**
     * Prepares OpenGL ES before we draw a frame.
     * @param headTransform The head transformation in the new frame.
     */
    @Override
    public void onNewFrame(HeadTransform headTransform) {
        //        GLES20.glUseProgram(mGlProgram);
        //
        //        mModelViewProjectionParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVP");
        //        mLightPosParam = GLES20.glGetUniformLocation(mGlProgram, "u_LightPos");
        //        mModelViewParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVMatrix");
        //        mModelParam = GLES20.glGetUniformLocation(mGlProgram, "u_Model");
        //        mIsFloorParam = GLES20.glGetUniformLocation(mGlProgram, "u_IsFloor");
        //
        //        // Build the Model part of the ModelView matrix.
        //        Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);
        //
        //        // Build the camera matrix and apply it to the ModelView.
        //        Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);
        //
        //        headTransform.getHeadView(mHeadView, 0);
        //
        //        checkGLError("onReadyToDraw");

        float[] mtx = new float[16];
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // Latch the latest camera frame into the external texture; this must run
        // on the GL thread. The transform matrix is fetched but not applied here.
        surface.updateTexImage();
        surface.getTransformMatrix(mtx);

    }

    @Override
    public void onFrameAvailable(SurfaceTexture arg0) {
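        // A new camera frame is ready: request a render so onNewFrame can call
        // updateTexImage() on the GL thread.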
        this.cardboardView.requestRender();

    }

    /**
     * Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
     * a parameter.
     * @param transform The transformations to apply to render this eye.
     */
    @Override
    public void onDrawEye(EyeTransform transform) {
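        // Draw the full-screen camera quad for this eye: bind the shader program
        // and the external texture, point the position and texture-coordinate
        // attributes at the quad buffers, then issue one indexed draw call.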
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        GLES20.glUseProgram(mProgram);

        // Select texture unit 0 and bind the camera's external texture to it.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);

        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride,
                vertexBuffer);

        mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
        GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
        GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride,
                textureVerticesBuffer);

        // Point the s_texture sampler uniform at texture unit 0.
        mColorHandle = GLES20.glGetUniformLocation(mProgram, "s_texture");
        GLES20.glUniform1i(mColorHandle, 0);

        GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);

        // Disable vertex array
        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTextureCoordHandle);

        Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);

        //        mPositionParam = GLES20.glGetAttribLocation(mGlProgram, "a_Position");
        //        mNormalParam = GLES20.glGetAttribLocation(mGlProgram, "a_Normal");
        //        mColorParam = GLES20.glGetAttribLocation(mGlProgram, "a_Color");
        //
        //        GLES20.glEnableVertexAttribArray(mPositionParam);
        //        GLES20.glEnableVertexAttribArray(mNormalParam);
        //        GLES20.glEnableVertexAttribArray(mColorParam);
        //        checkGLError("mColorParam");
        //
        //        // Apply the eye transformation to the camera.
        //        Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);
        //
        //        // Set the position of the light
        //        Matrix.multiplyMV(mLightPosInEyeSpace, 0, mView, 0, mLightPosInWorldSpace, 0);
        //        GLES20.glUniform3f(mLightPosParam, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1],
        //                mLightPosInEyeSpace[2]);
        //
        //        // Build the ModelView and ModelViewProjection matrices
        //        // for calculating cube position and light.
        //        Matrix.multiplyMM(mModelView, 0, mView, 0, mModelCube, 0);
        //        Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0, mModelView, 0);
        //        drawCube();
        //
        //        // Set mModelView for the floor, so we draw floor in the correct location
        //        Matrix.multiplyMM(mModelView, 0, mView, 0, mModelFloor, 0);
        //        Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0,
        //            mModelView, 0);
        //        drawFloor(transform.getPerspective());
    }

    @Override
    public void onFinishFrame(Viewport viewport) {
    }

    //    /**
    //     * Draw the cube. We've set all of our transformation matrices. Now we simply pass them into
    //     * the shader.
    //     */
    //    public void drawCube() {
    //        // This is not the floor!
    //        GLES20.glUniform1f(mIsFloorParam, 0f);
    //
    //        // Set the Model in the shader, used to calculate lighting
    //        GLES20.glUniformMatrix4fv(mModelParam, 1, false, mModelCube, 0);
    //
    //        // Set the ModelView in the shader, used to calculate lighting
    //        GLES20.glUniformMatrix4fv(mModelViewParam, 1, false, mModelView, 0);
    //
    //        // Set the position of the cube
    //        GLES20.glVertexAttribPointer(mPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
    //                false, 0, mCubeVertices);
    //
    //        // Set the ModelViewProjection matrix in the shader.
    //        GLES20.glUniformMatrix4fv(mModelViewProjectionParam, 1, false, mModelViewProjection, 0);
    //
    //        // Set the normal positions of the cube, again for shading
    //        GLES20.glVertexAttribPointer(mNormalParam, 3, GLES20.GL_FLOAT,
    //                false, 0, mCubeNormals);
    //
    //
    //
    //        if (isLookingAtObject()) {
    //            GLES20.glVertexAttribPointer(mColorParam, 4, GLES20.GL_FLOAT, false,
    //                    0, mCubeFoundColors);
    //        } else {
    //            GLES20.glVertexAttribPointer(mColorParam, 4, GLES20.GL_FLOAT, false,
    //                    0, mCubeColors);
    //        }
    //        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
    //        checkGLError("Drawing cube");
    //    }
    //
    //    /**
    //     * Draw the floor. This feeds in data for the floor into the shader. Note that this doesn't
    //     * feed in data about position of the light, so if we rewrite our code to draw the floor first,
    //     * the lighting might look strange.
    //     */
    //    public void drawFloor(float[] perspective) {
    //        // This is the floor!
    //        GLES20.glUniform1f(mIsFloorParam, 1f);
    //
    //        // Set ModelView, MVP, position, normals, and color
    //        GLES20.glUniformMatrix4fv(mModelParam, 1, false, mModelFloor, 0);
    //        GLES20.glUniformMatrix4fv(mModelViewParam, 1, false, mModelView, 0);
    //        GLES20.glUniformMatrix4fv(mModelViewProjectionParam, 1, false, mModelViewProjection, 0);
    //        GLES20.glVertexAttribPointer(mPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
    //                false, 0, mFloorVertices);
    //        GLES20.glVertexAttribPointer(mNormalParam, 3, GLES20.GL_FLOAT, false, 0, mFloorNormals);
    //        GLES20.glVertexAttribPointer(mColorParam, 4, GLES20.GL_FLOAT, false, 0, mFloorColors);
    //        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6);
    //
    //        checkGLError("drawing floor");
    //    }
    //
    //    /**
    //     * Increment the score, hide the object, and give feedback if the user pulls the magnet while
    //     * looking at the object. Otherwise, remind the user what to do.
    //     */

    //    /**
    //     * Find a new random position for the object.
    //     * We'll rotate it around the Y-axis so it's out of sight, and then up or down by a little bit.
    //     */
    //    private void hideObject() {
    //        float[] rotationMatrix = new float[16];
    //        float[] posVec = new float[4];
    //
    //        // First rotate in XZ plane, between 90 and 270 deg away, and scale so that we vary
    //        // the object's distance from the user.
    //        float angleXZ = (float) Math.random() * 180 + 90;
    //        Matrix.setRotateM(rotationMatrix, 0, angleXZ, 0f, 1f, 0f);
    //        float oldObjectDistance = mObjectDistance;
    //        mObjectDistance = (float) Math.random() * 15 + 5;
    //        float objectScalingFactor = mObjectDistance / oldObjectDistance;
    //        Matrix.scaleM(rotationMatrix, 0, objectScalingFactor, objectScalingFactor, objectScalingFactor);
    //        Matrix.multiplyMV(posVec, 0, rotationMatrix, 0, mModelCube, 12);
    //
    //        // Now get the up or down angle, between -20 and 20 degrees
    //        float angleY = (float) Math.random() * 80 - 40; // angle in Y plane, between -40 and 40
    //        angleY = (float) Math.toRadians(angleY);
    //        float newY = (float)Math.tan(angleY) * mObjectDistance;
    //
    //        Matrix.setIdentityM(mModelCube, 0);
    //        Matrix.translateM(mModelCube, 0, posVec[0], newY, posVec[2]);
    //    }

    //    /**
    //     * Check if user is looking at object by calculating where the object is in eye-space.
    //     * @return
    //     */
    //    private boolean isLookingAtObject() {
    //        float[] initVec = {0, 0, 0, 1.0f};
    //        float[] objPositionVec = new float[4];
    //
    //        // Convert object space to camera space. Use the headView from onNewFrame.
    //        Matrix.multiplyMM(mModelView, 0, mHeadView, 0, mModelCube, 0);
    //        Matrix.multiplyMV(objPositionVec, 0, mModelView, 0, initVec, 0);
    //
    //        float pitch = (float)Math.atan2(objPositionVec[1], -objPositionVec[2]);
    //        float yaw = (float)Math.atan2(objPositionVec[0], -objPositionVec[2]);
    //
    //        Log.i(TAG, "Object position: X: " + objPositionVec[0]
    //                + "  Y: " + objPositionVec[1] + " Z: " + objPositionVec[2]);
    //        Log.i(TAG, "Object Pitch: " + pitch +"  Yaw: " + yaw);
    //
    //        return (Math.abs(pitch) < PITCH_LIMIT) && (Math.abs(yaw) < YAW_LIMIT);
    //    }
}