Example usage for android.opengl GLSurfaceView RENDERMODE_WHEN_DIRTY


Introduction

This page collects example usages of the android.opengl.GLSurfaceView constant RENDERMODE_WHEN_DIRTY, drawn from open-source Android projects.

Prototype

public static final int RENDERMODE_WHEN_DIRTY

Document

The renderer only renders when the surface is created or when requestRender() is called.
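
In practice this mode is paired with explicit requestRender() calls whenever the drawing data changes. A minimal sketch (the class and method names here are illustrative, not taken from the examples below):

import android.content.Context;
import android.opengl.GLSurfaceView;

public class DirtyRenderView extends GLSurfaceView {
    public DirtyRenderView(Context context, GLSurfaceView.Renderer renderer) {
        super(context);
        setEGLContextClientVersion(2);
        setRenderer(renderer);                              // starts the render thread
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); // must come after setRenderer()
    }

    // Call whenever the scene data changes; requestRender() is safe from any thread.
    public void invalidateScene() {
        requestRender();
    }
}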

Usage

From source file:joshuatee.wx.USWXOGLRadarActivity.java

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
    preferences = PreferenceManager.getDefaultSharedPreferences(this);
    //editor = preferences.edit();
    theme_blue_current = preferences.getString("THEME_BLUE", "");
    setTheme(Utility.Theme(theme_blue_current));

    //setContentView(R.layout.activity_uswxoglradar);

    if (!DataStore.loaded)
        DataStore.Init(this);

    space = Pattern.compile(" ");
    comma = Pattern.compile(",");
    colon = Pattern.compile(":");

    //mImageMap = (ImageMap) findViewById(R.id.map);
    //mImageMap.setVisibility(View.GONE);

    cod_warnings_default = preferences.getString("COD_WARNINGS_DEFAULT", "");
    cod_cities_default = preferences.getString("COD_CITIES_DEFAULT", "");
    cod_hw_default = preferences.getString("COD_HW_DEFAULT", "true");
    cod_locdot_default = preferences.getString("COD_LOCDOT_DEFAULT", "true");
    cod_lakes_default = preferences.getString("COD_LAKES_DEFAULT", "true");

    //delay = UtilityImg.GetAnimInterval(preferences);

    img = new TouchImageView2(getApplicationContext());
    img.setMaxZoom(max_zoom);
    img.setZoom(init_zoom);

    dm = new DisplayMetrics();
    this.getWindowManager().getDefaultDisplay().getMetrics(dm);

    boolean isActionBarSplit = ((dm.widthPixels / dm.density) < 400.00f);
    if (isActionBarSplit) {
        ab_split = true;
    }

    int resourceId = getResources().getIdentifier("status_bar_height", "dimen", "android");
    statusBarHeight = getResources().getDimensionPixelSize(resourceId);

    TypedValue tv = new TypedValue();
    getTheme().resolveAttribute(android.R.attr.actionBarSize, tv, true);
    actionBarHeight = getResources().getDimensionPixelSize(tv.resourceId);
    actionBarHeight *= dm.density;

    screen_width = dm.widthPixels;
    screen_height = dm.heightPixels - statusBarHeight - actionBarHeight;

    turl = getIntent().getStringArrayExtra(RID);

    prod = "N0Q";

    view = new GLSurfaceView(this);
    view.setEGLContextClientVersion(2);
    mScaleDetector = new ScaleGestureDetector(this, new ScaleListener());
    mGestureDetector = new GestureDetectorCompat(this, this);

    OGLR = new OpenGLRenderRadar2Dv4(this);
    view.setRenderer(OGLR);
    view.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

    density = (float) (OGLR.ort_int * 2) / dm.widthPixels;

    setContentView(view);
    ogl_in_view = true;

    rid1 = turl[0];
    state = turl[1];
    if (turl.length > 2) {
        prod = turl[2];
        if (prod.equals("N0R")) {
            prod = "N0Q";
        }
    }

    //rid_fav = preferences.getString(pref_token," : : :");
    //sector = preferences.getString("COD_SECTOR_"+state,"");
    //state = preferences.getString("STATE_CODE_"+state,"");
    //onek = preferences.getString("COD_1KM_"+rid1,"");

    setTitle(prod);

    rid_fav = preferences.getString(pref_token, " : : :");
    rid_arr_loc = UtilityFavorites.SetupFavMenu(preferences, rid_fav, turl[0], pref_token_location, colon);
    adapter = new ArrayAdapter<String>(getBaseContext(), android.R.layout.simple_spinner_dropdown_item,
            rid_arr_loc);
    getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);

    // Construct the listener before registering it below; registering a null
    // listener would silently drop selection callbacks.
    navigationListener = new OnNavigationListener() {

        @Override
        public boolean onNavigationItemSelected(int itemPosition, long itemId) {

            if (itemPosition == 0 || itemPosition > 2) {
                rid1 = space.split(rid_arr_loc[itemPosition])[0];

                //rid_loc = preferences.getString("RID_LOC_"+rid1,"");
                //editor.putString("NEXRAD_LAST", rid1); 
                //editor.commit();

                old_state = state;
                old_sector = sector;
                old_onek = onek;
                state = comma.split(preferences.getString("RID_LOC_" + rid1, ""))[0];
                sector = preferences.getString("COD_SECTOR_" + state, "");
                state = preferences.getString("STATE_CODE_" + state, "");
                onek = preferences.getString("COD_1KM_" + rid1, "");

                if (prod.equals("2k")) {
                    img_url = img_url.replace(old_sector, sector);
                    img_url = img_url.replace(old_state, state);
                    img_url = img_url.replace(old_onek, onek);
                }
                if (!restarted) {
                    img.resetZoom();
                    img.setZoom(init_zoom);
                    OGLR.setZoom(1.0f);
                    mScaleFactor = 1.0f;
                    OGLR.mPositionX = 0.0f;
                    OGLR.mPositionY = 0.0f;
                }
                restarted = false;
                new GetContent().execute();
            } else if (itemPosition == 1) {
                Intent dtx_srm = new Intent(getApplicationContext(), RIDAddFavActivity.class);
                startActivity(dtx_srm);
            } else if (itemPosition == 2) {
                Intent dtx_srm2 = new Intent(getApplicationContext(), RIDRemoveFavActivity.class);
                startActivity(dtx_srm2);
            }

            return false;
        }
    };
    getActionBar().setListNavigationCallbacks(adapter, navigationListener);

}

From source file:demo.camera.library.ui.CameraCaptureActivity.java

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    setContentView(R.layout.activity_camera_capture);

    CameraUtils.clearSessionConfig();
    CameraUtils.clearSessionFolders(this, true, true);

    Spinner spinner = (Spinner) findViewById(R.id.filterSpinner);
    ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this, R.array.cameraFilterNames,
            android.R.layout.simple_spinner_item);
    adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    // Apply the adapter to the spinner.
    spinner.setAdapter(adapter);
    spinner.setOnItemSelectedListener(this);

    // Define a handler that receives camera-control messages from other threads.  All calls
    // to Camera must be made on the same thread.  Note we create this before the renderer
    // thread, so we know the fully-constructed object will be visible.
    mSessionConfig = CameraUtils.getSessionConfig(this);
    CameraUtils.clearSessionConfig();

    mCameraHandler = new CameraHandler(this);
    mVideoEncoder = new TextureMovieEncoder();
    mRecordingEnabled = mVideoEncoder.isRecording();

    try {
        mMicEncoder = new MicrophoneEncoder(mSessionConfig);
    } catch (IOException e) {
        e.printStackTrace();
    }

    // Configure the GLSurfaceView.  This will start the Renderer thread, with an
    // appropriate EGL context.
    mGLView = (GLSurfaceView) findViewById(R.id.cameraPreview_surfaceView);
    mGLView.setEGLContextClientVersion(2); // select GLES 2.0
    mRenderer = new CameraSurfaceRenderer(mCameraHandler, mSessionConfig, mVideoEncoder);
    mGLView.setRenderer(mRenderer);
    mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    mCameraManager = new AppCameraManager(this, mSessionConfig);
    setUpUi();
    Log.d(TAG, "onCreate complete: " + this);
}
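
Whatever the render mode, GLSurfaceView still requires the host activity to forward lifecycle events so the render thread can pause and release its EGL context. A minimal sketch of the companion callbacks, assuming no extra camera teardown is needed (the real activity likely does more here):

@Override
protected void onResume() {
    super.onResume();
    mGLView.onResume();  // recreates the EGL context and resumes the render thread
}

@Override
protected void onPause() {
    super.onPause();
    mGLView.onPause();   // pauses the render thread and releases the EGL context
}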

From source file:com.projecttango.examples.java.occlusion.OcclusionActivity.java

/**
 * Connect tango to callbacks and start TangoMesher.
 */
private void startupTango() {
    // Connect listeners to Tango Service and forward point cloud information to TangoMesher.
    List<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>();
    mTango.connectListener(framePairs, new Tango.OnTangoUpdateListener() {
        @Override
        public void onPoseAvailable(TangoPoseData tangoPoseData) {
            // We are not using onPoseAvailable for this app.
        }

        @Override
        public void onXyzIjAvailable(TangoXyzIjData tangoXyzIjData) {
            // We are not using onXyzIjAvailable for this app.
        }

        @Override
        public void onFrameAvailable(int cameraId) {
            // Check if the frame available is for the camera we want and update its frame
            // on the view.
            if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
                // Now that we are receiving onFrameAvailable callbacks, we can switch
                // to RENDERMODE_WHEN_DIRTY to drive the render loop from this callback.
                // This will result in a frame rate of approximately 30FPS, in synchrony with
                // the RGB camera driver.
                // If you need to render at a higher rate (e.g., if you want to render complex
                // animations smoothly) you can use RENDERMODE_CONTINUOUSLY throughout the
                // application lifecycle.
                if (mSurfaceView.getRenderMode() != GLSurfaceView.RENDERMODE_WHEN_DIRTY) {
                    mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
                }

                // Mark a camera frame as available for rendering in the OpenGL thread.
                mIsFrameAvailableTangoThread.set(true);
                // Trigger an OpenGL render to update the OpenGL scene with the new RGB data.
                mSurfaceView.requestRender();
            }
        }

        @Override
        public void onTangoEvent(TangoEvent tangoEvent) {
            // We are not using onTangoEvent for this app.
        }

        @Override
        public void onPointCloudAvailable(TangoPointCloudData tangoPointCloudData) {
            if (mTangoMesher != null) {
                mTangoMesher.onPointCloudAvailable(tangoPointCloudData);
            }
            if (mPointCloudManager != null) {
                mPointCloudManager.updatePointCloud(tangoPointCloudData);
            }
        }
    });

    // Create a TangoMesher to do a 3D reconstruction of the scene to implement occlusion.
    mTangoMesher = new TangoMesher(new TangoMesher.OnTangoMeshesAvailableListener() {
        @Override
        public void onMeshesAvailable(TangoMesh[] tangoMeshes) {
            mMeshVector = tangoMeshes;
        }
    });

    // Set camera intrinsics to TangoMesher.
    mTangoMesher
            .setColorCameraCalibration(mTango.getCameraIntrinsics(TangoCameraIntrinsics.TANGO_CAMERA_COLOR));
    mTangoMesher
            .setDepthCameraCalibration(mTango.getCameraIntrinsics(TangoCameraIntrinsics.TANGO_CAMERA_DEPTH));
    // Start the scene reconstruction. We will start getting new meshes from TangoMesher. These
    // meshes will be rendered to a depth texture to do the occlusion.
    mTangoMesher.startSceneReconstruction();
}
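
On the consuming side, mIsFrameAvailableTangoThread is an AtomicBoolean drained by the GL thread once per frame before the camera texture is refreshed. A sketch of that renderer half, assuming the field names above and the Tango SDK's updateTexture() call:

import java.util.concurrent.atomic.AtomicBoolean;
import javax.microedition.khronos.opengles.GL10;

// Inside the GLSurfaceView.Renderer implementation:
private final AtomicBoolean mIsFrameAvailableTangoThread = new AtomicBoolean(false);

@Override
public void onDrawFrame(GL10 gl) {
    // compareAndSet drains the flag exactly once, without racing the Tango
    // callback thread that sets it in onFrameAvailable().
    if (mIsFrameAvailableTangoThread.compareAndSet(true, false)) {
        // Copy the newest RGB camera image into the bound OES texture.
        mTango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
    }
    // ... draw the occluded scene with the refreshed texture ...
}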

From source file:com.projecttango.examples.java.augmentedreality.AugmentedRealityActivity.java

/**
 * Set up the callback listeners for the Tango Service and obtain other parameters required
 * after Tango connection.
 * Listen to updates from the RGB camera.
 */
private void startupTango() {
    // No need to add any coordinate frame pairs since we aren't using pose data from callbacks.
    ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>();

    mTango.connectListener(framePairs, new OnTangoUpdateListener() {
        @Override
        public void onPoseAvailable(TangoPoseData pose) {
            // We are not using onPoseAvailable for this app.
        }

        @Override
        public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
            // We are not using onXyzIjAvailable for this app.
        }

        @Override
        public void onPointCloudAvailable(TangoPointCloudData pointCloud) {
            // We are not using onPointCloudAvailable for this app.
        }

        @Override
        public void onTangoEvent(TangoEvent event) {
            // We are not using onTangoEvent for this app.
        }

        @Override
        public void onFrameAvailable(int cameraId) {
            // Check if the frame available is for the camera we want and update its frame
            // on the view.
            if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
                // Now that we are receiving onFrameAvailable callbacks, we can switch
                // to RENDERMODE_WHEN_DIRTY to drive the render loop from this callback.
                // This will result in a frame rate of approximately 30FPS, in synchrony with
                // the RGB camera driver.
                // If you need to render at a higher rate (e.g., if you want to render complex
                // animations smoothly) you can use RENDERMODE_CONTINUOUSLY throughout the
                // application lifecycle.
                if (mSurfaceView.getRenderMode() != GLSurfaceView.RENDERMODE_WHEN_DIRTY) {
                    mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
                }

                // Mark a camera frame as available for rendering in the OpenGL thread.
                mIsFrameAvailableTangoThread.set(true);
                // Trigger a Rajawali render to update the scene with the new RGB data.
                mSurfaceView.requestRender();
            }
        }
    });
}

From source file:com.example.appf.CS3570.java

@TargetApi(Build.VERSION_CODES.FROYO)
public MyGLSurfaceView(Context context, CS3570 parent) {
    super(context);

    mother = parent;
    // Create an OpenGL ES 2.0 context.
    setEGLContextClientVersion(2);

    // Set the Renderer for drawing on the GLSurfaceView
    mRenderer = new MyGLRenderer();
    setRenderer(mRenderer);

    // Render the view only when there is a change in the drawing data
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
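
Because RENDERMODE_WHEN_DIRTY suppresses the continuous loop, this view only redraws when something calls requestRender(). The usual companion is a touch handler that mutates renderer state and then requests a single frame; a sketch, where setAngle()/getAngle() are hypothetical mutators on MyGLRenderer:

@Override
public boolean onTouchEvent(MotionEvent e) {
    if (e.getAction() == MotionEvent.ACTION_MOVE) {
        // Hypothetical renderer state; any change to the drawing data works.
        mRenderer.setAngle(mRenderer.getAngle() + 1.0f);
        requestRender(); // schedules exactly one redraw on the GL thread
    }
    return true;
}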

From source file:com.projecttango.examples.java.openglar.OpenGlAugmentedRealityActivity.java

/**
 * Set up the callback listeners for the Tango Service and obtain other parameters required
 * after Tango connection.
 * Listen to updates from the RGB camera.
 */
private void startupTango() {
    // No need to add any coordinate frame pairs since we aren't using pose data from callbacks.
    ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>();

    mTango.connectListener(framePairs, new OnTangoUpdateListener() {
        @Override
        public void onPoseAvailable(TangoPoseData pose) {
            // We are not using onPoseAvailable for this app.
        }

        @Override
        public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
            // We are not using onXyzIjAvailable for this app.
        }

        @Override
        public void onPointCloudAvailable(TangoPointCloudData pointCloud) {
            // We are not using onPointCloudAvailable for this app.
        }

        @Override
        public void onTangoEvent(TangoEvent event) {
            // We are not using onTangoEvent for this app.
        }

        @Override
        public void onFrameAvailable(int cameraId) {
            // Check if the frame available is for the camera we want and update its frame
            // on the view.
            if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
                // Now that we are receiving onFrameAvailable callbacks, we can switch
                // to RENDERMODE_WHEN_DIRTY to drive the render loop from this callback.
                // This will result in a frame rate of approximately 30FPS, in synchrony with
                // the RGB camera driver.
                // If you need to render at a higher rate (e.g., if you want to render complex
                // animations smoothly) you can use RENDERMODE_CONTINUOUSLY throughout the
                // application lifecycle.
                if (mSurfaceView.getRenderMode() != GLSurfaceView.RENDERMODE_WHEN_DIRTY) {
                    mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
                }

                // Mark a camera frame as available for rendering in the OpenGL thread.
                mIsFrameAvailableTangoThread.set(true);
                // Trigger an OpenGL render to update the OpenGL scene with the new RGB data.
                mSurfaceView.requestRender();
            }
        }
    });
}

From source file:com.almalence.plugins.capture.video.VideoCapturePlugin.java

private void createModeSwitcher() {
    LayoutInflater inflator = ApplicationScreen.instance.getLayoutInflater();
    modeSwitcher = (com.almalence.ui.Switch.Switch) inflator
            .inflate(R.layout.plugin_capture_standard_modeswitcher, null, false);

    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(ApplicationScreen.getMainContext());
    ModePreference = prefs.getString("modeVideoDROPref", "1");
    modeSwitcher.setTextOn(ApplicationScreen.instance.getString(R.string.Pref_Video_DRO_ON));
    modeSwitcher.setTextOff(ApplicationScreen.instance.getString(R.string.Pref_Video_DRO_OFF));
    modeSwitcher.setChecked(ModePreference.compareTo("0") == 0);
    modeSwitcher.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            SharedPreferences prefs = PreferenceManager
                    .getDefaultSharedPreferences(ApplicationScreen.getMainContext());

            if (isChecked) {
                ModePreference = "0";
                if (CameraController.isNexus6) {
                    Toast.makeText(ApplicationScreen.getMainContext(),
                            "Not supported currently on your device. Will be available later.",
                            Toast.LENGTH_LONG).show();
                    ModePreference = "1";
                    modeSwitcher.setChecked(false);
                    return;
                }
            } else {
                ModePreference = "1";
            }

            SharedPreferences.Editor editor = prefs.edit();
            editor.putString("modeVideoDROPref", ModePreference);
            editor.commit();

            if (modeDRO()) {
                int quality = Integer.parseInt(prefs.getString(
                        CameraController.getCameraIndex() == 0 ? ApplicationScreen.sImageSizeVideoBackPref
                                : ApplicationScreen.sImageSizeVideoFrontPref,
                        DEFAULT_VIDEO_QUALITY));
                if (quality > CamcorderProfile.QUALITY_720P || maxQuality()) {
                    quality = CamcorderProfile.QUALITY_720P;
                    quickControlIconID = R.drawable.gui_almalence_video_720;
                    editor.putString(
                            CameraController.getCameraIndex() == 0 ? ApplicationScreen.sImageSizeVideoBackPref
                                    : ApplicationScreen.sImageSizeVideoFrontPref,
                            String.valueOf(quality));
                    editor.commit();
                    VideoCapturePlugin.this.refreshQuickControl();
                }
            }

            try {
                CameraController.stopCameraPreview();
                setCameraPreviewSize();
                if (VideoCapturePlugin.this.modeDRO()) {
                    takePictureButton.setVisibility(View.GONE);
                    timeLapseButton.setVisibility(View.GONE);
                    ApplicationScreen.instance.showOpenGLLayer(2);
                    ApplicationScreen.instance.glSetRenderingMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
                } else {
                    if (!CameraController.isRemoteCamera()) {
                        if (displayTakePicture)
                            takePictureButton.setVisibility(View.VISIBLE);
                        timeLapseButton.setVisibility(View.VISIBLE);
                    }

                    droEngine.onPause();
                    ApplicationScreen.instance.hideOpenGLLayer();
                    if (!CameraController.isUseCamera2()) {
                        CameraController.setupCamera(ApplicationScreen.getPreviewSurfaceHolder(), true);
                    }
                    CameraController.startCameraPreview();
                }
            } catch (final Exception e) {
                Log.e(TAG, Util.toString(e.getStackTrace(), '\n'));
                e.printStackTrace();
            }
        }
    });

}
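
ApplicationScreen.glSetRenderingMode() is this app's own wrapper rather than an Android API; presumably it forwards to the shared GLSurfaceView created by showOpenGLLayer(), along these lines (the glView field is a hypothetical name):

public void glSetRenderingMode(final int renderMode) {
    if (renderMode != GLSurfaceView.RENDERMODE_WHEN_DIRTY
            && renderMode != GLSurfaceView.RENDERMODE_CONTINUOUSLY) {
        throw new IllegalArgumentException("Unknown render mode: " + renderMode);
    }
    if (glView != null) { // hypothetical field holding the shared GL layer
        glView.setRenderMode(renderMode);
    }
}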

From source file:com.projecttango.examples.java.greenscreen.GreenScreenActivity.java

/**
 * Set up the callback listeners for the Tango service and obtain other parameters required
 * after Tango connection.
 * Listen to updates from the RGB camera and the Point Cloud.
 */
private void startupTango() {
    // No need to add any coordinate frame pairs since we aren't using pose data from callbacks.
    ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>();

    mTango.connectListener(framePairs, new OnTangoUpdateListener() {
        @Override
        public void onPoseAvailable(TangoPoseData pose) {
            // We are not using onPoseAvailable for this app.
        }

        @Override
        public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
            // We are not using onXyzIjAvailable for this app.
        }

        @Override
        public void onPointCloudAvailable(TangoPointCloudData pointCloud) {
            // Save the cloud and point data for later use.
            mPointCloudManager.updatePointCloud(pointCloud);
        }

        @Override
        public void onTangoEvent(TangoEvent event) {
            // We are not using onTangoEvent for this app.
        }

        @Override
        public void onFrameAvailable(int cameraId) {
            // Check if the frame available is for the camera we want and update its frame
            // on the view.
            if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
                // Now that we are receiving onFrameAvailable callbacks, we can switch
                // to RENDERMODE_WHEN_DIRTY to drive the render loop from this callback.
                // This will result in a frame rate of approximately 30FPS, in synchrony with
                // the RGB camera driver.
                // If you need to render at a higher rate (e.g., if you want to render complex
                // animations smoothly) you can use RENDERMODE_CONTINUOUSLY throughout the
                // application lifecycle.
                if (mSurfaceView.getRenderMode() != GLSurfaceView.RENDERMODE_WHEN_DIRTY) {
                    mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
                }

                // Mark a camera frame as available for rendering in the OpenGL thread.
                mIsFrameAvailableTangoThread.set(true);
                // Trigger an OpenGL render to update the OpenGL scene with the new RGB data.
                mSurfaceView.requestRender();
            }
        }
    });

    // Obtain the intrinsic parameters of the color camera.
    mIntrinsics = mTango.getCameraIntrinsics(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
}

From source file:chenyoufu.hciprojectes10.MyGLSurfaceView.java

public MyGLSurfaceView(Context context) {
    super(context);

    readMesh();

    // Set the Renderer for drawing on the GLSurfaceView
    mRenderer = new MyGLRenderer();
    setRenderer(mRenderer);

    // Render the view only when there is a change in the drawing data
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

    mScaleDetector = new ScaleGestureDetector(context, new ScaleListener());
}
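
As in the previous constructor, pinch-zoom only becomes visible if the scale callback ends with requestRender(). A sketch of such a listener (mScaleFactor and the renderer's setScale() are illustrative):

private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
    @Override
    public boolean onScale(ScaleGestureDetector detector) {
        mScaleFactor *= detector.getScaleFactor();
        mScaleFactor = Math.max(0.1f, Math.min(mScaleFactor, 10.0f)); // clamp the zoom
        mRenderer.setScale(mScaleFactor); // hypothetical renderer mutator
        requestRender();                  // redraw once with the new scale
        return true;
    }
}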

From source file:com.aimfire.demo.CamcorderActivity.java

@Override
protected void onCreate(Bundle savedInstanceState) {
    if (BuildConfig.DEBUG)
        Log.d(TAG, "create CamcorderActivity");

    loadPrefs();

    /*
     *  keep the screen on until we turn off the flag 
     */
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_camcorder);

    /*
     * Obtain the FirebaseAnalytics instance.
     */
    mFirebaseAnalytics = FirebaseAnalytics.getInstance(this);

    /*
     * disable nfc push
     */
    NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(this);
    if (nfcAdapter != null)
        nfcAdapter.setNdefPushMessage(null, this);

    /*
     * get the natural orientation of this device. need to be called before
     * we fix the display orientation
     */
    mNaturalOrientation = getDeviceDefaultOrientation();

    /*
     * force CamcorderActivity in landscape because it is the natural 
     * orientation of the camera sensor
     */
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

    /*
     * get the orientation for SCREEN_ORIENTATION_LANDSCAPE mode. this is the 
     * clockwise rotation of landscape mode from device natural orientation.
     * reverse landscape is 180 degrees different. call this *after* the
     * display orientation is fixed, because only then getDefaultDisplay().
     * getRotation() will consistently return the value we require.
     */
    mLandscapeOrientation = getDeviceLandscapeOrientation();

    /*
     * apply the adapter to the spinner - for filter selection.
     */
    /*
    ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this,
    R.array.cameraFilterNames, android.R.layout.simple_spinner_item);
    adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
            
    spinner.setAdapter(adapter);
    spinner.setOnItemSelectedListener(this);
    */

    mParentView = getActivity().getWindow().getDecorView();

    mCaptureButton = (ImageButton) findViewById(R.id.toggle_recording_button);
    mView3DButton = (ImageButton) findViewById(R.id.view3D_button);
    mExitButton = (ImageButton) findViewById(R.id.exit_button);
    mPvButton = (ImageButton) findViewById(R.id.switch_photo_video_button);
    mFbButton = (ImageButton) findViewById(R.id.switch_front_back_button);
    mLevelButton = (Button) findViewById(R.id.level_button);
    mTimeCounter = (LinearLayout) findViewById(R.id.time_counter);
    mScanModeButton = (ImageButton) findViewById(R.id.mode_button);

    mCaptureButton.setOnClickListener(oclToggle);
    mView3DButton.setOnClickListener(oclView3D);
    mExitButton.setOnClickListener(oclExit);
    mPvButton.setOnClickListener(oclPV);
    mFbButton.setOnClickListener(oclFB);
    mScanModeButton.setOnClickListener(oclSwitchMode);

    mShutterLayout = (FrameLayout) findViewById(R.id.shutter_layout);
    mProgView = (ImageView) findViewById(R.id.circular_progress_view);

    mProgDrawable = new com.aimfire.utilities.CircularProgressDrawable.Builder().setRingWidth(10)
            .setRingColor(getResources().getColor(R.color.orange)).create();

    mProgView.setImageDrawable(mProgDrawable);

    mScanProgView = (ImageView) findViewById(R.id.scan_circular_progress_view);
    mScanProgView.setOnClickListener(oclScan);

    int[] centerGradient = new int[] { getResources().getColor(R.color.start_button_start_color_pressed),
            getResources().getColor(R.color.start_button_end_color_pressed) };

    mScanProgDrawable = new com.aimfire.utilities.CircularProgressDrawable.Builder().setRingWidth(10)
            .setInnerCircleScale(1.0f).setOutlineColor(getResources().getColor(R.color.dark_grey))
            .setRingColor(getResources().getColor(R.color.white))
            .setArcColor(getResources().getColor(android.R.color.holo_blue_dark))
            .setCenterGradient(centerGradient).setWifiBarColor(getResources().getColor(R.color.blue))
            .setMessageSize((int) (10/*sp*/ * getResources().getDisplayMetrics().density))
            .setMessageColor(getResources().getColor(R.color.white)).create();

    mScanProgView.setImageDrawable(mScanProgDrawable);

    /*
     * showing animation for searching remote device
     */
    startScanAnim();

    String startSound = null;
    String stopSound = null;
    for (String s : CAMERA_RECORDING_START_SOUND) {
        if ((new File(s)).exists()) {
            startSound = s;
            break;
        }
    }

    if (startSound != null) {
        mCamStartSoundPlayer = MediaPlayer.create(this, Uri.fromFile(new File(startSound)));
    }

    for (String s : CAMERA_RECORDING_STOP_SOUND) {
        if ((new File(s)).exists()) {
            stopSound = s;
            break;
        }
    }

    if (stopSound != null) {
        mCamStopSoundPlayer = MediaPlayer.create(this, Uri.fromFile(new File(stopSound)));
    }

    /*
     * file name prefix for solo mode. rest of the file name (date and time stamp) are
     * added when recording starts.
     */
    mMpegPrefixSolo = MainConsts.MEDIA_3D_SAVE_PATH + "MPG_solo_";

    /*
     * place UI controls at their initial, default orientation
     */
    adjustUIControls(0);

    /*
     * load the thumbnail of the newest movie to the view3D button
     */
    loadCurrThumbnail();

    /*
     * attempt to open camera with desired dimension. the dimension may be
     * changed if camera doesn't support it, in which case the "preferred" 
     * (by the camera) dimension will be used
     */
    boolean success = openCamera(Camera.CameraInfo.CAMERA_FACING_BACK, mQualityPref);

    if (!success) {
        Toast.makeText(this, R.string.error_opening_camera, Toast.LENGTH_LONG).show();

        finish();
        return;
    }

    /*
     * define a handler that receives camera-control messages from other threads.  
     * all calls to Camera must be made on the same thread.  note we create this 
     * before the renderer thread, so we know the fully-constructed object will 
     * be visible.
     */
    mCameraHandler = new CameraHandler(this);

    /*
     * configure the GLSurfaceView.  this will start the Renderer thread, with an
     * appropriate EGL context.
     */
    mGLView = (GLSurfaceView) findViewById(R.id.cameraPreview_surfaceView);
    mGLView.setEGLContextClientVersion(2); // select GLES 2.0
    mRenderer = new CameraSurfaceRenderer(mCameraHandler, mMovieEncoder);
    mGLView.setRenderer(mRenderer);
    mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    mGLView.setOnTouchListener(otl);

    /*
     * bind to Aimfire service
     */
    mAimfireServiceConn = new AimfireServiceConn(this);

    /*
     * binding doesn't happen until later. wait for it to happen in another 
     * thread and connect to p2p peer if necessary
     */
    (new Thread(mAimfireServiceInitTask)).start();

    /*
     * register for AimfireService message broadcast
     */
    LocalBroadcastManager.getInstance(this).registerReceiver(mAimfireServiceMsgReceiver,
            new IntentFilter(MainConsts.AIMFIRE_SERVICE_MESSAGE));

    /*
     * register for intents sent by the media processor service
     */
    LocalBroadcastManager.getInstance(this).registerReceiver(mMovieProcessorMsgReceiver,
            new IntentFilter(MainConsts.MOVIE_PROCESSOR_MESSAGE));

    /*
     * register for intents sent by the media processor service
     */
    LocalBroadcastManager.getInstance(this).registerReceiver(mMovieEncoderMsgReceiver,
            new IntentFilter(MainConsts.MOVIE_ENCODER_MESSAGE));
}
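
The "handler that receives camera-control messages" mentioned here and in CameraCaptureActivity follows the Grafika pattern: a Handler bound to the UI thread that keeps only a weak reference to the activity, so queued messages can never leak or touch a destroyed instance. A sketch under that assumption (the message constant and handleSetSurfaceTexture() hook are illustrative):

static class CameraHandler extends Handler {
    public static final int MSG_SET_SURFACE_TEXTURE = 0;

    // Weak reference so pending messages don't keep a dead activity alive.
    private final WeakReference<CamcorderActivity> mWeakActivity;

    public CameraHandler(CamcorderActivity activity) {
        mWeakActivity = new WeakReference<CamcorderActivity>(activity);
    }

    @Override
    public void handleMessage(Message msg) {
        CamcorderActivity activity = mWeakActivity.get();
        if (activity == null) {
            return; // activity is gone; drop the message
        }
        switch (msg.what) {
            case MSG_SET_SURFACE_TEXTURE:
                // e.g. activity.handleSetSurfaceTexture((SurfaceTexture) msg.obj);
                break;
            default:
                throw new RuntimeException("unknown message " + msg.what);
        }
    }
}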