List of usage examples for android.graphics.PixelFormat
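Most of the examples below follow the same pattern: query the camera preview format, fill a PixelFormat object via PixelFormat.getPixelFormatInfo(), and use its bitsPerPixel field to size the preview callback buffers. A minimal sketch of that pattern follows; the method name allocatePreviewBuffers and the buffer count of 5 are illustrative choices mirroring the examples, not a fixed API. On most devices the default preview format is NV21, for which bitsPerPixel is 12.

import android.graphics.PixelFormat;
import android.hardware.Camera;

// Sketch: size preview callback buffers from the preview format's bits per pixel.
void allocatePreviewBuffers(Camera camera) {
    Camera.Parameters params = camera.getParameters();
    Camera.Size size = params.getPreviewSize();
    int previewFormat = params.getPreviewFormat();        // an android.graphics.ImageFormat constant, typically NV21

    PixelFormat info = new PixelFormat();
    PixelFormat.getPixelFormatInfo(previewFormat, info);  // fills info.bitsPerPixel / info.bytesPerPixel

    int bufSize = size.width * size.height * info.bitsPerPixel / 8; // NV21: 12 bits per pixel
    for (int i = 0; i < 5; i++) {                          // buffer count chosen arbitrarily here
        camera.addCallbackBuffer(new byte[bufSize]);
    }
}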
From source file:org.artoolkit.ar.samples.nftBook.CameraSurface.java
@SuppressLint("NewApi") // CameraInfo @SuppressWarnings("deprecation") // setPreviewFrameRate @Override/* w w w . j av a 2 s. com*/ public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { if (camera != null) { String camResolution = PreferenceManager.getDefaultSharedPreferences(getContext()).getString( "pref_cameraResolution", getResources().getString(R.string.pref_defaultValue_cameraResolution)); String[] dims = camResolution.split("x", 2); Camera.Parameters parameters = camera.getParameters(); parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1])); parameters.setPreviewFrameRate(30); camera.setParameters(parameters); parameters = camera.getParameters(); int capWidth = parameters.getPreviewSize().width; int capHeight = parameters.getPreviewSize().height; int pixelformat = parameters.getPreviewFormat(); // android.graphics.imageformat PixelFormat pixelinfo = new PixelFormat(); PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo); int cameraIndex = 0; boolean frontFacing = false; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); cameraIndex = Integer.parseInt(PreferenceManager.getDefaultSharedPreferences(getContext()) .getString("pref_cameraIndex", "0")); Camera.getCameraInfo(cameraIndex, cameraInfo); if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) frontFacing = true; } int bufSize = capWidth * capHeight * pixelinfo.bitsPerPixel / 8; // For the default NV21 format, bitsPerPixel = 12. for (int i = 0; i < 5; i++) camera.addCallbackBuffer(new byte[bufSize]); camera.startPreview(); nftBookActivity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing); } }
From source file:org.artoolkit.ar.samples.ARNativeES1.CameraSurface.java
@SuppressLint("NewApi") // CameraInfo @SuppressWarnings("deprecation") // setPreviewFrameRate @Override//from w w w. j av a 2 s. co m public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { if (camera != null) { String camResolution = PreferenceManager.getDefaultSharedPreferences(getContext()).getString( "pref_cameraResolution", getResources().getString(R.string.pref_defaultValue_cameraResolution)); String[] dims = camResolution.split("x", 2); Camera.Parameters parameters = camera.getParameters(); parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1])); parameters.setPreviewFrameRate(30); camera.setParameters(parameters); parameters = camera.getParameters(); int capWidth = parameters.getPreviewSize().width; int capHeight = parameters.getPreviewSize().height; int pixelformat = parameters.getPreviewFormat(); // android.graphics.imageformat PixelFormat pixelinfo = new PixelFormat(); PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo); int cameraIndex = 0; boolean frontFacing = false; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); cameraIndex = Integer.parseInt(PreferenceManager.getDefaultSharedPreferences(getContext()) .getString("pref_cameraIndex", "0")); Camera.getCameraInfo(cameraIndex, cameraInfo); if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) frontFacing = true; } int bufSize = capWidth * capHeight * pixelinfo.bitsPerPixel / 8; // For the default NV21 format, bitsPerPixel = 12. for (int i = 0; i < 5; i++) camera.addCallbackBuffer(new byte[bufSize]); camera.startPreview(); ARNativeES1Activity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing); } }
From source file:org.artoolkit.ar.samples.ARNativeOSG.CameraSurface.java
@SuppressLint("NewApi") // CameraInfo @SuppressWarnings("deprecation") // setPreviewFrameRate @Override// w w w . j a v a2 s.co m public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { if (camera != null) { String camResolution = PreferenceManager.getDefaultSharedPreferences(getContext()).getString( "pref_cameraResolution", getResources().getString(R.string.pref_defaultValue_cameraResolution)); String[] dims = camResolution.split("x", 2); Camera.Parameters parameters = camera.getParameters(); parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1])); parameters.setPreviewFrameRate(30); camera.setParameters(parameters); parameters = camera.getParameters(); int capWidth = parameters.getPreviewSize().width; int capHeight = parameters.getPreviewSize().height; int pixelformat = parameters.getPreviewFormat(); // android.graphics.imageformat PixelFormat pixelinfo = new PixelFormat(); PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo); int cameraIndex = 0; boolean frontFacing = false; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); cameraIndex = Integer.parseInt(PreferenceManager.getDefaultSharedPreferences(getContext()) .getString("pref_cameraIndex", "0")); Camera.getCameraInfo(cameraIndex, cameraInfo); if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) frontFacing = true; } int bufSize = capWidth * capHeight * pixelinfo.bitsPerPixel / 8; // For the default NV21 format, bitsPerPixel = 12. for (int i = 0; i < 5; i++) camera.addCallbackBuffer(new byte[bufSize]); camera.startPreview(); ARNativeOSGActivity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing); } }
From source file:com.mobiuso.argo.ARModule.CameraSurface.java
@SuppressLint("NewApi") // CameraInfo @SuppressWarnings("deprecation") // setPreviewFrameRate @Override/*from w w w . ja va 2s. c o m*/ public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { if (camera != null) { String camResolution = PreferenceManager.getDefaultSharedPreferences(getContext()).getString( "pref_cameraResolution", getResources().getString(R.string.pref_defaultValue_cameraResolution)); String[] dims = camResolution.split("x", 2); Camera.Parameters parameters = camera.getParameters(); parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1])); parameters.setPreviewFrameRate(30); camera.setParameters(parameters); parameters = camera.getParameters(); int capWidth = parameters.getPreviewSize().width; int capHeight = parameters.getPreviewSize().height; int pixelformat = parameters.getPreviewFormat(); // android.graphics.imageformat PixelFormat pixelinfo = new PixelFormat(); PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo); int cameraIndex = 0; boolean frontFacing = false; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); cameraIndex = Integer.parseInt(PreferenceManager.getDefaultSharedPreferences(getContext()) .getString("pref_cameraIndex", "0")); Camera.getCameraInfo(cameraIndex, cameraInfo); if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) frontFacing = true; } int bufSize = capWidth * capHeight * pixelinfo.bitsPerPixel / 8; // For the default NV21 format, bitsPerPixel = 12. for (int i = 0; i < 5; i++) camera.addCallbackBuffer(new byte[bufSize]); camera.startPreview(); ARMovieActivity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing); } }
From source file:org.artoolkit.ar.samples.ARNative.CameraSurface.java
@SuppressLint("NewApi") // CameraInfo @SuppressWarnings("deprecation") // setPreviewFrameRate @Override/*from w w w.ja v a 2 s . co m*/ public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { if (camera != null) { String camResolution = PreferenceManager.getDefaultSharedPreferences(getContext()).getString( "pref_cameraResolution", getResources().getString(R.string.pref_defaultValue_cameraResolution)); String[] dims = camResolution.split("x", 2); Camera.Parameters parameters = camera.getParameters(); parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1])); parameters.setPreviewFrameRate(30); camera.setParameters(parameters); parameters = camera.getParameters(); int capWidth = parameters.getPreviewSize().width; int capHeight = parameters.getPreviewSize().height; int pixelformat = parameters.getPreviewFormat(); // android.graphics.imageformat PixelFormat pixelinfo = new PixelFormat(); PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo); int cameraIndex = 0; boolean frontFacing = false; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); cameraIndex = Integer.parseInt(PreferenceManager.getDefaultSharedPreferences(getContext()) .getString("pref_cameraIndex", "0")); Camera.getCameraInfo(cameraIndex, cameraInfo); if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) frontFacing = true; } int bufSize = capWidth * capHeight * pixelinfo.bitsPerPixel / 8; // For the default NV21 format, bitsPerPixel = 12. for (int i = 0; i < 5; i++) camera.addCallbackBuffer(new byte[bufSize]); camera.startPreview(); ARNativeActivity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing); } }
From source file:org.artoolkit.ar.samples.nftSimple.CameraSurface.java
@SuppressLint("NewApi") // CameraInfo @SuppressWarnings("deprecation") // setPreviewFrameRate @Override// www .java 2s . c o m public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { if (camera != null) { String camResolution = PreferenceManager.getDefaultSharedPreferences(getContext()).getString( "pref_cameraResolution", getResources().getString(R.string.pref_defaultValue_cameraResolution)); String[] dims = camResolution.split("x", 2); Camera.Parameters parameters = camera.getParameters(); parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1])); parameters.setPreviewFrameRate(30); camera.setParameters(parameters); parameters = camera.getParameters(); int capWidth = parameters.getPreviewSize().width; int capHeight = parameters.getPreviewSize().height; int pixelformat = parameters.getPreviewFormat(); // android.graphics.imageformat PixelFormat pixelinfo = new PixelFormat(); PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo); int cameraIndex = 0; boolean frontFacing = false; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); cameraIndex = Integer.parseInt(PreferenceManager.getDefaultSharedPreferences(getContext()) .getString("pref_cameraIndex", "0")); Camera.getCameraInfo(cameraIndex, cameraInfo); if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) frontFacing = true; } int bufSize = capWidth * capHeight * pixelinfo.bitsPerPixel / 8; // For the default NV21 format, bitsPerPixel = 12. for (int i = 0; i < 5; i++) camera.addCallbackBuffer(new byte[bufSize]); camera.startPreview(); nftSimpleActivity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing); } }
From source file:org.artoolkit.ar.utils.calib_optical.CameraSurface.java
@SuppressLint("NewApi") // CameraInfo @SuppressWarnings("deprecation") // setPreviewFrameRate @Override//ww w .j a va 2s .c om public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { if (camera != null) { String camResolution = PreferenceManager.getDefaultSharedPreferences(getContext()).getString( "pref_cameraResolution", getResources().getString(R.string.pref_defaultValue_cameraResolution)); String[] dims = camResolution.split("x", 2); Camera.Parameters parameters = camera.getParameters(); parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1])); parameters.setPreviewFrameRate(30); camera.setParameters(parameters); parameters = camera.getParameters(); int capWidth = parameters.getPreviewSize().width; int capHeight = parameters.getPreviewSize().height; int pixelformat = parameters.getPreviewFormat(); // android.graphics.imageformat PixelFormat pixelinfo = new PixelFormat(); PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo); int cameraIndex = 0; boolean frontFacing = false; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); cameraIndex = Integer.parseInt(PreferenceManager.getDefaultSharedPreferences(getContext()) .getString("pref_cameraIndex", "0")); Camera.getCameraInfo(cameraIndex, cameraInfo); if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) frontFacing = true; } int bufSize = capWidth * capHeight * pixelinfo.bitsPerPixel / 8; // For the default NV21 format, bitsPerPixel = 12. for (int i = 0; i < 5; i++) camera.addCallbackBuffer(new byte[bufSize]); camera.startPreview(); calib_optical_Activity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing); } }
From source file:org.artoolkit.ar.base.camera.CaptureCameraPreview.java
@SuppressWarnings("deprecation") // setPreviewFrameRate, getPreviewFrameRate @Override//from w w w .j a v a2 s . c o m public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { Log.i(TAG, "surfaceChanged(): called"); if (camera == null) { // Camera wasn't opened successfully? Log.e(TAG, "surfaceChanged(): No camera in surfaceChanged"); return; } Log.i(TAG, "surfaceChanged(): Surfaced changed, setting up camera and starting preview"); String camResolution = PreferenceManager.getDefaultSharedPreferences(getContext()).getString( "pref_cameraResolution", getResources().getString(R.string.pref_defaultValue_cameraResolution)); String[] dims = camResolution.split("x", 2); Camera.Parameters parameters = camera.getParameters(); parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1])); parameters.setPreviewFrameRate(30); camera.setParameters(parameters); parameters = camera.getParameters(); captureWidth = parameters.getPreviewSize().width; captureHeight = parameters.getPreviewSize().height; captureRate = parameters.getPreviewFrameRate(); int pixelformat = parameters.getPreviewFormat(); // android.graphics.imageformat PixelFormat pixelinfo = new PixelFormat(); PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo); int cameraIndex = 0; boolean cameraIsFrontFacing = false; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); cameraIndex = Integer.parseInt( PreferenceManager.getDefaultSharedPreferences(getContext()).getString("pref_cameraIndex", "0")); Camera.getCameraInfo(cameraIndex, cameraInfo); if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) cameraIsFrontFacing = true; } int bufSize = captureWidth * captureHeight * pixelinfo.bitsPerPixel / 8; // For the default NV21 format, bitsPerPixel = 12. Log.i(TAG, "surfaceChanged(): Camera buffers will be " + captureWidth + "x" + captureHeight + "@" + pixelinfo.bitsPerPixel + "bpp, " + bufSize + "bytes."); cameraWrapper = new CameraWrapper(camera); cameraWrapper.configureCallback(this, true, 10, bufSize); // For the default NV21 format, bitsPerPixel = 12. camera.startPreview(); if (listener != null) listener.cameraPreviewStarted(captureWidth, captureHeight, captureRate, cameraIndex, cameraIsFrontFacing); }
From source file:com.nekomeshi312.whiteboardcorrection.CameraViewFragment.java
private void setPreviewCallback() {
    if (!mCameraSetting.isCameraOpen())
        return;
    final Camera camera = mCameraSetting.getCamera();

    // Bits per pixel of the current preview format.
    PixelFormat pixelinfo = new PixelFormat();
    int pixelformat = camera.getParameters().getPreviewFormat();
    PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);

    // Current preview size.
    Camera.Parameters parameters = camera.getParameters();
    Size sz = parameters.getPreviewSize();

    // Allocate a callback buffer large enough for one preview frame.
    int bufSize = sz.width * sz.height * pixelinfo.bitsPerPixel / 8;
    mBuffer = new byte[bufSize];
    camera.addCallbackBuffer(mBuffer);
    camera.setPreviewCallbackWithBuffer(this);
}
From source file:org.videolan.vlc.gui.video.VideoPlayerActivity.java
@Override
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.player);

    final MyHandler myHandler = new MyHandler();
    final LayoutInflater inflater = LayoutInflater.from(this);
    msgContainer = (LinearLayout) findViewById(R.id.msg_container);

    int current_locID;
    if (ActivityDevice.current_locID == -1)
        current_locID = ActivityShiPin.current_locID;
    else
        current_locID = ActivityDevice.current_locID;

    ArrayList<Integer> locIDList = MainActivity.locIDList;
    HashMap<Integer, ArrayList<Integer>> loc_devMap = MainActivity.loc_devMap;
    dev_typeMap = MainActivity.dev_typeMap;
    HashMap<Integer, String> dev_nameMap = MainActivity.dev_nameMap;
    Log.e("*****localID", String.valueOf(current_locID));

    if (current_locID != -1) {
        if (loc_devMap.containsKey(current_locID)) {
            devList = loc_devMap.get(current_locID);
            if (devList.size() != 0) {
                int sub = devList.get(0) - 0;
                final ArrayList<Integer> brokenList = new ArrayList<Integer>();
                brokenList.add(0);
                int i;
                for (i = 0; i < devList.size(); i++) {
                    int devID = devList.get(i);
                    Log.e("@@@@@@@@", String.valueOf(i));
                    if (sub != devID - i) {
                        brokenList.add(i);
                        sub = devID - i;
                    }
                    // int typeID = dev_typeMap.get(devID);
                    final View view = inflater.inflate(R.layout.senssor_msg, null);
                    TextView type = (TextView) view.findViewById(R.id.type);
                    TextView msg = (TextView) view.findViewById(R.id.msg);
                    type.setText(dev_nameMap.get(devID) + ":");
                    msgContainer.addView(view);
                }
                brokenList.add(i);

                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        Log.e(">>>>>>>>>", "??");
                        String a = null;
                        try {
                            a = WebServiceUtil.getHd("sss", "njuptcloud");
                        } catch (Exception e1) {
                            // TODO Auto-generated catch block
                            Log.e(">>>>>>>>>", "?a " + e1.getMessage());
                            e1.printStackTrace();
                        }
                        if (a != null) {
                            int i = 0;
                            List<String> msgList = new ArrayList<String>();
                            while (i < (brokenList.size() - 1)) {
                                try {
                                    msgList.addAll(WebServiceUtil.getMs(a, devList.get(brokenList.get(i)),
                                            devList.get(brokenList.get(i + 1) - 1)));
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                                i++;
                            }
                            // List<String> resultList = new ArrayList<String>();
                            // resultList.add(object)
                            Message msg = new Message();
                            msg.obj = msgList;
                            myHandler.sendMessage(msg);
                        } else {
                            Log.e(">>>>>>>", "??");
                            Message msg = new Message();
                            msg.obj = null;
                            myHandler.sendMessage(msg);
                        }
                    }
                }).start();
            }
        }
    }

    directionLayout = (LinearLayout) findViewById(R.id.direction_layout);
    left = (ImageButton) findViewById(R.id.left);
    left.setOnClickListener(directionListenr);
    bottom = (ImageButton) findViewById(R.id.bottom);
    bottom.setOnClickListener(directionListenr);
    top = (ImageButton) findViewById(R.id.top);
    top.setOnClickListener(directionListenr);
    right = (ImageButton) findViewById(R.id.right);
    right.setOnClickListener(directionListenr);

    SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(this);

    if (Util.isICSOrLater())
        getWindow().getDecorView().findViewById(android.R.id.content)
                .setOnSystemUiVisibilityChangeListener(new OnSystemUiVisibilityChangeListener() {
                    @Override
                    public void onSystemUiVisibilityChange(int visibility) {
                        if (visibility == mUiVisibility)
                            return;
                        setSurfaceSize(mVideoWidth, mVideoHeight, mVideoVisibleWidth, mVideoVisibleHeight,
                                mSarNum, mSarDen);
                        if (visibility == View.SYSTEM_UI_FLAG_VISIBLE && !mShowing) {
                            showOverlay();
                        }
                        mUiVisibility = visibility;
                    }
                });

    /** initialize Views and their Events */
    mOverlayHeader = findViewById(R.id.player_overlay_header);
    mOverlayHeader.setVisibility(View.GONE);
    mOverlayLock = findViewById(R.id.lock_overlay);
    mOverlayOption = findViewById(R.id.option_overlay);
    mOverlayProgress = findViewById(R.id.progress_overlay);
    mOverlayInterface = findViewById(R.id.interface_overlay);
    play_lay = (RelativeLayout) findViewById(R.id.play_lay);
    progress_lay = (LinearLayout) findViewById(R.id.progress_lay);

    /* header */
    mTitle = (TextView) findViewById(R.id.player_overlay_title);
    mSysTime = (TextView) findViewById(R.id.player_overlay_systime);
    mBattery = (TextView) findViewById(R.id.player_overlay_battery);

    // Position and remaining time
    mTime = (TextView) findViewById(R.id.player_overlay_time);
    mTime.setOnClickListener(mRemainingTimeListener);
    mLength = (TextView) findViewById(R.id.player_overlay_length);
    mLength.setOnClickListener(mRemainingTimeListener);

    // the info textView is not on the overlay
    mInfo = (TextView) findViewById(R.id.player_overlay_info);

    mEnableWheelbar = pref.getBoolean("enable_wheel_bar", false);
    mEnableBrightnessGesture = pref.getBoolean("enable_brightness_gesture", true);
    mScreenOrientation = Integer
            .valueOf(pref.getString("screen_orientation_value", "4" /* SCREEN_ORIENTATION_SENSOR */));

    mControls = mEnableWheelbar ? new PlayerControlWheel(this) : new PlayerControlClassic(this);
    mControls.setOnPlayerControlListener(mPlayerControlListener);
    FrameLayout mControlContainer = (FrameLayout) findViewById(R.id.player_control);
    mControlContainer.addView((View) mControls);

    mAudioTrack = (ImageButton) findViewById(R.id.player_overlay_audio);
    mAudioTrack.setVisibility(View.GONE);
    mSubtitle = (ImageButton) findViewById(R.id.player_overlay_subtitle);
    mSubtitle.setVisibility(View.GONE);

    mHandler.postDelayed(new Runnable() {
        @Override
        public void run() {
            /* FIXME
             * The setTracksAndSubtitles method probably doesn't work in case of many many Tracks and Subtitles
             * Moreover, in a video stream, if Tracks & Subtitles change, they won't be updated
             */
            setESTrackLists();
        }
    }, 1500);

    mLock = (ImageButton) findViewById(R.id.lock_overlay_button);
    mLock.setOnClickListener(mLockListener);
    mSize = (ImageButton) findViewById(R.id.player_overlay_size);
    mSize.setOnClickListener(mSizeListener);

    snapshot_lay = (LinearLayout) findViewById(R.id.snapshot_lay);
    radio_onOrPause_lay = (LinearLayout) findViewById(R.id.radio_onOrPause_lay);
    record_lay = (LinearLayout) findViewById(R.id.record_lay);
    voice_lay = (LinearLayout) findViewById(R.id.voice_lay);

    mSnapShot = (ImageButton) findViewById(R.id.snapshot_overlay_button);
    mSnapShot.setOnClickListener(mSnapShotListener);
    mRecord = (ImageButton) findViewById(R.id.record_overlay_button);
    mRecord.setOnClickListener(mRecordListener);

    mRadio = (ImageButton) findViewById(R.id.radio_onOrPause_button);
    mRadio_tv = (TextView) findViewById(R.id.radio_onOrPause_tv);
    mRadio.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mRadio.getBackground().getConstantState() == getResources().getDrawable(R.drawable.radio_on1)
                    .getConstantState()) {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mRadio.setBackgroundDrawable(getResources().getDrawable(R.drawable.radio_pause));
                } else {
                    mRadio.setBackground(getResources().getDrawable(R.drawable.radio_pause));
                }
                mRadio_tv.setText("?");
                play();
            } else {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mRadio.setBackgroundDrawable(getResources().getDrawable(R.drawable.radio_on1));
                } else {
                    mRadio.setBackground(getResources().getDrawable(R.drawable.radio_on1));
                }
                mRadio_tv.setText("?");
                pause();
            }
        }
    });

    mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
    mAudioMax = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
    mVoice = (ImageButton) findViewById(R.id.voice_overlay_button);
    mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, mAudioMax, 0);
    mVoice.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mVoice.getBackground().getConstantState() == getResources().getDrawable(R.drawable.voice_on)
                    .getConstantState()) {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mVoice.setBackgroundDrawable(getResources().getDrawable(R.drawable.voice_off));
                } else {
                    mVoice.setBackground(getResources().getDrawable(R.drawable.voice_off));
                }
                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
            } else {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mVoice.setBackgroundDrawable(getResources().getDrawable(R.drawable.voice_on));
                } else {
                    mVoice.setBackground(getResources().getDrawable(R.drawable.voice_on));
                }
                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, mAudioMax, 0);
            }
        }
    });

    mSurface = (SurfaceView) findViewById(R.id.player_surface);
    mSurfaceHolder = mSurface.getHolder();
    mSurfaceFrame = (FrameLayout) findViewById(R.id.player_surface_frame);
    mSurfaceFrame.setOnTouchListener(new OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            if (MotionEvent.ACTION_DOWN == event.getAction()) {
                if (System.currentTimeMillis() - clickTime < 500) {
                    if (mCurrentSize == SURFACE_4_3) {
                        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    } else {
                        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    }
                } else {
                    if (!mShowing) {
                        showOverlay();
                        mSurfaceFrame.setFocusable(false);
                    } else {
                        hideOverlay(true);
                    }
                }
                clickTime = System.currentTimeMillis();
                if (directionLayout.getVisibility() == View.INVISIBLE) {
                    directionLayout.setVisibility(View.VISIBLE);
                } else {
                    directionLayout.setVisibility(View.INVISIBLE);
                }
            }
            return true;
        }
    });

    mOrientationListener = new OrientationEventListener(this) {
        @Override
        public void onOrientationChanged(int rotation) {
            if (((rotation >= 0) && (rotation <= 45)) || (rotation >= 315)
                    || ((rotation >= 135) && (rotation <= 225))) { // portrait
                mCurrentOrient = true;
                if (mCurrentOrient != mScreenProtrait) {
                    mScreenProtrait = mCurrentOrient;
                    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    Log.d(TAG, "Screen orientation changed from Landscape to Portrait!");
                }
            } else if (((rotation > 45) && (rotation < 135)) || ((rotation > 225) && (rotation < 315))) { // landscape
                mCurrentOrient = false;
                if (mCurrentOrient != mScreenProtrait) {
                    mScreenProtrait = mCurrentOrient;
                    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    Log.d(TAG, "Screen orientation changed from Portrait to Landscape!");
                }
            }
        }
    };
    mOrientationListener.enable();

    int pitch;
    String chroma = pref.getString("chroma_format", "");
    if (Util.isGingerbreadOrLater() && chroma.equals("YV12")) {
        mSurfaceHolder.setFormat(ImageFormat.YV12);
        pitch = ImageFormat.getBitsPerPixel(ImageFormat.YV12) / 8;
    } else if (chroma.equals("RV16")) {
        mSurfaceHolder.setFormat(PixelFormat.RGB_565);
        PixelFormat info = new PixelFormat();
        PixelFormat.getPixelFormatInfo(PixelFormat.RGB_565, info);
        pitch = info.bytesPerPixel;
    } else {
        mSurfaceHolder.setFormat(PixelFormat.RGBX_8888);
        PixelFormat info = new PixelFormat();
        PixelFormat.getPixelFormatInfo(PixelFormat.RGBX_8888, info);
        pitch = info.bytesPerPixel;
    }
    mSurfaceAlign = 16 / pitch - 1;
    mSurfaceHolder.addCallback(mSurfaceCallback);

    mSeekbar = (SeekBar) findViewById(R.id.player_overlay_seekbar);
    mSeekbar.setOnSeekBarChangeListener(mSeekListener);

    mSwitchingView = false;
    mEndReached = false;

    // Clear the resume time, since it is only used for resumes in external videos.
    SharedPreferences preferences = getSharedPreferences(PreferencesActivity.NAME, MODE_PRIVATE);
    SharedPreferences.Editor editor = preferences.edit();
    editor.putLong(PreferencesActivity.VIDEO_RESUME_TIME, -1);
    // Also clear the subs list, because it is supposed to be per session only (like desktop VLC).
    // We don't want the custom subtitle file to persist forever with this video.
    editor.putString(PreferencesActivity.VIDEO_SUBTITLE_FILES, null);
    editor.commit();

    IntentFilter filter = new IntentFilter();
    filter.addAction(Intent.ACTION_BATTERY_CHANGED);
    filter.addAction(VLCApplication.SLEEP_INTENT);
    registerReceiver(mReceiver, filter);

    try {
        mLibVLC = Util.getLibVlcInstance();
    } catch (LibVlcException e) {
        Log.d(TAG, "LibVLC initialisation failed");
        return;
    }

    EventHandler em = EventHandler.getInstance();
    em.addHandler(eventHandler);

    this.setVolumeControlStream(AudioManager.STREAM_MUSIC);

    // 100 is the value for screen_orientation_start_lock
    // setRequestedOrientation(mScreenOrientation != 100
    //         ? mScreenOrientation
    //         : getScreenOrientation());
    // setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
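The VLC example above also shows the other common use of PixelFormat: looking up bytesPerPixel for a SurfaceHolder format in order to compute the line pitch and alignment. A reduced sketch of just that part follows; the method name configureSurface and the boolean flag are illustrative, and the 16-byte alignment arithmetic simply mirrors the example above.

import android.graphics.PixelFormat;
import android.view.SurfaceHolder;

// Sketch: set the surface pixel format and derive a pitch-alignment mask from bytes per pixel.
static int configureSurface(SurfaceHolder holder, boolean useRgb565) {
    int format = useRgb565 ? PixelFormat.RGB_565 : PixelFormat.RGBX_8888;
    holder.setFormat(format);

    PixelFormat info = new PixelFormat();
    PixelFormat.getPixelFormatInfo(format, info); // RGB_565 -> 2 bytes per pixel, RGBX_8888 -> 4

    int pitch = info.bytesPerPixel;
    return 16 / pitch - 1;                        // alignment mask, as computed in the example above
}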