List of usage examples for android.graphics.ImageFormat.YV12
int YV12 (constant value 0x32315659)
Android YUV format.
This format is exposed to software decoders and applications.
YV12 is a 4:2:0 YCrCb planar format comprised of a WxH Y plane followed by (W/2) x (H/2) Cr and Cb planes.
This format assumes an even width, an even height, a horizontal stride that is a multiple of 16 pixels, and a vertical stride equal to the height:

y_size = stride * height
c_stride = ALIGN(stride / 2, 16)
c_size = c_stride * height / 2
size = y_size + c_size * 2
cr_offset = y_size
cb_offset = y_size + c_size
For the android.hardware.camera2 API, the YUV_420_888 format is recommended for YUV output instead.

For the older camera API, this format is guaranteed to be supported for android.hardware.Camera preview images since API level 12; for earlier API versions, check android.hardware.Camera.Parameters#getSupportedPreviewFormats().
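As a quick illustration (not part of the examples below), the layout rules above can be expressed directly in code. The class name and the align helper in this sketch are made up for illustration; the arithmetic is taken verbatim from the formulas above and assumes a stride that is a multiple of 16.

// Minimal sketch: computes the YV12 buffer layout from the formulas above.
// Assumes an even width/height and a stride that is a multiple of 16.
public final class Yv12Layout {
    public final int ySize, cStride, cSize, size, crOffset, cbOffset;

    public Yv12Layout(int width, int height, int stride) {
        ySize = stride * height;
        cStride = align(stride / 2, 16);
        cSize = cStride * height / 2;
        size = ySize + cSize * 2;
        crOffset = ySize;          // Cr (V) plane comes first in YV12
        cbOffset = ySize + cSize;  // Cb (U) plane follows
    }

    private static int align(int x, int alignment) {
        return ((x + alignment - 1) / alignment) * alignment;
    }
}

For example, a 640x480 buffer with a 640-pixel stride gives y_size = 307200, c_stride = 320, c_size = 76800, and a total size of 460800 bytes (width * height * 3 / 2).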
From source file:Main.java
public static byte[] swapColors(byte[] data, int w, int h, int pictureFormat) {
    switch (pictureFormat) {
        case ImageFormat.YV12:
            return swapYUV420Planar(data);
        case ImageFormat.NV21:
            return swapYUV420SemiPlanar(data, w, h);
        default:
            Log.w("Util", "No color format to swap");
    }
    return data;
}
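The swapYUV420Planar helper called above is not shown on this page. Below is only a guess at what such a helper might do for YV12: assuming a tightly packed planar buffer (stride equal to width, length equal to width * height * 3 / 2), exchanging the two quarter-size chroma planes converts between YV12 (Y, Cr, Cb) and I420 (Y, Cb, Cr).

// Hypothetical sketch of the swapYUV420Planar helper referenced above.
// Assumes a tightly packed 4:2:0 planar buffer: a full-size Y plane
// followed by two quarter-size chroma planes (length == w * h * 3 / 2).
private static byte[] swapYUV420Planar(byte[] data) {
    int ySize = data.length * 2 / 3;          // size of the Y plane
    int cSize = (data.length - ySize) / 2;    // size of one chroma plane
    byte[] out = new byte[data.length];
    // copy the Y plane unchanged
    System.arraycopy(data, 0, out, 0, ySize);
    // first chroma plane of the input becomes the second of the output
    System.arraycopy(data, ySize, out, ySize + cSize, cSize);
    // second chroma plane of the input becomes the first of the output
    System.arraycopy(data, ySize + cSize, out, ySize, cSize);
    return out;
}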
From source file:Main.java
public static int getEncoderColorFormat(int previewFormat) {
    if (Build.VERSION.SDK_INT >= 21) {
        return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
    }
    switch (previewFormat) {
        case ImageFormat.NV21:
            return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
        case ImageFormat.YV12:
            return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
    }
    return -1;
}
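A hedged usage sketch (not taken from the page) of feeding the returned color format into a MediaCodec configuration; the resolution, bit rate, frame rate, and I-frame interval below are placeholder values.

// Illustrative only: configure an H.264 encoder with the color format chosen above.
static MediaCodec createH264Encoder(int previewFormat) throws IOException {
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, getEncoderColorFormat(previewFormat));
    format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    return encoder;
}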
From source file:Main.java
public static String translatePreviewFormat(int supportedPreviewFormat) {
    switch (supportedPreviewFormat) {
        case ImageFormat.JPEG:
            return "ImageFormat.JPEG";
        case ImageFormat.NV16:
            return "ImageFormat.NV16";
        case ImageFormat.NV21:
            return "ImageFormat.NV21";
        case ImageFormat.RAW10:
            return "ImageFormat.RAW10";
        case ImageFormat.RAW_SENSOR:
            return "ImageFormat.RAW_SENSOR";
        case ImageFormat.RGB_565:
            return "ImageFormat.RGB_565";
        case ImageFormat.UNKNOWN:
            return "ImageFormat.UNKNOWN";
        case ImageFormat.YUV_420_888:
            return "ImageFormat.YUV_420_888";
        case ImageFormat.YUY2:
            return "ImageFormat.YUY2";
        case ImageFormat.YV12:
            return "ImageFormat.YV12";
        default:
            return "xxxxxxxxdefault";
    }
}
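A possible caller (not from the page), tying this helper back to the legacy android.hardware.Camera API mentioned in the documentation above; the log tag is arbitrary.

// Hedged sketch: log the preview formats reported by the legacy camera API.
@SuppressWarnings("deprecation")
static void logSupportedPreviewFormats() {
    android.hardware.Camera camera = android.hardware.Camera.open();
    if (camera == null) {
        return;
    }
    try {
        for (Integer format : camera.getParameters().getSupportedPreviewFormats()) {
            Log.d("PreviewFormats", translatePreviewFormat(format));
        }
    } finally {
        camera.release();
    }
}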
From source file:Main.java
private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
        case ImageFormat.YUV_420_888:
        case ImageFormat.NV21:
        case ImageFormat.YV12:
            return 3 == planes.length;
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.JPEG:
            return 1 == planes.length;
        default:
            return false;
    }
}
From source file:Main.java
private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
        case ImageFormat.YUV_420_888:
        case ImageFormat.NV21:
        case ImageFormat.YV12:
            return 3 == planes.length;
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.RAW12:
        case ImageFormat.JPEG:
            return 1 == planes.length;
        default:
            return false;
    }
}
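A minimal sketch (not from the page) of how a validation helper like checkAndroidImageFormat() might be used inside an ImageReader callback before the planes are touched; the log tag and the processing placeholder are assumptions.

// Hedged usage sketch: validate the plane count before processing an Image.
ImageReader.OnImageAvailableListener listener = reader -> {
    Image image = reader.acquireNextImage();
    if (image == null) {
        return;
    }
    try {
        if (!checkAndroidImageFormat(image)) {
            Log.w("ImageCheck", "Unexpected format/plane count: " + image.getFormat());
            return;
        }
        // ... process image.getPlanes() here ...
    } finally {
        image.close();
    }
};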
From source file:com.example.android.camera2basic.Fragment.Camera2BasicFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(
                    Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());
            Log.d(TAG, "largest.width: " + largest.getWidth());
            Log.d(TAG, "largest.height: " + largest.getHeight());
            // mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
            //         ImageFormat.YUV_420_888, /*maxImages*/5);
            // mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            Log.d(TAG, "displayRotation: " + displayRotation);
            Log.d(TAG, "sensorOritentation: " + mSensorOrientation);
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;
            Log.d(TAG, "maxPreviewWidth: " + maxPreviewWidth);
            Log.d(TAG, "maxPreviewHeight: " + maxPreviewHeight);

            if (swappedDimensions) {
                // rotatedPreviewWidth = height;
                // rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }
            Log.d(TAG, "real preview width: " + rotatedPreviewWidth);
            Log.d(TAG, "real preview height: " + rotatedPreviewHeight);

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight,
                    maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            Log.d(TAG, "mPreviewSize.getWidth: " + mPreviewSize.getWidth());
            Log.d(TAG, "mPreviewSize.getHeight: " + mPreviewSize.getHeight());

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            // mPreviewSize = new Size(width, height);
            Log.d(TAG, " mPreviewSize " + mPreviewSize.getWidth() + mPreviewSize.getHeight());

            sendQuene = CameraActivity.quene.getH264SendQueue();
            mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                    ImageFormat.YV12, /*maxImages*/5);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);

            // mH264Encode = new EncoderH264(960, 540, framerate);
            // mH264Encode = new EncoderH264(mPreviewSize.getWidth(), mPreviewSize.getHeight(), framerate);
            mH264Encode = new EncoderH264(mPreviewSize.getHeight(), mPreviewSize.getWidth(), framerate);
            try {
                mH264Encode.createFile();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
    }
}
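The mOnImageAvailableListener wired up above is not shown on this page. Purely as a hedged sketch of what draining that YV12 ImageReader could look like, the listener below copies each plane row by row while honouring its row stride; it assumes a chroma pixel stride of 1 and says nothing about the real project's encoder queue, which is not visible here.

// Hypothetical listener body (the project's real mOnImageAvailableListener is not shown).
// Assumes pixelStride == 1 for all planes; planes are consumed in the order getPlanes() returns them.
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = reader -> {
    Image image = reader.acquireLatestImage();
    if (image == null) {
        return;
    }
    try {
        Image.Plane[] planes = image.getPlanes();
        int width = image.getWidth();
        int height = image.getHeight();
        byte[] frame = new byte[width * height * 3 / 2];
        int offset = 0;
        for (int i = 0; i < planes.length; i++) {
            int planeWidth = (i == 0) ? width : width / 2;    // chroma planes are half size in 4:2:0
            int planeHeight = (i == 0) ? height : height / 2;
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            byte[] row = new byte[rowStride];
            for (int r = 0; r < planeHeight; r++) {
                int length = Math.min(rowStride, buffer.remaining());
                buffer.get(row, 0, length);
                System.arraycopy(row, 0, frame, offset, planeWidth);
                offset += planeWidth;
            }
        }
        // ... hand `frame` to the H.264 encoder / send queue here ...
    } finally {
        image.close();
    }
};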
From source file:org.videolan.vlc.gui.video.VideoPlayerActivity.java
@Override
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.player);

    final MyHandler myHandler = new MyHandler();
    final LayoutInflater inflater = LayoutInflater.from(this);
    msgContainer = (LinearLayout) findViewById(R.id.msg_container);
    int current_locID;
    if (ActivityDevice.current_locID == -1)
        current_locID = ActivityShiPin.current_locID;
    else
        current_locID = ActivityDevice.current_locID;
    ArrayList<Integer> locIDList = MainActivity.locIDList;
    HashMap<Integer, ArrayList<Integer>> loc_devMap = MainActivity.loc_devMap;
    dev_typeMap = MainActivity.dev_typeMap;
    HashMap<Integer, String> dev_nameMap = MainActivity.dev_nameMap;
    Log.e("*****localID", String.valueOf(current_locID));
    if (current_locID != -1) {
        if (loc_devMap.containsKey(current_locID)) {
            devList = loc_devMap.get(current_locID);
            if (devList.size() != 0) {
                int sub = devList.get(0) - 0;
                final ArrayList<Integer> brokenList = new ArrayList<Integer>();
                brokenList.add(0);
                int i;
                for (i = 0; i < devList.size(); i++) {
                    int devID = devList.get(i);
                    Log.e("@@@@@@@@", String.valueOf(i));
                    if (sub != devID - i) {
                        brokenList.add(i);
                        sub = devID - i;
                    }
                    // int typeID = dev_typeMap.get(devID);
                    final View view = inflater.inflate(R.layout.senssor_msg, null);
                    TextView type = (TextView) view.findViewById(R.id.type);
                    TextView msg = (TextView) view.findViewById(R.id.msg);
                    type.setText(dev_nameMap.get(devID) + ":");
                    msgContainer.addView(view);
                }
                brokenList.add(i);
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        String a = null;
                        try {
                            a = WebServiceUtil.getHd("sss", "njuptcloud");
                        } catch (Exception e1) {
                            Log.e(">>>>>>>>>", "?a " + e1.getMessage());
                            e1.printStackTrace();
                        }
                        if (a != null) {
                            int i = 0;
                            List<String> msgList = new ArrayList<String>();
                            while (i < (brokenList.size() - 1)) {
                                try {
                                    msgList.addAll(WebServiceUtil.getMs(a,
                                            devList.get(brokenList.get(i)),
                                            devList.get(brokenList.get(i + 1) - 1)));
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                                i++;
                            }
                            // List<String> resultList = new ArrayList<String>();
                            // resultList.add(object)
                            Message msg = new Message();
                            msg.obj = msgList;
                            myHandler.sendMessage(msg);
                        } else {
                            Message msg = new Message();
                            msg.obj = null;
                            myHandler.sendMessage(msg);
                        }
                    }
                }).start();
            }
        }
    }

    directionLayout = (LinearLayout) findViewById(R.id.direction_layout);
    left = (ImageButton) findViewById(R.id.left);
    left.setOnClickListener(directionListenr);
    bottom = (ImageButton) findViewById(R.id.bottom);
    bottom.setOnClickListener(directionListenr);
    top = (ImageButton) findViewById(R.id.top);
    top.setOnClickListener(directionListenr);
    right = (ImageButton) findViewById(R.id.right);
    right.setOnClickListener(directionListenr);

    SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(this);

    if (Util.isICSOrLater())
        getWindow().getDecorView().findViewById(android.R.id.content)
                .setOnSystemUiVisibilityChangeListener(new OnSystemUiVisibilityChangeListener() {
                    @Override
                    public void onSystemUiVisibilityChange(int visibility) {
                        if (visibility == mUiVisibility)
                            return;
                        setSurfaceSize(mVideoWidth, mVideoHeight, mVideoVisibleWidth, mVideoVisibleHeight,
                                mSarNum, mSarDen);
                        if (visibility == View.SYSTEM_UI_FLAG_VISIBLE && !mShowing) {
                            showOverlay();
                        }
                        mUiVisibility = visibility;
                    }
                });

    /* initialize Views and their events */
    mOverlayHeader = findViewById(R.id.player_overlay_header);
    mOverlayHeader.setVisibility(View.GONE);
    mOverlayLock = findViewById(R.id.lock_overlay);
    mOverlayOption = findViewById(R.id.option_overlay);
    mOverlayProgress = findViewById(R.id.progress_overlay);
    mOverlayInterface = findViewById(R.id.interface_overlay);
    play_lay = (RelativeLayout) findViewById(R.id.play_lay);
    progress_lay = (LinearLayout) findViewById(R.id.progress_lay);

    /* header */
    mTitle = (TextView) findViewById(R.id.player_overlay_title);
    mSysTime = (TextView) findViewById(R.id.player_overlay_systime);
    mBattery = (TextView) findViewById(R.id.player_overlay_battery);

    // Position and remaining time
    mTime = (TextView) findViewById(R.id.player_overlay_time);
    mTime.setOnClickListener(mRemainingTimeListener);
    mLength = (TextView) findViewById(R.id.player_overlay_length);
    mLength.setOnClickListener(mRemainingTimeListener);

    // the info textView is not on the overlay
    mInfo = (TextView) findViewById(R.id.player_overlay_info);

    mEnableWheelbar = pref.getBoolean("enable_wheel_bar", false);
    mEnableBrightnessGesture = pref.getBoolean("enable_brightness_gesture", true);
    mScreenOrientation = Integer
            .valueOf(pref.getString("screen_orientation_value", "4" /*SCREEN_ORIENTATION_SENSOR*/));

    mControls = mEnableWheelbar ? new PlayerControlWheel(this) : new PlayerControlClassic(this);
    mControls.setOnPlayerControlListener(mPlayerControlListener);
    FrameLayout mControlContainer = (FrameLayout) findViewById(R.id.player_control);
    mControlContainer.addView((View) mControls);

    mAudioTrack = (ImageButton) findViewById(R.id.player_overlay_audio);
    mAudioTrack.setVisibility(View.GONE);
    mSubtitle = (ImageButton) findViewById(R.id.player_overlay_subtitle);
    mSubtitle.setVisibility(View.GONE);

    mHandler.postDelayed(new Runnable() {
        @Override
        public void run() {
            /* FIXME
             * The setTracksAndSubtitles method probably doesn't work in case of many many Tracks and Subtitles
             * Moreover, in a video stream, if Tracks & Subtitles change, they won't be updated
             */
            setESTrackLists();
        }
    }, 1500);

    mLock = (ImageButton) findViewById(R.id.lock_overlay_button);
    mLock.setOnClickListener(mLockListener);
    mSize = (ImageButton) findViewById(R.id.player_overlay_size);
    mSize.setOnClickListener(mSizeListener);

    snapshot_lay = (LinearLayout) findViewById(R.id.snapshot_lay);
    radio_onOrPause_lay = (LinearLayout) findViewById(R.id.radio_onOrPause_lay);
    record_lay = (LinearLayout) findViewById(R.id.record_lay);
    voice_lay = (LinearLayout) findViewById(R.id.voice_lay);

    mSnapShot = (ImageButton) findViewById(R.id.snapshot_overlay_button);
    mSnapShot.setOnClickListener(mSnapShotListener);
    mRecord = (ImageButton) findViewById(R.id.record_overlay_button);
    mRecord.setOnClickListener(mRecordListener);

    // Play/pause toggle button
    mRadio = (ImageButton) findViewById(R.id.radio_onOrPause_button);
    mRadio_tv = (TextView) findViewById(R.id.radio_onOrPause_tv);
    mRadio.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mRadio.getBackground().getConstantState() == getResources()
                    .getDrawable(R.drawable.radio_on1).getConstantState()) {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mRadio.setBackgroundDrawable(getResources().getDrawable(R.drawable.radio_pause));
                } else {
                    mRadio.setBackground(getResources().getDrawable(R.drawable.radio_pause));
                }
                mRadio_tv.setText("?");
                play();
            } else {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mRadio.setBackgroundDrawable(getResources().getDrawable(R.drawable.radio_on1));
                } else {
                    mRadio.setBackground(getResources().getDrawable(R.drawable.radio_on1));
                }
                mRadio_tv.setText("?");
                pause();
            }
        }
    });

    mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
    mAudioMax = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
    mVoice = (ImageButton) findViewById(R.id.voice_overlay_button);
    mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, mAudioMax, 0);
    mVoice.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mVoice.getBackground().getConstantState() == getResources()
                    .getDrawable(R.drawable.voice_on).getConstantState()) {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mVoice.setBackgroundDrawable(getResources().getDrawable(R.drawable.voice_off));
                } else {
                    mVoice.setBackground(getResources().getDrawable(R.drawable.voice_off));
                }
                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
            } else {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mVoice.setBackgroundDrawable(getResources().getDrawable(R.drawable.voice_on));
                } else {
                    mVoice.setBackground(getResources().getDrawable(R.drawable.voice_on));
                }
                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, mAudioMax, 0);
            }
        }
    });

    mSurface = (SurfaceView) findViewById(R.id.player_surface);
    mSurfaceHolder = mSurface.getHolder();
    mSurfaceFrame = (FrameLayout) findViewById(R.id.player_surface_frame);
    mSurfaceFrame.setOnTouchListener(new OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            if (MotionEvent.ACTION_DOWN == event.getAction()) {
                if (System.currentTimeMillis() - clickTime < 500) {
                    if (mCurrentSize == SURFACE_4_3) {
                        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    } else {
                        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    }
                } else {
                    if (!mShowing) {
                        showOverlay();
                        mSurfaceFrame.setFocusable(false);
                    } else {
                        hideOverlay(true);
                    }
                }
                clickTime = System.currentTimeMillis();
                if (directionLayout.getVisibility() == View.INVISIBLE) {
                    directionLayout.setVisibility(View.VISIBLE);
                } else {
                    directionLayout.setVisibility(View.INVISIBLE);
                }
            }
            return true;
        }
    });

    mOrientationListener = new OrientationEventListener(this) {
        @Override
        public void onOrientationChanged(int rotation) {
            if (((rotation >= 0) && (rotation <= 45)) || (rotation >= 315)
                    || ((rotation >= 135) && (rotation <= 225))) { // portrait
                mCurrentOrient = true;
                if (mCurrentOrient != mScreenProtrait) {
                    mScreenProtrait = mCurrentOrient;
                    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    Log.d(TAG, "Screen orientation changed from Landscape to Portrait!");
                }
            } else if (((rotation > 45) && (rotation < 135))
                    || ((rotation > 225) && (rotation < 315))) { // landscape
                mCurrentOrient = false;
                if (mCurrentOrient != mScreenProtrait) {
                    mScreenProtrait = mCurrentOrient;
                    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    Log.d(TAG, "Screen orientation changed from Portrait to Landscape!");
                }
            }
        }
    };
    mOrientationListener.enable();

    int pitch;
    String chroma = pref.getString("chroma_format", "");
    if (Util.isGingerbreadOrLater() && chroma.equals("YV12")) {
        mSurfaceHolder.setFormat(ImageFormat.YV12);
        pitch = ImageFormat.getBitsPerPixel(ImageFormat.YV12) / 8;
    } else if (chroma.equals("RV16")) {
        mSurfaceHolder.setFormat(PixelFormat.RGB_565);
        PixelFormat info = new PixelFormat();
        PixelFormat.getPixelFormatInfo(PixelFormat.RGB_565, info);
        pitch = info.bytesPerPixel;
    } else {
        mSurfaceHolder.setFormat(PixelFormat.RGBX_8888);
        PixelFormat info = new PixelFormat();
        PixelFormat.getPixelFormatInfo(PixelFormat.RGBX_8888, info);
        pitch = info.bytesPerPixel;
    }
    mSurfaceAlign = 16 / pitch - 1;
    mSurfaceHolder.addCallback(mSurfaceCallback);

    mSeekbar = (SeekBar) findViewById(R.id.player_overlay_seekbar);
    mSeekbar.setOnSeekBarChangeListener(mSeekListener);

    mSwitchingView = false;
    mEndReached = false;

    // Clear the resume time, since it is only used for resumes in external
    // videos.
    SharedPreferences preferences = getSharedPreferences(PreferencesActivity.NAME, MODE_PRIVATE);
    SharedPreferences.Editor editor = preferences.edit();
    editor.putLong(PreferencesActivity.VIDEO_RESUME_TIME, -1);
    // Also clear the subs list, because it is supposed to be per session
    // only (like desktop VLC). We don't want the customs subtitle file
    // to persist forever with this video.
    editor.putString(PreferencesActivity.VIDEO_SUBTITLE_FILES, null);
    editor.commit();

    IntentFilter filter = new IntentFilter();
    filter.addAction(Intent.ACTION_BATTERY_CHANGED);
    filter.addAction(VLCApplication.SLEEP_INTENT);
    registerReceiver(mReceiver, filter);

    try {
        mLibVLC = Util.getLibVlcInstance();
    } catch (LibVlcException e) {
        Log.d(TAG, "LibVLC initialisation failed");
        return;
    }

    EventHandler em = EventHandler.getInstance();
    em.addHandler(eventHandler);

    this.setVolumeControlStream(AudioManager.STREAM_MUSIC);

    // 100 is the value for screen_orientation_start_lock
    // setRequestedOrientation(mScreenOrientation != 100
    //         ? mScreenOrientation
    //         : getScreenOrientation());
    // setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
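One detail worth spelling out in the chroma branch above: ImageFormat.getBitsPerPixel(ImageFormat.YV12) returns 12, so the integer division yields a one-byte pitch and a 16-pixel alignment mask. A minimal sketch of that arithmetic:

// Worked arithmetic for the YV12 branch (12 bits per pixel, per the ImageFormat documentation).
int pitch = ImageFormat.getBitsPerPixel(ImageFormat.YV12) / 8; // 12 / 8 == 1 with integer division
int surfaceAlign = 16 / pitch - 1;                             // == 15, i.e. a 16-pixel alignment mask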