List of usage examples for android.graphics.ImageFormat.getBitsPerPixel
public static int getBitsPerPixel(int format)
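getBitsPerPixel returns the number of bits per pixel used by the given ImageFormat constant, or -1 for compressed or unknown formats such as JPEG. Before the project examples below, here is a minimal, hypothetical sketch of the most common pattern, sizing a camera preview callback buffer from the reported bits per pixel. The PreviewBufferSizer class and method name are illustrative assumptions, not taken from any of the examples.

import android.graphics.ImageFormat;
import android.hardware.Camera;

public class PreviewBufferSizer {
    /** Returns the size in bytes of one preview frame, or -1 for compressed formats. */
    public static int previewBufferSize(Camera.Parameters parameters) {
        Camera.Size size = parameters.getPreviewSize();
        int format = parameters.getPreviewFormat(); // typically ImageFormat.NV21 for legacy previews
        int bitsPerPixel = ImageFormat.getBitsPerPixel(format);
        if (bitsPerPixel <= 0) {
            return -1; // compressed or unknown formats report -1 bits per pixel
        }
        return size.width * size.height * bitsPerPixel / 8;
    }
}

A buffer of this size is what the legacy Camera examples below pass to Camera.addCallbackBuffer() before starting the preview.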
From source file: com.mienaikoe.deltamonitor.CameraWatcherService.java
public void startRecording() {
    if (camera == null) {
        try {
            camera = Camera.open();
        } catch (Exception ex) {
            Log.e(TAG, ex.getMessage());
            ex.printStackTrace();
            return;
        }
        if (camera == null) {
            Log.e(TAG, "Camera is null despite trying to allocate it. Stopping Service");
            throw new IllegalStateException("DeltaMonitor was unable to allocate the camera.");
        }
    }
    try {
        Log.i(TAG, "==================Beginning to Record");
        if (buffer == null) {
            Camera.Parameters parameters = CameraSizer.sizeUp(camera);
            size = parameters.getPreviewSize();
            buffer = new byte[size.height * size.width
                    * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat()) / 8];
        }
        camera.addCallbackBuffer(buffer);
        if (texture == null) {
            texture = getTexture();
        }
        camera.setPreviewTexture(texture);
        detector.reset();
        camera.setPreviewCallbackWithBuffer(previewCallback);
        camera.startPreview();
    } catch (IOException ex) {
        Log.e(TAG, "IOException during recording setup " + ex.getMessage());
        ex.printStackTrace();
    }
}
From source file: com.android.camera2.its.ItsUtils.java
public static byte[] getDataFromImage(Image image) throws ItsException {
    int format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = null;

    // Read image data
    Plane[] planes = image.getPlanes();

    // Check image validity
    if (!checkAndroidImageFormat(image)) {
        throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat());
    }
    if (format == ImageFormat.JPEG) {
        // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer.
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
            || format == ImageFormat.RAW10) {
        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
            Logt.i(TAG, String.format("Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
                    format, i, width, height, rowStride, pixelStride));
            // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    // Advance buffer the remainder of the row stride
                    buffer.position(buffer.position() + rowStride - length);
                    offset += length;
                } else {
                    // Generic case: should work for any pixelStride but slower.
                    // Use intermediate buffer to avoid read byte-by-byte from
                    // DirectByteBuffer, which is very bad for performance.
                    // Also need avoid access out of bound by only reading the available
                    // bytes in the bytebuffer.
                    int readSize = rowStride;
                    if (buffer.remaining() < readSize) {
                        readSize = buffer.remaining();
                    }
                    buffer.get(rowData, 0, readSize);
                    if (pixelStride >= 1) {
                        for (int col = 0; col < w; col++) {
                            data[offset++] = rowData[col * pixelStride];
                        }
                    } else {
                        // PixelStride of 0 can mean pixel isn't a multiple of 8 bits, for
                        // example with RAW10. Just copy the buffer, dropping any padding at
                        // the end of the row.
                        int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
                        System.arraycopy(rowData, 0, data, offset, length);
                        offset += length;
                    }
                }
            }
        }
        Logt.i(TAG, String.format("Done reading image, format %d", format));
        return data;
    } else {
        throw new ItsException("Unsupported image format: " + format);
    }
}
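For reference, getBitsPerPixel returns 12 for YUV_420_888, 16 for RAW_SENSOR, and 10 for RAW10, so a 640x480 YUV_420_888 frame allocates 640 * 480 * 12 / 8 = 460,800 bytes in the code above. JPEG is handled separately because compressed formats report -1.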
From source file: com.longle1.facedetection.MainActivity.java
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    // Now that the size is known, set up the camera parameters and begin
    // the preview.
    Camera.Parameters parameters = mCamera.getParameters();
    parameters.setPreviewFpsRange(30000, 30000); // workaround due to the glass XE10 release
    List<Size> sizes = parameters.getSupportedPreviewSizes();
    Size optimalSize = getOptimalPreviewSize(sizes, w, h);
    parameters.setPreviewSize(optimalSize.width, optimalSize.height);
    mCamera.setParameters(parameters);
    if (previewCallback != null) {
        mCamera.setPreviewCallbackWithBuffer(previewCallback);
        Size size = parameters.getPreviewSize();
        byte[] data = new byte[size.width * size.height
                * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat()) / 8];
        mCamera.addCallbackBuffer(data);
    }
    mCamera.startPreview();
}
From source file: org.videolan.vlc.gui.video.VideoPlayerActivity.java
@Override
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.player);

    final MyHandler myHandler = new MyHandler();
    final LayoutInflater inflater = LayoutInflater.from(this);
    msgContainer = (LinearLayout) findViewById(R.id.msg_container);

    int current_locID;
    if (ActivityDevice.current_locID == -1)
        current_locID = ActivityShiPin.current_locID;
    else
        current_locID = ActivityDevice.current_locID;

    ArrayList<Integer> locIDList = MainActivity.locIDList;
    HashMap<Integer, ArrayList<Integer>> loc_devMap = MainActivity.loc_devMap;
    dev_typeMap = MainActivity.dev_typeMap;
    HashMap<Integer, String> dev_nameMap = MainActivity.dev_nameMap;
    Log.e("*****localID", String.valueOf(current_locID));

    if (current_locID != -1) {
        if (loc_devMap.containsKey(current_locID)) {
            devList = loc_devMap.get(current_locID);
            if (devList.size() != 0) {
                int sub = devList.get(0) - 0;
                final ArrayList<Integer> brokenList = new ArrayList<Integer>();
                brokenList.add(0);
                int i;
                for (i = 0; i < devList.size(); i++) {
                    int devID = devList.get(i);
                    Log.e("@@@@@@@@", String.valueOf(i));
                    if (sub != devID - i) {
                        brokenList.add(i);
                        sub = devID - i;
                    }
                    //int typeID=dev_typeMap.get(devID);
                    final View view = inflater.inflate(R.layout.senssor_msg, null);
                    TextView type = (TextView) view.findViewById(R.id.type);
                    TextView msg = (TextView) view.findViewById(R.id.msg);
                    type.setText(dev_nameMap.get(devID) + ":");
                    msgContainer.addView(view);
                }
                brokenList.add(i);

                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        Log.e(">>>>>>>>>", "??");
                        String a = null;
                        try {
                            a = WebServiceUtil.getHd("sss", "njuptcloud");
                        } catch (Exception e1) {
                            //Toast.makeText(VideoPlayerActivity.this, "?a ", Toast.LENGTH_SHORT).show();
                            Log.e(">>>>>>>>>", "?a " + e1.getMessage());
                            e1.printStackTrace();
                        }
                        if (a != null) {
                            int i = 0;
                            List<String> msgList = new ArrayList<String>();
                            while (i < (brokenList.size() - 1)) {
                                try {
                                    //Toast.makeText(VideoPlayerActivity.this, "??... ", Toast.LENGTH_SHORT).show();
                                    msgList.addAll(WebServiceUtil.getMs(a, devList.get(brokenList.get(i)),
                                            devList.get(brokenList.get(i + 1) - 1)));
                                } catch (Exception e) {
                                    //Toast.makeText(VideoPlayerActivity.this, "??", Toast.LENGTH_SHORT).show();
                                    e.printStackTrace();
                                }
                                i++;
                            }
                            // List<String> resultList=new ArrayList<String>();
                            // resultList.add(object)
                            Message msg = new Message();
                            //Toast.makeText(VideoPlayerActivity.this, msgList.get(0), Toast.LENGTH_SHORT).show();
                            msg.obj = msgList;
                            myHandler.sendMessage(msg);
                        } else {
                            Log.e(">>>>>>>", "??");
                            Message msg = new Message();
                            //Toast.makeText(VideoPlayerActivity.this, msgList.get(0), Toast.LENGTH_SHORT).show();
                            msg.obj = null;
                            myHandler.sendMessage(msg);
                        }
                        //Toast.makeText(VideoPlayerActivity.this, "??", Toast.LENGTH_SHORT).show();
                    }
                }).start();
            }
        }
    }

    directionLayout = (LinearLayout) findViewById(R.id.direction_layout);
    left = (ImageButton) findViewById(R.id.left);
    left.setOnClickListener(directionListenr);
    bottom = (ImageButton) findViewById(R.id.bottom);
    bottom.setOnClickListener(directionListenr);
    top = (ImageButton) findViewById(R.id.top);
    top.setOnClickListener(directionListenr);
    right = (ImageButton) findViewById(R.id.right);
    right.setOnClickListener(directionListenr);

    SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(this);

    if (Util.isICSOrLater())
        getWindow().getDecorView().findViewById(android.R.id.content)
                .setOnSystemUiVisibilityChangeListener(new OnSystemUiVisibilityChangeListener() {
                    @Override
                    public void onSystemUiVisibilityChange(int visibility) {
                        if (visibility == mUiVisibility)
                            return;
                        setSurfaceSize(mVideoWidth, mVideoHeight, mVideoVisibleWidth, mVideoVisibleHeight,
                                mSarNum, mSarDen);
                        if (visibility == View.SYSTEM_UI_FLAG_VISIBLE && !mShowing) {
                            showOverlay();
                        }
                        mUiVisibility = visibility;
                    }
                });

    /** initialize Views and their Events */
    mOverlayHeader = findViewById(R.id.player_overlay_header);
    mOverlayHeader.setVisibility(View.GONE);
    mOverlayLock = findViewById(R.id.lock_overlay);
    mOverlayOption = findViewById(R.id.option_overlay);
    mOverlayProgress = findViewById(R.id.progress_overlay);
    mOverlayInterface = findViewById(R.id.interface_overlay);
    play_lay = (RelativeLayout) findViewById(R.id.play_lay);
    progress_lay = (LinearLayout) findViewById(R.id.progress_lay);

    /* header */
    mTitle = (TextView) findViewById(R.id.player_overlay_title);
    mSysTime = (TextView) findViewById(R.id.player_overlay_systime);
    mBattery = (TextView) findViewById(R.id.player_overlay_battery);

    // Position and remaining time
    mTime = (TextView) findViewById(R.id.player_overlay_time);
    mTime.setOnClickListener(mRemainingTimeListener);
    mLength = (TextView) findViewById(R.id.player_overlay_length);
    mLength.setOnClickListener(mRemainingTimeListener);

    // the info textView is not on the overlay
    mInfo = (TextView) findViewById(R.id.player_overlay_info);

    mEnableWheelbar = pref.getBoolean("enable_wheel_bar", false);
    mEnableBrightnessGesture = pref.getBoolean("enable_brightness_gesture", true);
    mScreenOrientation = Integer
            .valueOf(pref.getString("screen_orientation_value", "4" /*SCREEN_ORIENTATION_SENSOR*/));

    mControls = mEnableWheelbar ? new PlayerControlWheel(this) : new PlayerControlClassic(this);
    mControls.setOnPlayerControlListener(mPlayerControlListener);
    FrameLayout mControlContainer = (FrameLayout) findViewById(R.id.player_control);
    mControlContainer.addView((View) mControls);

    mAudioTrack = (ImageButton) findViewById(R.id.player_overlay_audio);
    mAudioTrack.setVisibility(View.GONE);
    mSubtitle = (ImageButton) findViewById(R.id.player_overlay_subtitle);
    mSubtitle.setVisibility(View.GONE);

    mHandler.postDelayed(new Runnable() {
        @Override
        public void run() {
            /* FIXME
             * The setTracksAndSubtitles method probably doesn't work in case of many many Tracks and Subtitles
             * Moreover, in a video stream, if Tracks & Subtitles change, they won't be updated
             */
            setESTrackLists();
        }
    }, 1500);

    mLock = (ImageButton) findViewById(R.id.lock_overlay_button);
    mLock.setOnClickListener(mLockListener);
    mSize = (ImageButton) findViewById(R.id.player_overlay_size);
    mSize.setOnClickListener(mSizeListener);

    snapshot_lay = (LinearLayout) findViewById(R.id.snapshot_lay);
    radio_onOrPause_lay = (LinearLayout) findViewById(R.id.radio_onOrPause_lay);
    record_lay = (LinearLayout) findViewById(R.id.record_lay);
    voice_lay = (LinearLayout) findViewById(R.id.voice_lay);

    mSnapShot = (ImageButton) findViewById(R.id.snapshot_overlay_button);
    mSnapShot.setOnClickListener(mSnapShotListener);
    mRecord = (ImageButton) findViewById(R.id.record_overlay_button);
    mRecord.setOnClickListener(mRecordListener);

    mRadio = (ImageButton) findViewById(R.id.radio_onOrPause_button);
    mRadio_tv = (TextView) findViewById(R.id.radio_onOrPause_tv);
    mRadio.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mRadio.getBackground().getConstantState() == getResources().getDrawable(R.drawable.radio_on1)
                    .getConstantState()) {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mRadio.setBackgroundDrawable(getResources().getDrawable(R.drawable.radio_pause));
                } else {
                    mRadio.setBackground(getResources().getDrawable(R.drawable.radio_pause));
                }
                mRadio_tv.setText("?");
                play();
            } else {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mRadio.setBackgroundDrawable(getResources().getDrawable(R.drawable.radio_on1));
                } else {
                    mRadio.setBackground(getResources().getDrawable(R.drawable.radio_on1));
                }
                mRadio_tv.setText("?");
                pause();
            }
        }
    });

    mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
    mAudioMax = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
    mVoice = (ImageButton) findViewById(R.id.voice_overlay_button);
    mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, mAudioMax, 0);
    mVoice.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mVoice.getBackground().getConstantState() == getResources().getDrawable(R.drawable.voice_on)
                    .getConstantState()) {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mVoice.setBackgroundDrawable(getResources().getDrawable(R.drawable.voice_off));
                } else {
                    mVoice.setBackground(getResources().getDrawable(R.drawable.voice_off));
                }
                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
            } else {
                if (sdk < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                    mVoice.setBackgroundDrawable(getResources().getDrawable(R.drawable.voice_on));
                } else {
                    mVoice.setBackground(getResources().getDrawable(R.drawable.voice_on));
                }
                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, mAudioMax, 0);
            }
        }
    });

    mSurface = (SurfaceView) findViewById(R.id.player_surface);
    mSurfaceHolder = mSurface.getHolder();
    mSurfaceFrame = (FrameLayout) findViewById(R.id.player_surface_frame);
    mSurfaceFrame.setOnTouchListener(new OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            if (MotionEvent.ACTION_DOWN == event.getAction()) {
                if (System.currentTimeMillis() - clickTime < 500) {
                    if (mCurrentSize == SURFACE_4_3) {
                        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    } else {
                        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    }
                } else {
                    if (!mShowing) {
                        showOverlay();
                        mSurfaceFrame.setFocusable(false);
                    } else {
                        hideOverlay(true);
                    }
                }
                clickTime = System.currentTimeMillis();
                if (directionLayout.getVisibility() == View.INVISIBLE) {
                    directionLayout.setVisibility(View.VISIBLE);
                } else {
                    directionLayout.setVisibility(View.INVISIBLE);
                }
            }
            return true;
        }
    });

    mOrientationListener = new OrientationEventListener(this) {
        @Override
        public void onOrientationChanged(int rotation) {
            if (((rotation >= 0) && (rotation <= 45)) || (rotation >= 315)
                    || ((rotation >= 135) && (rotation <= 225))) { // portrait
                mCurrentOrient = true;
                if (mCurrentOrient != mScreenProtrait) {
                    mScreenProtrait = mCurrentOrient;
                    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    Log.d(TAG, "Screen orientation changed from Landscape to Portrait!");
                }
            } else if (((rotation > 45) && (rotation < 135)) || ((rotation > 225) && (rotation < 315))) { // landscape
                mCurrentOrient = false;
                if (mCurrentOrient != mScreenProtrait) {
                    mScreenProtrait = mCurrentOrient;
                    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    Log.d(TAG, "Screen orientation changed from Portrait to Landscape!");
                }
            }
        }
    };
    mOrientationListener.enable();

    int pitch;
    String chroma = pref.getString("chroma_format", "");
    if (Util.isGingerbreadOrLater() && chroma.equals("YV12")) {
        mSurfaceHolder.setFormat(ImageFormat.YV12);
        pitch = ImageFormat.getBitsPerPixel(ImageFormat.YV12) / 8;
    } else if (chroma.equals("RV16")) {
        mSurfaceHolder.setFormat(PixelFormat.RGB_565);
        PixelFormat info = new PixelFormat();
        PixelFormat.getPixelFormatInfo(PixelFormat.RGB_565, info);
        pitch = info.bytesPerPixel;
    } else {
        mSurfaceHolder.setFormat(PixelFormat.RGBX_8888);
        PixelFormat info = new PixelFormat();
        PixelFormat.getPixelFormatInfo(PixelFormat.RGBX_8888, info);
        pitch = info.bytesPerPixel;
    }
    mSurfaceAlign = 16 / pitch - 1;
    mSurfaceHolder.addCallback(mSurfaceCallback);

    mSeekbar = (SeekBar) findViewById(R.id.player_overlay_seekbar);
    mSeekbar.setOnSeekBarChangeListener(mSeekListener);

    mSwitchingView = false;
    mEndReached = false;

    // Clear the resume time, since it is only used for resumes in external
    // videos.
    SharedPreferences preferences = getSharedPreferences(PreferencesActivity.NAME, MODE_PRIVATE);
    SharedPreferences.Editor editor = preferences.edit();
    editor.putLong(PreferencesActivity.VIDEO_RESUME_TIME, -1);
    // Also clear the subs list, because it is supposed to be per session
    // only (like desktop VLC). We don't want the customs subtitle file
    // to persist forever with this video.
    editor.putString(PreferencesActivity.VIDEO_SUBTITLE_FILES, null);
    editor.commit();

    IntentFilter filter = new IntentFilter();
    filter.addAction(Intent.ACTION_BATTERY_CHANGED);
    filter.addAction(VLCApplication.SLEEP_INTENT);
    registerReceiver(mReceiver, filter);

    try {
        mLibVLC = Util.getLibVlcInstance();
    } catch (LibVlcException e) {
        Log.d(TAG, "LibVLC initialisation failed");
        return;
    }

    EventHandler em = EventHandler.getInstance();
    em.addHandler(eventHandler);

    this.setVolumeControlStream(AudioManager.STREAM_MUSIC);

    // 100 is the value for screen_orientation_start_lock
    // setRequestedOrientation(mScreenOrientation != 100
    //         ? mScreenOrientation
    //         : getScreenOrientation());
    // setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
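Note that getBitsPerPixel(ImageFormat.YV12) returns 12, so the integer division by 8 in this example yields a pitch of 1 byte and mSurfaceAlign ends up as 16 / 1 - 1 = 15.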
From source file: com.almalence.opencam.ApplicationScreen.java
@Override
public void configureCamera(boolean createGUI) {
    CameraController.updateCameraFeatures();

    // ----- Select preview dimensions with ratio correspondent to
    // full-size image
    ApplicationScreen.getPluginManager().setCameraPreviewSize();

    // prepare list of surfaces to be used in capture requests
    if (!CameraController.isRemoteCamera()) {
        if (CameraController.isUseCamera2())
            configureCamera2Camera(captureFormat);
        else {
            Camera.Size sz = CameraController.getCameraParameters().getPreviewSize();
            Log.e("ApplicationScreen", "Viewfinder preview size: " + sz.width + "x" + sz.height);
            guiManager.setupViewfinderPreviewSize(new CameraController.Size(sz.width, sz.height));
            double bufferSize = sz.width * sz.height
                    * ImageFormat.getBitsPerPixel(CameraController.getCameraParameters().getPreviewFormat())
                    / 8.0d;
            CameraController.allocatePreviewBuffer(bufferSize);
            CameraController.getCamera().setErrorCallback(CameraController.getInstance());
            onCameraConfigured();
        }
    } else {
        guiManager.setupViewfinderPreviewSize(
                new CameraController.Size(((SimpleStreamSurfaceView) preview).getSurfaceWidth(),
                        ((SimpleStreamSurfaceView) preview).getSurfaceHeight()));
        onCameraConfigured();
    }

    if (createGUI) {
        ApplicationScreen.getPluginManager().onGUICreate();
        ApplicationScreen.getGUIManager().onGUICreate();
    }
}