Java tutorial
/* * Copyright 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kr.ac.kpu.wheeling.blackbox; import android.Manifest; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.app.DialogFragment; import android.app.Fragment; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.graphics.Matrix; import android.graphics.RectF; import android.graphics.SurfaceTexture; import android.hardware.Sensor; import android.hardware.SensorEvent; import android.hardware.SensorEventListener; import android.hardware.SensorManager; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraManager; import android.hardware.camera2.CameraMetadata; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.params.StreamConfigurationMap; import android.media.MediaRecorder; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.CountDownTimer; import android.os.Environment; import android.os.Handler; import android.os.HandlerThread; import android.os.Message; import android.os.SystemClock; import android.support.annotation.NonNull; import android.support.v13.app.FragmentCompat; import android.support.v4.app.ActivityCompat; import android.support.v4.app.FragmentTransaction; import android.util.Log; import android.util.Size; import android.util.SparseIntArray; import android.view.LayoutInflater; import android.view.Surface; import android.view.TextureView; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.Chronometer; import android.widget.FrameLayout; import android.widget.LinearLayout; import android.widget.Toast; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import kr.ac.kpu.wheeling.R; import kr.ac.kpu.wheeling.blackbox.gallery.GalleryActivity; import kr.ac.kpu.wheeling.helper.SQLiteHandler; import kr.ac.kpu.wheeling.tracker.TrackerFragment; public class Camera2VideoFragment extends Fragment implements View.OnClickListener, FragmentCompat.OnRequestPermissionsResultCallback { private static final int START_RECORDING = 1; private static final int STOP_RECORDING = 2; private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90; private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270; private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray(); private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray(); private static final String TAG = "Camera2VideoFragment"; private static final int REQUEST_VIDEO_PERMISSIONS = 100; private static final String FRAGMENT_DIALOG = "dialog"; private static final String[] VIDEO_PERMISSIONS = { Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.ACCESS_FINE_LOCATION }; static { DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90); DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0); DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270); DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180); } static { INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270); INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180); INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90); INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0); } private Chronometer chronometer; private boolean mIsSensorChanged; private boolean mSensorMode = true; private SensorManager sensorManager; private Sensor sensor; private Intent galleryActivity; private CountDownTimer mTimer = null; private Button mButtonGallery; private Button mButtonSetting; private ScreenInfo screenInfo; /** * An {@link AutoFitTextureView} for camera preview. */ private AutoFitTextureView mTextureView; /** * Button to record video */ private Button mButtonVideo; private Button mButtonVisible; private Button mButtonRiding; /** * A reference to the opened {@link android.hardware.camera2.CameraDevice}. */ private CameraDevice mCameraDevice; /** * A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for * preview. */ private CameraCaptureSession mPreviewSession; /** * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a * {@link TextureView}. */ private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() { @Override public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) { openCamera(width, height); } @Override public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) { configureTransform(width, height); } @Override public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) { return true; } @Override public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) { } }; //--------------------------------------------------------------- private LinearLayout tracker_layout; private boolean mIsTracker_Visible = true; Button btn_btest; //private static final String bowner = "???"; private SQLiteHandler sqLiteHandler; private String bowner; private String fileName; //----------------------------------------------------------------- /** * The {@link android.util.Size} of camera preview. */ private Size mPreviewSize; /** * The {@link android.util.Size} of video recording. */ private Size mVideoSize; /** * MediaRecorder */ private MediaRecorder mMediaRecorder; /** * Whether the app is recording video now */ private boolean mIsRecordingVideo; /** * An additional thread for running tasks that shouldn't block the UI. */ private HandlerThread mBackgroundThread; /** * A {@link Handler} for running tasks in the background. */ private Handler mBackgroundHandler; /** * A {@link Semaphore} to prevent the app from exiting before closing the camera. */ private Semaphore mCameraOpenCloseLock = new Semaphore(1); /** * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status. */ private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() { @Override public void onOpened(CameraDevice cameraDevice) { mCameraDevice = cameraDevice; startPreview(); mCameraOpenCloseLock.release(); if (null != mTextureView) { configureTransform(mTextureView.getWidth(), mTextureView.getHeight()); } } @Override public void onDisconnected(CameraDevice cameraDevice) { mCameraOpenCloseLock.release(); cameraDevice.close(); mCameraDevice = null; } @Override public void onError(CameraDevice cameraDevice, int error) { mCameraOpenCloseLock.release(); cameraDevice.close(); mCameraDevice = null; Activity activity = getActivity(); if (null != activity) { activity.finish(); } } }; private Integer mSensorOrientation; private String mNextVideoAbsolutePath; private CaptureRequest.Builder mPreviewBuilder; private Surface mRecorderSurface; public static Camera2VideoFragment newInstance() { return new Camera2VideoFragment(); } /** * In this sample, we choose a video size with 3x4 aspect ratio. Also, we don't use sizes * larger than 1080p, since MediaRecorder cannot handle such a high-resolution video. * * @param choices The list of available sizes * @return The video size */ private static Size chooseVideoSize(Size[] choices) { for (Size size : choices) { Log.e("INFO", "find Width: " + size.getWidth() + " Height: " + size.getHeight()); if (size.getWidth() == size.getHeight() * 16 / 9 && size.getWidth() <= 1920) { Log.e("INFO", "Width: " + size.getWidth() + " Height: " + size.getHeight()); return size; } } Log.e(TAG, "Couldn't find any suitable video size"); return choices[choices.length - 1]; } /** * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose * width and height are at least as large as the respective requested values, and whose aspect * ratio matches with the specified value. * * @param choices The list of sizes that the camera supports for the intended output class * @param width The minimum desired width * @param height The minimum desired height * @param aspectRatio The aspect ratio * @return The optimal {@code Size}, or an arbitrary one if none were big enough */ private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) { // Collect the supported resolutions that are at least as big as the preview Surface List<Size> bigEnough = new ArrayList<Size>(); int w = aspectRatio.getWidth(); int h = aspectRatio.getHeight(); Log.d("INFO", "w, h: " + w + h); for (Size option : choices) { Log.d("SIZE", "option Width= " + option.getWidth() + " option Height= " + option.getHeight()); if (option.getHeight() == option.getWidth() * h / w && option.getWidth() >= w && option.getHeight() >= h) { Log.d("SELECTED SIZE", "option Width= " + option.getWidth() + " option Height= " + option.getHeight()); bigEnough.add(option); } } // Pick the smallest of those, assuming we found any if (bigEnough.size() > 0) { return Collections.min(bigEnough, new CompareSizesByArea()); } else { Log.e(TAG, "Couldn't find any suitable preview size"); return choices[0]; } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { sensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE); sensor = sensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE); return inflater.inflate(kr.ac.kpu.wheeling.R.layout.fragment_camera2_video, container, false); } @Override public void onViewCreated(final View view, Bundle savedInstanceState) { mTextureView = (AutoFitTextureView) view.findViewById(kr.ac.kpu.wheeling.R.id.texture); //------------------------------------------- tracker_layout = (LinearLayout) view.findViewById(R.id.tracker_layout); mButtonVisible = (Button) view.findViewById(R.id.btn_visible); mButtonVisible.setOnClickListener(this); mButtonRiding = (Button) view.findViewById(R.id.btn_visible_riding); mButtonRiding.setOnClickListener(this); btn_btest = (Button) view.findViewById(R.id.btn_btest); btn_btest.setOnClickListener(this); sqLiteHandler = new SQLiteHandler(getActivity()); //---------------------------------------------- mButtonVideo = (Button) view.findViewById(kr.ac.kpu.wheeling.R.id.video); mButtonVideo.setOnClickListener(this); mButtonSetting = (Button) view.findViewById(kr.ac.kpu.wheeling.R.id.interval); mButtonSetting.setOnClickListener(this); //view.findViewById(R.id.info).setOnClickListener(this); chronometer = (Chronometer) view.findViewById(kr.ac.kpu.wheeling.R.id.chronometer); sensorManager.registerListener(gyroListener, sensor, SensorManager.SENSOR_DELAY_NORMAL); mButtonGallery = (Button) view.findViewById(kr.ac.kpu.wheeling.R.id.menu); mButtonGallery.setOnClickListener(this); galleryActivity = new Intent(getActivity(), GalleryActivity.class); chronometer.setOnChronometerTickListener(new Chronometer.OnChronometerTickListener() { @Override public void onChronometerTick(Chronometer chronometer) { if ((int) (((SystemClock.elapsedRealtime() - chronometer.getBase()))) > 10500) { try { stopRecordingVideo(); mIsSensorChanged = false; } catch (Exception e) { } chronometer.stop(); } } }); } @Override public void onResume() { super.onResume(); startBackgroundThread(); if (mTextureView.isAvailable()) { openCamera(mTextureView.getWidth(), mTextureView.getHeight()); } else { mTextureView.setSurfaceTextureListener(mSurfaceTextureListener); } } @Override public void onPause() { closeCamera(); stopBackgroundThread(); super.onPause(); } @Override public void onClick(View view) { switch (view.getId()) { case kr.ac.kpu.wheeling.R.id.video: { if (mIsRecordingVideo) { stopRecordingVideo(); } else { startRecordingVideo(); } break; } case kr.ac.kpu.wheeling.R.id.interval: { if (mIsRecordingVideo) { stopRecordingVideo(); } else { startIntervalRecording(10000); } break; } case R.id.btn_visible: { if (mIsTracker_Visible) { tracker_layout.setVisibility(View.GONE); mButtonGallery.setVisibility(View.GONE); mButtonRiding.setVisibility(View.VISIBLE); mIsTracker_Visible = false; } else { tracker_layout.setVisibility(View.VISIBLE); mIsTracker_Visible = true; } break; } case R.id.btn_visible_riding: { if (mIsTracker_Visible == false) { tracker_layout.setVisibility(View.VISIBLE); mButtonGallery.setVisibility(View.VISIBLE); mButtonRiding.setVisibility(View.GONE); mIsTracker_Visible = true; } break; } case R.id.menu: { if (mIsRecordingVideo) { stopRecordingVideo(); } closeCamera(); startActivity(galleryActivity); break; } case R.id.btn_btest: { Log.d(TAG, "sqLiteHandler.getUserDetails().get(\"name\")" + sqLiteHandler.getUserDetails().get("name")); // sqLiteHandler.getUserDetails().get("email"); sqLiteHandler.addblackbox(bowner, fileName); break; } /* case R.id.info: { Activity activity = getActivity(); if (null != activity) { new AlertDialog.Builder(activity) .setMessage(R.string.intro_message) .setPositiveButton(android.R.string.ok, null) .show(); } break; } */ } } /** * Starts a background thread and its {@link Handler}. */ private void startBackgroundThread() { mBackgroundThread = new HandlerThread("CameraBackground"); mBackgroundThread.start(); mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); } /** * Stops the background thread and its {@link Handler}. */ private void stopBackgroundThread() { mBackgroundThread.quitSafely(); try { mBackgroundThread.join(); mBackgroundThread = null; mBackgroundHandler = null; } catch (InterruptedException e) { e.printStackTrace(); } } /** * Gets whether you should show UI with rationale for requesting permissions. * * @param permissions The permissions your app wants to request. * @return Whether you can show permission rationale UI. */ private boolean shouldShowRequestPermissionRationale(String[] permissions) { for (String permission : permissions) { if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) { return true; } } return false; } /** * Requests permissions needed for recording video. */ private void requestVideoPermissions() { if (shouldShowRequestPermissionRationale(VIDEO_PERMISSIONS)) { new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG); } else { FragmentCompat.requestPermissions(this, VIDEO_PERMISSIONS, REQUEST_VIDEO_PERMISSIONS); } } @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { Log.d(TAG, "onRequestPermissionsResult"); if (requestCode == REQUEST_VIDEO_PERMISSIONS) { if (grantResults.length == VIDEO_PERMISSIONS.length) { for (int result : grantResults) { if (result != PackageManager.PERMISSION_GRANTED) { ErrorDialog.newInstance(getString(kr.ac.kpu.wheeling.R.string.permission_request)) .show(getChildFragmentManager(), FRAGMENT_DIALOG); break; } } } else { ErrorDialog.newInstance(getString(kr.ac.kpu.wheeling.R.string.permission_request)) .show(getChildFragmentManager(), FRAGMENT_DIALOG); } } else { super.onRequestPermissionsResult(requestCode, permissions, grantResults); } } private boolean hasPermissionsGranted(String[] permissions) { for (String permission : permissions) { if (ActivityCompat.checkSelfPermission(getActivity(), permission) != PackageManager.PERMISSION_GRANTED) { return false; } } return true; } /** * Tries to open a {@link CameraDevice}. The result is listened by `mStateCallback`. */ private void openCamera(int width, int height) { if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) { requestVideoPermissions(); return; } final Activity activity = getActivity(); if (null == activity || activity.isFinishing()) { return; } CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); try { Log.d(TAG, "tryAcquire"); if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { throw new RuntimeException("Time out waiting to lock camera opening."); } String cameraId = manager.getCameraIdList()[0]; // Choose the sizes for camera preview and video recording CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class)); mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize); Log.d("INFO", "Width and Height: " + width + height); Log.d("INFO", "Video Height: " + mVideoSize.getHeight() + " Width: " + mVideoSize.getWidth()); Log.d("INFO", "Preview Height: " + mPreviewSize.getHeight() + " Width: " + mPreviewSize.getWidth()); screenInfo = new ScreenInfo(); screenInfo.setNoSoftKeyScreenInfo(activity); Log.d("SCREEN", screenInfo.toString()); int orientation = getResources().getConfiguration().orientation; if (orientation == Configuration.ORIENTATION_LANDSCAPE) { mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight()); } else { mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth()); } configureTransform(width, height); mMediaRecorder = new MediaRecorder(); manager.openCamera(cameraId, mStateCallback, null); } catch (CameraAccessException e) { Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show(); activity.finish(); } catch (NullPointerException e) { // Currently an NPE is thrown when the Camera2API is used but not supported on the // device this code runs. ErrorDialog.newInstance(getString(kr.ac.kpu.wheeling.R.string.camera_error)) .show(getChildFragmentManager(), FRAGMENT_DIALOG); } catch (InterruptedException e) { throw new RuntimeException("Interrupted while trying to lock camera opening."); } catch (SecurityException e) { } } private void closeCamera() { try { mCameraOpenCloseLock.acquire(); closePreviewSession(); if (null != mCameraDevice) { mCameraDevice.close(); mCameraDevice = null; } if (null != mMediaRecorder) { mMediaRecorder.release(); mMediaRecorder = null; } } catch (InterruptedException e) { throw new RuntimeException("Interrupted while trying to lock camera closing."); } finally { mCameraOpenCloseLock.release(); } } /** * Start the camera preview. */ private void startPreview() { if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) { return; } try { closePreviewSession(); SurfaceTexture texture = mTextureView.getSurfaceTexture(); assert texture != null; texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); Surface previewSurface = new Surface(texture); mPreviewBuilder.addTarget(previewSurface); mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() { @Override public void onConfigured(CameraCaptureSession cameraCaptureSession) { mPreviewSession = cameraCaptureSession; updatePreview(); } @Override public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) { Activity activity = getActivity(); if (null != activity) { Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show(); } } }, mBackgroundHandler); } catch (CameraAccessException e) { e.printStackTrace(); } } /** * Update the camera preview. {@link #startPreview()} needs to be called in advance. */ private void updatePreview() { if (null == mCameraDevice) { return; } try { setUpCaptureRequestBuilder(mPreviewBuilder); HandlerThread thread = new HandlerThread("CameraPreview"); thread.start(); mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler); } catch (CameraAccessException e) { e.printStackTrace(); } } private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) { builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO); } /** * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`. * This method should not to be called until the camera preview size is determined in * openCamera, or until the size of `mTextureView` is fixed. * * @param viewWidth The width of `mTextureView` * @param viewHeight The height of `mTextureView` */ private void configureTransform(int viewWidth, int viewHeight) { Activity activity = getActivity(); if (null == mTextureView || null == mPreviewSize || null == activity) { return; } int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); Matrix matrix = new Matrix(); RectF viewRect = new RectF(0, 0, viewWidth, viewHeight); RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth()); float centerX = viewRect.centerX(); float centerY = viewRect.centerY(); if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) { bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(), (float) viewWidth / mPreviewSize.getWidth()); matrix.postScale(scale, scale, centerX, centerY); matrix.postRotate(90 * (rotation - 2), centerX, centerY); } mTextureView.setTransform(matrix); } private void setUpMediaRecorder() throws IOException { final Activity activity = getActivity(); if (null == activity) { return; } mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE); mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4); if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) { mNextVideoAbsolutePath = getVideoFilePath(getActivity()); } mMediaRecorder.setOutputFile(mNextVideoAbsolutePath); mMediaRecorder.setVideoEncodingBitRate(16900000); mMediaRecorder.setAudioEncodingBitRate(96100); mMediaRecorder.setVideoFrameRate(30); mMediaRecorder.setAudioSamplingRate(48000); mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight()); mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264); mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC); int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); switch (mSensorOrientation) { case SENSOR_ORIENTATION_DEFAULT_DEGREES: mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation)); break; case SENSOR_ORIENTATION_INVERSE_DEGREES: mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation)); break; } mMediaRecorder.prepare(); } public static boolean canWritable() { boolean mExternalStorageAvailable = false; boolean mExternalStorageWriteable = false; String state = Environment.getExternalStorageState(); if (Environment.MEDIA_MOUNTED.equals(state)) { // We can read and write the media mExternalStorageAvailable = mExternalStorageWriteable = true; Log.d("STORAGE", "WRITEABLE" + " Available: " + mExternalStorageAvailable + ", Writeable: " + mExternalStorageWriteable); return true; } else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)) { // We can only read the media mExternalStorageAvailable = true; mExternalStorageWriteable = false; Log.d("STORAGE", "READABLE" + " Available: " + mExternalStorageAvailable + ", Writeable: " + mExternalStorageWriteable); return false; } else { // Something else is wrong. It may be one of many other states, but all we need // to know is we can neither read nor write mExternalStorageAvailable = mExternalStorageWriteable = false; Log.e("STORAGE", "ERROR" + " Available: " + mExternalStorageAvailable + ", Writeable: " + mExternalStorageWriteable); return false; } } private String getVideoFilePath(Context context) { fileName = "wheeling_" + System.currentTimeMillis() + ".mp4"; return getVideoStorageDir("wheeling").getAbsolutePath() + "/" + fileName; } public void startMediaScanning(Context context, String fileName) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { final Intent intent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://" + getVideoStorageDir("wheeling") + "/" + fileName)); //final Intent scanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE); //final Uri contentUri = Uri.fromFile(file); //scanIntent.setData(contentUri); context.sendBroadcast(intent); Log.d("BROADCAST", "Broadcast Complete!"); } else { final Intent intent = new Intent(Intent.ACTION_MEDIA_MOUNTED, Uri.parse("file://" + Environment.getExternalStorageDirectory())); context.sendBroadcast(intent); Log.d("BROADCAST", "Broadcast Complete!(Low ver)"); } } public File getVideoStorageDir(String albumName) { //File file = new File(Environment.getExternalStorageDirectory(), albumName); File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), albumName); if (canWritable() && !file.isDirectory()) { if (!file.mkdirs()) { Log.e("STORAGE", "Directory not created"); } } return file; } /* Checks if external storage is available for read and write */ public boolean isExternalStorageWritable() { String state = Environment.getExternalStorageState(); if (Environment.MEDIA_MOUNTED.equals(state)) { return true; } return false; } public void startRecordingVideo() { bowner = sqLiteHandler.getUserDetails().get("email"); Log.d("bowner", "bowner : " + bowner); if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) { return; } try { closePreviewSession(); setUpMediaRecorder(); SurfaceTexture texture = mTextureView.getSurfaceTexture(); assert texture != null; texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); List<Surface> surfaces = new ArrayList<>(); // Set up Surface for the camera preview Surface previewSurface = new Surface(texture); surfaces.add(previewSurface); mPreviewBuilder.addTarget(previewSurface); // Set up Surface for the MediaRecorder mRecorderSurface = mMediaRecorder.getSurface(); surfaces.add(mRecorderSurface); mPreviewBuilder.addTarget(mRecorderSurface); // Start a capture session // Once the session starts, we can update the UI and start recording mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() { @Override public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { mPreviewSession = cameraCaptureSession; updatePreview(); getActivity().runOnUiThread(new Runnable() { @Override public void run() { // UI // mButtonVideo.setText(kr.ac.kpu.wheeling.R.string.stop); mIsRecordingVideo = true; // Start recording mMediaRecorder.start(); Log.d(TAG, "mMediaRecorder Started"); } }); } @Override public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { Activity activity = getActivity(); if (null != activity) { Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show(); } } }, mBackgroundHandler); } catch (CameraAccessException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } private void closePreviewSession() { if (mPreviewSession != null) { mPreviewSession.close(); mPreviewSession = null; } } public void stopRecordingVideo() { // UI mIsRecordingVideo = false; //mButtonVideo.setText(kr.ac.kpu.wheeling.R.string.record); // Stop recording mMediaRecorder.stop(); mMediaRecorder.reset(); sqLiteHandler.addblackbox(bowner, fileName); Activity activity = getActivity(); if (null != activity) { Toast.makeText(activity, "??? ?: " + mNextVideoAbsolutePath, Toast.LENGTH_SHORT).show(); Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath); } startMediaScanning(activity, fileName); mNextVideoAbsolutePath = null; startPreview(); } /** * Compares two {@code Size}s based on their areas. */ static class CompareSizesByArea implements Comparator<Size> { @Override public int compare(Size lhs, Size rhs) { // We cast here to ensure the multiplications won't overflow return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); } } public static class ErrorDialog extends DialogFragment { private static final String ARG_MESSAGE = "message"; public static ErrorDialog newInstance(String message) { ErrorDialog dialog = new ErrorDialog(); Bundle args = new Bundle(); args.putString(ARG_MESSAGE, message); dialog.setArguments(args); return dialog; } @Override public Dialog onCreateDialog(Bundle savedInstanceState) { final Activity activity = getActivity(); return new AlertDialog.Builder(activity).setMessage(getArguments().getString(ARG_MESSAGE)) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { activity.finish(); } }).create(); } } public static class ConfirmationDialog extends DialogFragment { @Override public Dialog onCreateDialog(Bundle savedInstanceState) { final Fragment parent = getParentFragment(); return new AlertDialog.Builder(getActivity()).setMessage(kr.ac.kpu.wheeling.R.string.permission_request) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { FragmentCompat.requestPermissions(parent, VIDEO_PERMISSIONS, REQUEST_VIDEO_PERMISSIONS); } }).setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { parent.getActivity().finish(); } }).create(); } } public SensorEventListener gyroListener = new SensorEventListener() { public void onAccuracyChanged(Sensor sensor, int acc) { } public void onSensorChanged(SensorEvent event) { float x = event.values[0]; float y = event.values[1]; float z = event.values[2]; float absx = Math.abs(x); float absy = Math.abs(y); float absz = Math.abs(z); if (absy >= 5 && !mIsRecordingVideo) { if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) { return; } if (mIsSensorChanged == false || mSensorMode == true) { Log.d("MESG", " ?"); try { Toast.makeText(getActivity(), "? ??. .", Toast.LENGTH_SHORT).show(); mMediaRecorder.setMaxDuration(10000); startRecordingVideo(); chronometer.setBase(SystemClock.elapsedRealtime()); chronometer.start(); mIsSensorChanged = true; } catch (Exception e) { } } } else if (absz >= 5 && !mIsRecordingVideo) { if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) { return; } if (mIsSensorChanged == false || mSensorMode == true) { Log.d("MESG", " ?"); try { Toast.makeText(getActivity(), " ??. .", Toast.LENGTH_SHORT).show(); mMediaRecorder.setMaxDuration(10000); startRecordingVideo(); chronometer.setBase(SystemClock.elapsedRealtime()); chronometer.start(); mIsSensorChanged = true; } catch (Exception e) { } } } else if (absx >= 5 && !mIsRecordingVideo) { Log.d("MESG", " ?"); try { Toast.makeText(getActivity(), "? ??. .", Toast.LENGTH_SHORT).show(); mMediaRecorder.setMaxDuration(10000); startRecordingVideo(); chronometer.setBase(SystemClock.elapsedRealtime()); chronometer.start(); mIsSensorChanged = true; } catch (Exception e) { } } //textX.setText("X : " + (int)x + " rad/s"); //textY.setText("Y : " + (int)y + " rad/s"); //textZ.setText("Z : " + (int)z + " rad/s"); } }; //http://arabiannight.tistory.com/entry/331 public Handler mHandler = new Handler() { public void handleMessage(Message msg) { switch (msg.what) { case START_RECORDING: { startRecordingVideo(); } case STOP_RECORDING: { stopRecordingVideo(); } } } }; public void startIntervalRecording(int ms) { mTimer = new CountDownTimer(ms, 1000) { boolean recordStart = false; @Override public void onTick(long millisUntilFinished) { if (!recordStart) { recordStart = true; startRecordingVideo(); } } @Override public void onFinish() { recordStart = false; if (mIsRecordingVideo) { stopRecordingVideo(); mTimer.start(); } } }; mTimer.start(); } }