// Java tutorial
/*
 * Copyright (C) The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.ecsm.android.readForMe.activities;

import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.Toast;

import com.ecsm.android.readForMe.OcrDetectorProcessor;
import com.ecsm.android.readForMe.OcrGraphic;
import com.ecsm.android.readForMe.camera.CameraSource;
import com.ecsm.android.readForMe.camera.CameraSourcePreview;
import com.ecsm.android.readForMe.camera.GraphicOverlay;
import com.ecsm.android.readForMe.listener.OcrResultListener;
import com.ecsm.android.readForMe.listener.SoundListener;
import com.ecsm.android.readForMe.util.ResultTextHolder;
import com.ecsm.android.readForMe.util.SoundManager;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.samples.vision.ocrreader.R;
import com.google.android.gms.vision.text.TextRecognizer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Activity for the Ocr Detecting app. This app detects text and displays the value with the
 * rear facing camera. During detection overlay graphics are drawn to indicate the position,
 * size, and contents of each TextBlock. Detected text is read aloud via {@link SoundManager},
 * with play/pause/next/previous controls over the spoken word list.
 */
public final class OcrCaptureActivity extends AppCompatActivity
        implements SoundListener, OcrResultListener {

    // Constants used to pass extra data in the intent
    public static final String AutoFocus = "AutoFocus";
    public static final String UseFlash = "UseFlash";

    private static final String TAG = "OcrCaptureActivity";

    // Intent request code to handle updating play services if needed.
    private static final int RC_HANDLE_GMS = 9001;

    // Permission request codes need to be < 256
    private static final int RC_HANDLE_CAMERA_PERM = 2;

    private CameraSource mCameraSource;
    private CameraSourcePreview mPreview;
    private GraphicOverlay<OcrGraphic> mGraphicOverlay;

    // FIX: these two fields were commented out in the original source even though
    // they are assigned and read below (initialResources(), createCameraSource()),
    // which made the class fail to compile.
    private ResultTextHolder mResultTextHolder;
    private ImageButton mPlayPauseButton;

    private TextView mStatusView;

    // isPauseForSound: set when speech is stopped externally (onSoundStop);
    // isPauseForButton: set when the user taps the play/pause button.
    private boolean isPauseForSound, isPauseForButton;

    /// Sound Manager
    private SoundManager mSoundManager;

    // mOldList: the word list currently being spoken; mNewList: the most
    // recently detected word list, reconciled against mOldList in onResult().
    private List<String> mOldList, mNewList;

    // mTtsPointer: index of the next word to speak in mOldList.
    // mLastMatchPointer: starting index for reconciliation in onResult().
    private int mTtsPointer, mLastMatchPointer;

    /**
     * Initializes the UI and creates the detector pipeline.
     */
    @Override
    public void onCreate(Bundle bundle) {
        super.onCreate(bundle);
        setContentView(R.layout.ocr_capture);

        // initiate resources
        initialResources();

        // Check for the camera permission before accessing the camera. If the
        // permission is not granted yet, request permission.
        // NOTE(review): auto-focus is hard-coded on and flash off here, while
        // onRequestPermissionsResult() reads the AutoFocus/UseFlash intent
        // extras instead — confirm which behavior is intended.
        int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
        if (rc == PackageManager.PERMISSION_GRANTED) {
            createCameraSource(true, false);
        } else {
            requestCameraPermission();
        }

        Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom",
                Snackbar.LENGTH_LONG).show();
    }

    /**
     * Looks up the views, wires the playback control buttons, and creates the
     * OCR result holder, the sound manager, and the word lists.
     */
    private void initialResources() {
        mPreview = (CameraSourcePreview) findViewById(R.id.preview);
        mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);
        mPlayPauseButton = (ImageButton) findViewById(R.id.mPlayPauseButton);
        mStatusView = (TextView) findViewById(R.id.mStatusView);
        ImageButton prevButton = (ImageButton) findViewById(R.id.mPrevButton);
        ImageButton nextButton = (ImageButton) findViewById(R.id.mNextButton);

        /// listener
        mPlayPauseButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (isPauseForButton) {
                    // Currently paused: resume speaking from the current pointer.
                    mPlayPauseButton.setImageResource(R.drawable.ic_pause);
                    isPauseForButton = false;
                    if (mTtsPointer < mOldList.size())
                        mSoundManager.speak(mOldList.get(mTtsPointer++), "islam");
                } else {
                    // Currently playing: pause.
                    mPlayPauseButton.setImageResource(R.drawable.ic_play);
                    isPauseForButton = true;
                    mSoundManager.stopSpeaking();
                }
            }
        });

        nextButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mSoundManager.stopSpeaking();
                ++mTtsPointer;
                if (mTtsPointer < mOldList.size())
                    mSoundManager.speak(mOldList.get(mTtsPointer), "islam");
            }
        });

        prevButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mSoundManager.stopSpeaking();
                // FIX: the original decremented unconditionally, so repeated
                // clicks at the start drove the pointer arbitrarily negative.
                // Clamp at 0 and bounds-check before speaking.
                if (mTtsPointer > 0) {
                    --mTtsPointer;
                }
                if (mTtsPointer < mOldList.size())
                    mSoundManager.speak(mOldList.get(mTtsPointer), "islam");
            }
        });

        // initiate the text container from the ocr result
        mResultTextHolder = new ResultTextHolder(this);

        //// initiate sound manager
        mSoundManager = new SoundManager(this, this);
        mSoundManager.initializeTTS();

        mOldList = new ArrayList<>();
        mNewList = new ArrayList<>();
    }

    /**
     * Handles the requesting of the camera permission. This includes
     * showing a "Snackbar" message of why the permission is needed then
     * sending the request.
     */
    private void requestCameraPermission() {
        Log.w(TAG, "Camera permission is not granted. Requesting permission");

        final String[] permissions = new String[]{Manifest.permission.CAMERA};

        if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
                Manifest.permission.CAMERA)) {
            ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
            return;
        }

        final Activity thisActivity = this;

        View.OnClickListener listener = new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                ActivityCompat.requestPermissions(thisActivity, permissions,
                        RC_HANDLE_CAMERA_PERM);
            }
        };

        Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
                Snackbar.LENGTH_INDEFINITE)
                .setAction(R.string.ok, listener)
                .show();
    }

    /**
     * Creates and starts the camera. Note that this uses a higher resolution in comparison
     * to other detection examples to enable the ocr detector to detect small text samples
     * at long distances.
     * <p>
     * Suppressing InlinedApi since there is a check that the minimum version is met before using
     * the constant.
     *
     * @param autoFocus whether continuous-picture auto-focus should be enabled.
     * @param useFlash  whether the torch flash mode should be enabled.
     */
    @SuppressLint("InlinedApi")
    private void createCameraSource(boolean autoFocus, boolean useFlash) {
        Context context = getApplicationContext();

        // A text recognizer is created to find text. An associated multi-processor instance
        // is set to receive the text recognition results, track the text, and maintain
        // graphics for each text block on screen. The factory is used by the multi-processor to
        // create a separate tracker instance for each text block.
        TextRecognizer textRecognizer = new TextRecognizer.Builder(context).build();
        textRecognizer.setProcessor(new OcrDetectorProcessor(mGraphicOverlay, mResultTextHolder));

        if (!textRecognizer.isOperational()) {
            // Note: The first time that an app using a Vision API is installed on a
            // device, GMS will download a native libraries to the device in order to do detection.
            // Usually this completes before the app is run for the first time. But if that
            // download has not yet completed, then the above call will not detect any text,
            // barcodes, or faces.
            //
            // isOperational() can be used to check if the required native libraries are currently
            // available. The detectors will automatically become operational once the library
            // downloads complete on device.
            Log.w(TAG, "Detector dependencies are not yet available.");

            // Check for low storage. If there is low storage, the native library will not be
            // downloaded, so detection will not become operational.
            IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
            boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;

            if (hasLowStorage) {
                Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
                Log.w(TAG, getString(R.string.low_storage_error));
            }
        }

        // Creates and starts the camera. Note that this uses a higher resolution in comparison
        // to other detection examples to enable the text recognizer to detect small pieces of text.
        mCameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer)
                .setFacing(CameraSource.CAMERA_FACING_BACK)
                .setStatusView(mStatusView)
                .setRequestedPreviewSize(1280, 1024)
                .setRequestedFps(2.0f)
                .setFlashMode(useFlash ? Camera.Parameters.FLASH_MODE_TORCH : null)
                .setFocusMode(autoFocus ? Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE : null)
                .build();
    }

    /**
     * Restarts the camera.
     */
    @Override
    protected void onResume() {
        super.onResume();
        startCameraSource();
    }

    /**
     * Stops the camera.
     */
    @Override
    protected void onPause() {
        super.onPause();
        if (mPreview != null) {
            mPreview.stop();
        }
    }

    /**
     * Releases the resources associated with the camera source, the associated detectors, and the
     * rest of the processing pipeline.
     */
    @Override
    protected void onDestroy() {
        super.onDestroy();
        mSoundManager.onDestroy();
        if (mPreview != null) {
            mPreview.release();
        }
    }

    /**
     * Callback for the result from requesting permissions. This method
     * is invoked for every call on {@link #requestPermissions(String[], int)}.
     * <p>
     * <strong>Note:</strong> It is possible that the permissions request interaction
     * with the user is interrupted. In this case you will receive empty permissions
     * and results arrays which should be treated as a cancellation.
     * </p>
     *
     * @param requestCode  The request code passed in {@link #requestPermissions(String[], int)}.
     * @param permissions  The requested permissions. Never null.
     * @param grantResults The grant results for the corresponding permissions
     *                     which is either {@link PackageManager#PERMISSION_GRANTED}
     *                     or {@link PackageManager#PERMISSION_DENIED}. Never null.
     * @see #requestPermissions(String[], int)
     */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        if (requestCode != RC_HANDLE_CAMERA_PERM) {
            Log.d(TAG, "Got unexpected permission result: " + requestCode);
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
            return;
        }

        if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            Log.d(TAG, "Camera permission granted - initialize the camera source");
            // we have permission, so create the camerasource
            boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
            boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
            createCameraSource(autoFocus, useFlash);
            return;
        }

        Log.e(TAG, "Permission not granted: results len = " + grantResults.length
                + " Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));

        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int id) {
                finish();
            }
        };

        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle("Multitracker sample")
                .setMessage(R.string.no_camera_permission)
                .setPositiveButton(R.string.ok, listener)
                .show();
    }

    /**
     * Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
     * (e.g., because onResume was called before the camera source was created), this will be called
     * again when the camera source is created.
     */
    private void startCameraSource() throws SecurityException {
        // check that the device has play services available.
        int code = GoogleApiAvailability.getInstance()
                .isGooglePlayServicesAvailable(getApplicationContext());
        if (code != ConnectionResult.SUCCESS) {
            Dialog dlg = GoogleApiAvailability.getInstance()
                    .getErrorDialog(this, code, RC_HANDLE_GMS);
            // FIX: getErrorDialog() is documented to return null when the error
            // is not user-resolvable; guard against an NPE.
            if (dlg != null) {
                dlg.show();
            }
        }

        if (mCameraSource != null) {
            try {
                mPreview.start(mCameraSource, mGraphicOverlay);
            } catch (IOException e) {
                Log.e(TAG, "Unable to start camera source.", e);
                mCameraSource.release();
                mCameraSource = null;
            }
        }
    }

    /**
     * Called when the TTS engine finished speaking a word; speaks the next word
     * unless speech was paused externally.
     */
    @Override
    public void onSoundComplete() {
        if (!isPauseForSound) {
            if (mTtsPointer < mOldList.size())
                mSoundManager.speak(mOldList.get(mTtsPointer++), "islam");
        }
        isPauseForSound = false;
    }

    /**
     * Called when speech is stopped; rewinds the pointer one word so the
     * interrupted word is spoken again on resume.
     */
    @Override
    public void onSoundStop() {
        isPauseForSound = true;
        if (mTtsPointer > 0) mTtsPointer--;
    }

    /**
     * Receives the latest OCR text, reconciles it with the word list currently
     * being spoken, and (re)starts speech from the first diverging word.
     *
     * @param newResult raw text from the OCR detector; split on whitespace,
     *                  newlines, dots and commas into words.
     */
    @Override
    public synchronized void onResult(String newResult) {
        Log.e("Activity", newResult);
        mNewList = Arrays.asList(newResult.split("[\n .,]"));
        int oldListPointer = mLastMatchPointer, newListPointer = 0;
        for (; oldListPointer < mOldList.size(); oldListPointer++) {
            // FIX: guard newListPointer against running past the end of the new
            // word list; the original threw IndexOutOfBoundsException whenever
            // the new text was shorter than the old one. An exhausted new list
            // is treated as a divergence, same as a mismatched word.
            if (newListPointer < mNewList.size()
                    && mOldList.get(oldListPointer).equals(mNewList.get(newListPointer))) {
                newListPointer++;
            } else {
                // The texts diverge: restart speech from the differing position.
                mSoundManager.stopSpeaking();
                mTtsPointer = newListPointer;
                mOldList = mNewList;
                break;
            }
        }
        if (mOldList.isEmpty()) {
            // First result ever seen: adopt it and start from the beginning.
            mOldList = mNewList;
            mTtsPointer = 0;
        }
        if (mTtsPointer < mOldList.size())
            mSoundManager.speak(mOldList.get(mTtsPointer++), "islam");
    }

    private class ScaleListener implements ScaleGestureDetector.OnScaleGestureListener {

        /**
         * Responds to scaling events for a gesture in progress.
         * Reported by pointer motion.
         *
         * @param detector The detector reporting the event - use this to
         *                 retrieve extended info about event state.
         * @return Whether or not the detector should consider this event
         * as handled. If an event was not handled, the detector
         * will continue to accumulate movement until an event is
         * handled. This can be useful if an application, for example,
         * only wants to update scaling factors if the change is
         * greater than 0.01.
         */
        @Override
        public boolean onScale(ScaleGestureDetector detector) {
            return false;
        }

        /**
         * Responds to the beginning of a scaling gesture. Reported by
         * new pointers going down.
         *
         * @param detector The detector reporting the event - use this to
         *                 retrieve extended info about event state.
         * @return Whether or not the detector should continue recognizing
         * this gesture. For example, if a gesture is beginning
         * with a focal point outside of a region where it makes
         * sense, onScaleBegin() may return false to ignore the
         * rest of the gesture.
         */
        @Override
        public boolean onScaleBegin(ScaleGestureDetector detector) {
            return true;
        }

        /**
         * Responds to the end of a scale gesture. Reported by existing
         * pointers going up.
         * <p/>
         * Once a scale has ended, {@link ScaleGestureDetector#getFocusX()}
         * and {@link ScaleGestureDetector#getFocusY()} will return focal point
         * of the pointers remaining on the screen.
         *
         * @param detector The detector reporting the event - use this to
         *                 retrieve extended info about event state.
         */
        @Override
        public void onScaleEnd(ScaleGestureDetector detector) {
            if (mCameraSource != null) {
                mCameraSource.doZoom(detector.getScaleFactor());
            }
        }
    }
}