List of usage examples for the android.view.ScaleGestureDetector constructor:
public ScaleGestureDetector(Context context, OnScaleGestureListener listener)
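Every example on this page passes the host Activity as the Context and an instance of a private ScaleListener class as the OnScaleGestureListener. The following is a minimal, self-contained sketch of that pattern; the class name PinchZoomActivity, the scale field, and the listener body are illustrative assumptions, not code from any of the projects listed below.

import android.app.Activity;
import android.os.Bundle;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;

// Illustrative sketch only: SimpleOnScaleGestureListener lets the listener
// override just the callbacks it needs before being handed to the constructor.
public class PinchZoomActivity extends Activity {

    private ScaleGestureDetector scaleGestureDetector;
    private float scale = 1f; // hypothetical zoom state updated by the listener

    private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
        @Override
        public boolean onScale(ScaleGestureDetector detector) {
            scale *= detector.getScaleFactor(); // factor since the previous onScale call
            return true;                        // consume this scale step
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // The Activity itself serves as the Context argument, as in the examples below.
        scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // The detector only analyzes events that are explicitly forwarded to it.
        boolean handled = scaleGestureDetector.onTouchEvent(event);
        return handled || super.onTouchEvent(event);
    }
}

Note that constructing the detector is not enough on its own: the listener is only invoked for MotionEvents that the activity or a view forwards to ScaleGestureDetector.onTouchEvent().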
From source file:me.tigerhe.shoppingpal.BarcodeCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}
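This example, like the others below, constructs both a GestureDetector (tap to capture) and a ScaleGestureDetector (pinch/stretch to zoom) but does not show how the detectors receive input. In the Mobile Vision sample code these capture activities are based on, both detectors are fed from the activity's onTouchEvent, roughly as in this sketch; the exact body may differ from project to project.

@Override
public boolean onTouchEvent(MotionEvent e) {
    // Give the scale detector first crack at the event (pinch/stretch zoom),
    // then the tap detector (tap to capture); fall back to the default handling.
    boolean scaled = scaleGestureDetector.onTouchEvent(e);
    boolean tapped = gestureDetector.onTouchEvent(e);
    return scaled || tapped || super.onTouchEvent(e);
}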
From source file:com.venkatesan.das.cardmanager.OCRCaptureActivity.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setTheme(R.style.ActionBarTheme);
    setContentView(R.layout.ocr_capture);
    getSupportActionBar().setTitle(title);
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OCRGraphic>) findViewById(R.id.graphicOverlay);

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}
From source file:com.hkbufyp.location_basedbookmarkingmobileapplication.OCRLibrary.OcrCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Please tap the text block you want to capture", Snackbar.LENGTH_LONG)
            .show();

    // TODO: Set up the Text To Speech engine.
}
From source file:com.citesnap.android.app.ocr.OcrCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);
    mOcrDetectorProcessor = new OcrDetectorProcessor(mGraphicOverlay);

    // read parameters from the intent used to launch the activity.
    Util u = new Util(this);
    boolean autoFocus = u.getBooleanProperty(Util.AUTO_FOCUS);
    boolean useFlash = u.getBooleanProperty(Util.USE_FLASH);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}
From source file:com.udacity.alexandris.BarcodeCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    //noinspection unchecked
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}
From source file:com.darryncampbell.genericscanwedge.genericscanwedge.GoogleVisionBarcode.BarcodeCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
    this.barcodeFormats = getIntent().getIntExtra("formats", 0);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}
From source file:it.jaschke.alexandria.activity.BarcodeCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.activity_barcode_capture);

    Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, getResources().getString(R.string.capture_tip), Snackbar.LENGTH_LONG).show();
}
From source file:com.app.azza.ocr.OcrCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // Set up the Text To Speech engine.
    TextToSpeech.OnInitListener listener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("OnInitListener", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("OnInitListener", "Error starting the text to speech engine.");
            }
        }
    };
    tts = new TextToSpeech(this.getApplicationContext(), listener);
}
From source file:it.jaschke.alexandria.barcodeDetection.BarcodeCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    View foundBook = findViewById(R.id.layout_foundbook);
    foundBook.findViewById(R.id.button_foundbook).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            onTap(0, 0);
        }
    });
    mGraphicOverlay.setFoundButton(foundBook);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}
From source file:com.example.anant_dimri.cameratest.OcrCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);
    getSupportActionBar().hide();

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    //mGraphicOverlay = (com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}