List of usage examples for the android.view.GestureDetector constructor
public GestureDetector(Context context, OnGestureListener listener)
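Before the project examples, here is a minimal, self-contained sketch of this constructor in use. The activity name and listener behavior are illustrative assumptions, not taken from any of the examples below: the detector is built from a SimpleOnGestureListener (which implements OnGestureListener) and is fed every touch event from the activity's onTouchEvent.

import android.app.Activity;
import android.os.Bundle;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.widget.Toast;

// Hypothetical minimal usage sketch; not from the source files listed below.
public class GestureDemoActivity extends Activity {

    private GestureDetector gestureDetector;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // SimpleOnGestureListener implements OnGestureListener, so it satisfies
        // the GestureDetector(Context, OnGestureListener) constructor.
        gestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
            @Override
            public boolean onDown(MotionEvent e) {
                // Return true so the detector keeps tracking the rest of the gesture.
                return true;
            }

            @Override
            public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
                Toast.makeText(GestureDemoActivity.this, "Fling detected", Toast.LENGTH_SHORT).show();
                return true;
            }
        });
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Forward every touch event to the detector; fall back to default handling otherwise.
        return gestureDetector.onTouchEvent(event) || super.onTouchEvent(event);
    }
}

As the examples below show, the same pattern also works from a View.OnTouchListener: construct the detector once, then call detector.onTouchEvent(event) inside onTouch.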
From source file:com.example.sanya.likhawat_v1.OcrCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}
From source file:com.farmerbb.taskbar.activity.HomeActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    getWindow().setFlags(WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS,
            WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);

    View view = new View(this);
    view.setOnClickListener(view1 -> LocalBroadcastManager.getInstance(HomeActivity.this)
            .sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_START_MENU")));

    view.setOnLongClickListener(view12 -> {
        setWallpaper();
        return false;
    });

    view.setOnGenericMotionListener((view13, motionEvent) -> {
        if (motionEvent.getAction() == MotionEvent.ACTION_BUTTON_PRESS
                && motionEvent.getButtonState() == MotionEvent.BUTTON_SECONDARY) {
            setWallpaper();
        }
        return false;
    });

    final GestureDetector detector = new GestureDetector(this, new GestureDetector.OnGestureListener() {
        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            return false;
        }

        @Override
        public void onShowPress(MotionEvent e) {
        }

        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            return false;
        }

        @Override
        public void onLongPress(MotionEvent e) {
        }

        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            return false;
        }

        @Override
        public boolean onDown(MotionEvent e) {
            return false;
        }
    });

    detector.setOnDoubleTapListener(new GestureDetector.OnDoubleTapListener() {
        @Override
        public boolean onDoubleTap(MotionEvent e) {
            final SharedPreferences pref = U.getSharedPreferences(HomeActivity.this);
            if (!pref.getBoolean("dont_show_double_tap_dialog", false)) {
                if (pref.getBoolean("double_tap_to_sleep", false)) {
                    U.lockDevice(HomeActivity.this);
                } else {
                    int theme = -1;

                    switch (pref.getString("theme", "light")) {
                        case "light":
                            theme = R.style.AppTheme;
                            break;
                        case "dark":
                            theme = R.style.AppTheme_Dark;
                            break;
                    }

                    AlertDialog.Builder builder = new AlertDialog.Builder(
                            new ContextThemeWrapper(HomeActivity.this, theme));
                    builder.setTitle(R.string.double_tap_to_sleep)
                            .setMessage(R.string.enable_double_tap_to_sleep)
                            .setNegativeButton(
                                    pref.getBoolean("double_tap_dialog_shown", false)
                                            ? R.string.action_dont_show_again
                                            : R.string.action_cancel,
                                    (dialog, which) -> pref.edit()
                                            .putBoolean(pref.getBoolean("double_tap_dialog_shown", false)
                                                    ? "dont_show_double_tap_dialog"
                                                    : "double_tap_dialog_shown", true)
                                            .apply())
                            .setPositiveButton(R.string.action_ok, (dialog, which) -> {
                                pref.edit().putBoolean("double_tap_to_sleep", true).apply();
                                U.lockDevice(HomeActivity.this);
                            });

                    AlertDialog dialog = builder.create();
                    dialog.show();
                }
            }

            return false;
        }

        @Override
        public boolean onDoubleTapEvent(MotionEvent e) {
            return false;
        }

        @Override
        public boolean onSingleTapConfirmed(MotionEvent e) {
            return false;
        }
    });

    view.setOnTouchListener((v, event) -> {
        detector.onTouchEvent(event);
        return false;
    });

    setContentView(view);

    LocalBroadcastManager.getInstance(this).registerReceiver(killReceiver,
            new IntentFilter("com.farmerbb.taskbar.KILL_HOME_ACTIVITY"));

    LocalBroadcastManager.getInstance(this).registerReceiver(forceTaskbarStartReceiver,
            new IntentFilter("com.farmerbb.taskbar.FORCE_TASKBAR_RESTART"));
}
From source file:nl.achan.apps.sbb_spelgids.scanner.BarcodeCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, true);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}
From source file:com.example.ocr.linkfetcherocr.OcrCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // To get a back button when capturing a picture
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
From source file:it.jaschke.alexandria.CameraPreview.BarcodeCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    // Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
    //         Snackbar.LENGTH_LONG)
    //         .show();
}
From source file:com.amazon.appstream.sampleclient.SampleClientActivity.java
/**
 * Initialization. Sets up the app and spawns the connection dialog.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Log.v(TAG, "onCreate");

    mGestureDetector = new GestureDetector(this, this);
    mGestureDetector.setIsLongpressEnabled(false);

    mTouchscreenAvailable = getPackageManager().hasSystemFeature("android.hardware.touchscreen");
    Log.v(TAG, "Touch screen available: " + mTouchscreenAvailable);

    SharedPreferences prefs = getSharedPreferences("main", MODE_PRIVATE);
    if (prefs.contains(SERVER_ADDRESS)) {
        mServerAddress = prefs.getString(SERVER_ADDRESS, null);
    }
    if (prefs.contains(DES_SERVER_ADDRESS)) {
        mDESServerAddress = prefs.getString(DES_SERVER_ADDRESS, null);
    }
    if (prefs.contains(USE_APP_SERVER)) {
        mUseAppServer = prefs.getBoolean(USE_APP_SERVER, false);
    }
    if (prefs.contains(APP_ID)) {
        mAppId = prefs.getString(APP_ID, null);
    }
    if (prefs.contains(USER_ID)) {
        mUserId = prefs.getString(USER_ID, null);
    }

    requestWindowFeature(Window.FEATURE_NO_TITLE);
}
From source file:com.ohbrothers.www.accountbook.ocr.OcrCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, R.string.tap_to_capture, Snackbar.LENGTH_LONG).show();
}
From source file:com.example.paulogabriel.test_app.BarcodeCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    /* Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG)
            .show(); */
}
From source file:de.damdi.fitness.activity.create_workout.ExerciseTypeDetailFragment.java
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View rootView = inflater.inflate(R.layout.fragment_exercisetype_detail, container, false);

    // show the current exercise
    ImageView imageview = (ImageView) rootView.findViewById(R.id.imageview);

    // set gesture detector
    this.mGestureScanner = new GestureDetector(this.getActivity(),
            new ExerciseDetailOnGestureListener(this, imageview, mExercise));

    // Images
    if (!mExercise.getImagePaths().isEmpty()) {
        DataHelper data = new DataHelper(getActivity());
        imageview.setImageDrawable(data.getDrawable(mExercise.getImagePaths().get(0).toString()));
    } else {
        imageview.setImageResource(R.drawable.ic_launcher);
    }

    rootView.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return mGestureScanner.onTouchEvent(event);
        }
    });

    return rootView;
}
From source file:argusui.com.argus.OcrCaptureActivity.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // TODO: Set up the Text To Speech engine.
    TextToSpeech.OnInitListener listener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("TTS", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("TTS", "Error starting the text to speech engine.");
            }
        }
    };
    tts = new TextToSpeech(this.getApplicationContext(), listener);
}