Example usage for android.view ScaleGestureDetector ScaleGestureDetector

List of usage examples for android.view ScaleGestureDetector ScaleGestureDetector

Introduction

On this page you can find example usage of the android.view ScaleGestureDetector constructor, ScaleGestureDetector(Context, OnScaleGestureListener).

Prototype

public ScaleGestureDetector(Context context, OnScaleGestureListener listener) 

Document

Creates a ScaleGestureDetector with the supplied listener.
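
Before the collected examples below, here is a minimal sketch of the typical pattern: construct the detector with a listener (SimpleOnScaleGestureListener is the convenience base class) and forward touch events to it from onTouchEvent. The activity name PinchZoomActivity, the scaleFactor field, and the log tag are illustrative assumptions, not taken from any example on this page.

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;

// Hypothetical activity used only to illustrate the constructor; not from the examples below.
public class PinchZoomActivity extends Activity {

    private ScaleGestureDetector scaleGestureDetector;
    private float scaleFactor = 1.0f; // accumulated pinch scale (illustrative field)

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Create the detector with the supplied listener; onScale fires as the pinch progresses.
        scaleGestureDetector = new ScaleGestureDetector(this,
                new ScaleGestureDetector.SimpleOnScaleGestureListener() {
                    @Override
                    public boolean onScale(ScaleGestureDetector detector) {
                        scaleFactor *= detector.getScaleFactor();
                        Log.d("PinchZoom", "scaleFactor = " + scaleFactor);
                        return true; // consume the event and reset the span baseline
                    }
                });
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Forward every touch event so the detector can track the gesture.
        boolean handled = scaleGestureDetector.onTouchEvent(event);
        return handled || super.onTouchEvent(event);
    }
}

As the examples below show, the same pattern applies in fragments and custom views: pass any Context (an Activity, getContext(), or getActivity()) plus a listener, and route MotionEvents to the detector from an onTouchEvent or OnTouchListener callback.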

Usage

From source file:com.med.fast.ocr.OcrCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.ocr_capture);

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // TODO: Set up the Text To Speech engine.
    TextToSpeech.OnInitListener listener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("TTS", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("TTS", "Error starting the text to speech engine.");
            }
        }
    };
    tts = new TextToSpeech(this.getApplicationContext(), listener);
}

From source file:com.google.android.gms.samples.vision.barcodereader.BarcodeCapture.java

/**
 * Initializes the UI and creates the detector pipeline.
 */

@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
        @Nullable Bundle savedInstanceState) {
    View rootView = inflater.inflate(R.layout.barcode_capture, container, false);

    mPreview = (CameraSourcePreview) rootView.findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) rootView.findViewById(R.id.graphicOverlay);
    mGraphicOverlay.setShowText(isShouldShowText());
    mGraphicOverlay.setRectColors(getRectColors());
    mGraphicOverlay.setDrawRect(isShowDrawRect());

    // read parameters from the intent used to launch the activity.

    requestCameraPermission();

    gestureDetector = new GestureDetector(getContext(), new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(getContext(), new ScaleListener());

    rootView.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View view, MotionEvent e) {
            boolean b = scaleGestureDetector.onTouchEvent(e);

            boolean c = gestureDetector.onTouchEvent(e);
            return b || c || view.onTouchEvent(e);
        }
    });
    return rootView;
}

From source file:ar.com.bestprice.buyitnow.barcodereader.BarcodeCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}

From source file:org.careerop.textscanner.OcrCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Use two finger to zoom", Snackbar.LENGTH_LONG).show();
}

From source file:ca.frozen.curlingtv.activities.VideoFragment.java

@Override
public void onCreate(Bundle savedInstanceState) {
    // configure the activity
    super.onCreate(savedInstanceState);

    // load the settings and cameras
    Utils.loadData();

    // get the parameters
    camera = getArguments().getParcelable(CAMERA);
    fullScreen = getArguments().getBoolean(FULL_SCREEN);

    // create the gesture recognizers
    simpleDetector = new GestureDetector(getActivity(), new SimpleListener());
    scaleDetector = new ScaleGestureDetector(getActivity(), new ScaleListener());

    // create the fade in handler and runnable
    fadeInHandler = new Handler();
    fadeInRunner = new Runnable() {
        @Override
        public void run() {
            Animation fadeInName = new AlphaAnimation(0, 1);
            fadeInName.setDuration(FADEIN_ANIMATION_TIME);
            fadeInName.setFillAfter(true);
            Animation fadeInSnapshot = new AlphaAnimation(0, 1);
            fadeInSnapshot.setDuration(FADEIN_ANIMATION_TIME);
            fadeInSnapshot.setFillAfter(true);
            nameView.startAnimation(fadeInName);
            snapshotButton.startAnimation(fadeInSnapshot);
            fadeListener.onStartFadeIn();
        }
    };

    // create the fade out handler and runnable
    fadeOutHandler = new Handler();
    fadeOutRunner = new Runnable() {
        @Override
        public void run() {
            Animation fadeOutName = new AlphaAnimation(1, 0);
            fadeOutName.setDuration(FADEOUT_ANIMATION_TIME);
            fadeOutName.setFillAfter(true);
            Animation fadeOutSnapshot = new AlphaAnimation(1, 0);
            fadeOutSnapshot.setDuration(FADEOUT_ANIMATION_TIME);
            fadeOutSnapshot.setFillAfter(true);
            nameView.startAnimation(fadeOutName);
            snapshotButton.startAnimation(fadeOutSnapshot);
            fadeListener.onStartFadeOut();
        }
    };

    // create the finish handler and runnable
    finishHandler = new Handler();
    finishRunner = new Runnable() {
        @Override
        public void run() {
            getActivity().finish();
        }
    };
}

From source file:ocr.OcrCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.activity_ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // TODO: Set up the Text To Speech engine.
    TextToSpeech.OnInitListener listener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("TTS", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("TTS", "Error starting the text to speech engine.");
            }
        }
    };
    tts = new TextToSpeech(this.getApplicationContext(), listener);

}

From source file:com.example.sanya.likhawat_v1.OcrCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}

From source file:nl.achan.apps.sbb_spelgids.scanner.BarcodeCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, true);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}

From source file:org.videolan.vlc.gui.view.PopupLayout.java

@SuppressWarnings("deprecation")
private void init(Context context) {
    mWindowManager = (WindowManager) context.getApplicationContext().getSystemService(Context.WINDOW_SERVICE);

    final WindowManager.LayoutParams params = new WindowManager.LayoutParams(
            VLCApplication.getAppResources().getDimensionPixelSize(R.dimen.video_pip_width),
            VLCApplication.getAppResources().getDimensionPixelSize(R.dimen.video_pip_heigth),
            WindowManager.LayoutParams.TYPE_PHONE, WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE,
            PixelFormat.OPAQUE);

    params.gravity = Gravity.BOTTOM | Gravity.START;
    params.x = 50;
    params.y = 50;
    if (AndroidUtil.isHoneycombOrLater())
        mScaleGestureDetector = new ScaleGestureDetector(context, this);
    setOnTouchListener(this);
    mWindowManager.addView(this, params);
    mLayoutParams = (WindowManager.LayoutParams) getLayoutParams();

    updateWindowSize();
}

From source file:com.example.ocr.linkfetcherocr.OcrCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
    // To get a back button when capturing a picture
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);

}