Example usage for android.view ScaleGestureDetector ScaleGestureDetector

List of usage examples for android.view ScaleGestureDetector ScaleGestureDetector

Introduction

On this page you can find example usage of android.view ScaleGestureDetector ScaleGestureDetector.

Prototype

public ScaleGestureDetector(Context context, OnScaleGestureListener listener) 

Source Link

Document

Creates a ScaleGestureDetector with the supplied listener.

Usage

From source file:com.example.android.enghack_receipt_scanner.OcrCaptureActivity.java

/**
 * Initializes the capture UI, verifies the camera permission, and wires up
 * the tap-to-capture and pinch-to-zoom gesture detectors.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    // Run full screen so the camera preview fills the display.
    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
            WindowManager.LayoutParams.FLAG_FULLSCREEN);

    // The action bar may be absent depending on the theme; tint it when present.
    ActionBar actionBar = getSupportActionBar();
    if (actionBar != null) {
        actionBar.setBackgroundDrawable(new ColorDrawable(Color.parseColor("#6A8347")));
    }

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Camera options forwarded by the launching intent.
    boolean wantAutoFocus = getIntent().getBooleanExtra(AutoFocus, true);
    boolean wantFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Build the camera source only once the CAMERA permission is granted;
    // otherwise prompt the user for it first.
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(wantAutoFocus, wantFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}

From source file:com.example.sherrychuang.splitsmart.Activity.OcrCaptureActivity.java

/**
 * Initializes the capture UI, seeds the item lists, checks the camera
 * permission, and wires up gesture detection, text-to-speech, and the
 * "done" button that forwards the captured items to the bill page.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Event handed over by the caller; forwarded again when the user is done.
    e = (Event) getIntent().getSerializableExtra("event");

    itemInputs = new ArrayList<>();
    itemInputsTest = new ArrayList<>();

    // Seed the test list with one hard-coded entry.
    List<Tag> emptyTags = new ArrayList<Tag>();
    itemInputsTest.add(new ItemInput(false, "Apple", "", emptyTags));
    itemInputsTest.get(0).setPrice("2");

    // Snapshot the item list into the parallel arrays that the "done"
    // button ships to the next activity.
    ItemAr = new String[itemInputs.size()];
    PriceAr = new String[itemInputs.size()];
    for (int i = 0; i < itemInputs.size(); i++) {
        ItemAr[i] = itemInputs.get(i).getItemName();
        PriceAr[i] = itemInputs.get(i).getPrice();
    }

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    boolean granted = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED;
    if (granted) {
        createCameraSource(true, false); // auto-focus on, flash off
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    // Bring up the text-to-speech engine; the language is applied once
    // initialization reports success.
    tts = new TextToSpeech(this.getApplicationContext(), new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("TTS", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("TTS", "Error starting the text to speech engine.");
            }
        }
    });

    // "Done" saves the captured items/prices and moves on to the bill page.
    Button doneButton = (Button) findViewById(R.id.done);
    doneButton.setOnClickListener(new View.OnClickListener() {
        public void onClick(View view) {
            Log.d("OcrCaptureActivity", "done");
            Intent myIntent = new Intent(OcrCaptureActivity.this, BillPage.class);
            myIntent.putExtra("Event", e);
            myIntent.putExtra("ItemInput", ItemAr);
            myIntent.putExtra("PriceInput", PriceAr);
            // NOTE(review): calling onPause() directly is unusual — lifecycle
            // callbacks are normally driven by the framework; presumably this
            // releases the camera before leaving. Confirm intent.
            onPause();
            OcrCaptureActivity.this.startActivity(myIntent);
        }
    });
}

From source file:com.jwork.spycamera.MainFragment.java

/**
 * Binds the preview surface, control buttons, and scale-gesture detectors.
 * This fragment acts as the click/touch listener for every widget it binds.
 */
@SuppressWarnings("deprecation")
private void initView(View view) {
    log.v(this, "initView()");

    layoutBlack = (LinearLayout) view.findViewById(R.id.blackLayout);
    layoutBlack.setOnTouchListener(this);
    layoutCenter = (LinearLayout) view.findViewById(R.id.linearLayoutCenter);

    // Camera preview surface: transparent, kept on top, and reporting
    // touches back to this fragment.
    svPreview = (SurfaceView) view.findViewById(R.id.svPreview);
    svPreview.setDrawingCacheQuality(100);
    svPreview.setDrawingCacheEnabled(true);
    svPreview.setZOrderOnTop(true);
    svPreview.setOnTouchListener(this);
    shPreview = svPreview.getHolder();
    shPreview.addCallback(this);
    // Deprecated on modern APIs, but kept for old devices.
    shPreview.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    shPreview.setFormat(PixelFormat.TRANSPARENT);

    // Every control button routes its clicks to this fragment's onClick().
    btnAuto = bindButton(view, R.id.btnAuto);
    btnBlack = bindButton(view, R.id.btnBlack);
    btnCapture = bindButton(view, R.id.btnCapture);
    btnFace = bindButton(view, R.id.btnFace);
    btnVideo = bindButton(view, R.id.btnVideo);
    btnSwitchCam = bindButton(view, R.id.btnSwitchCam);
    btnDecSize = bindButton(view, R.id.btnDecreaseSize);
    btnIncSize = bindButton(view, R.id.btnIncreaseSize);
    btnHelp = bindButton(view, R.id.btnHelp);
    btnSetting = bindButton(view, R.id.btnSetting);

    sgdPreview = new ScaleGestureDetector(activity, this);
    sgdBlack = new ScaleGestureDetector(activity, this);

    layoutBlack.setVisibility(View.INVISIBLE);
    layoutBlack.setOnTouchListener(this);

    // Hide features the device's API level cannot support.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
        btnFace.setVisibility(View.INVISIBLE);
    }
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.GINGERBREAD) {
        btnVideo.setVisibility(View.INVISIBLE);
    }
}

/** Looks up a button by id and registers this fragment as its click listener. */
private Button bindButton(View root, int id) {
    Button button = (Button) root.findViewById(id);
    button.setOnClickListener(this);
    return button;
}

From source file:org.ros.android.view.visualization.layer.CameraControlLayer.java

/**
 * Installs the camera-control gesture detectors (pan, rotate, zoom) once the
 * node starts. Construction is posted to the view so the detectors are
 * created on the view's UI thread.
 */
@Override
public void onStart(final VisualizationView view, ConnectedNode connectedNode) {
    view.post(new Runnable() {
        @Override
        public void run() {
            // One-finger drag translates (pans) the camera.
            translateGestureDetector = new GestureDetectorCompat(view.getContext(),
                    new GestureDetector.SimpleOnGestureListener() {
                        @Override
                        public boolean onDown(MotionEvent e) {
                            // This must return true in order for onScroll() to trigger.
                            return true;
                        }

                        @Override
                        public boolean onScroll(MotionEvent event1, MotionEvent event2, final float distanceX,
                                final float distanceY) {
                            // NOTE(review): X is negated while Y is not — presumably this
                            // matches the camera's axis convention; confirm against Camera.
                            view.getCamera().translate(-distanceX, distanceY);
                            // Fan the translation out to all registered listeners.
                            listeners.signal(new SignalRunnable<CameraControlListener>() {
                                @Override
                                public void run(CameraControlListener listener) {
                                    listener.onTranslate(-distanceX, distanceY);
                                }
                            });
                            return true;
                        }

                        @Override
                        public boolean onDoubleTap(final MotionEvent e) {
                            // Double-tap is only forwarded to listeners; the camera
                            // itself is not changed here.
                            listeners.signal(new SignalRunnable<CameraControlListener>() {
                                @Override
                                public void run(CameraControlListener listener) {
                                    listener.onDoubleTap(e.getX(), e.getY());
                                }
                            });
                            return true;
                        }
                    });
            // Two-finger twist rotates the camera about the midpoint of the
            // first two pointers.
            rotateGestureDetector = new RotateGestureDetector(
                    new RotateGestureDetector.OnRotateGestureListener() {
                        @Override
                        public boolean onRotate(MotionEvent event1, MotionEvent event2,
                                final double deltaAngle) {
                            final float focusX = (event1.getX(0) + event1.getX(1)) / 2;
                            final float focusY = (event1.getY(0) + event1.getY(1)) / 2;
                            view.getCamera().rotate(focusX, focusY, deltaAngle);
                            listeners.signal(new SignalRunnable<CameraControlListener>() {
                                @Override
                                public void run(CameraControlListener listener) {
                                    listener.onRotate(focusX, focusY, deltaAngle);
                                }
                            });
                            return true;
                        }
                    });
            // Pinch zooms the camera around the gesture's focal point.
            zoomGestureDetector = new ScaleGestureDetector(view.getContext(),
                    new ScaleGestureDetector.SimpleOnScaleGestureListener() {
                        @Override
                        public boolean onScale(ScaleGestureDetector detector) {
                            // Ignore callbacks that arrive outside an active gesture.
                            if (!detector.isInProgress()) {
                                return false;
                            }
                            final float focusX = detector.getFocusX();
                            final float focusY = detector.getFocusY();
                            final float factor = detector.getScaleFactor();
                            view.getCamera().zoom(focusX, focusY, factor);
                            listeners.signal(new SignalRunnable<CameraControlListener>() {
                                @Override
                                public void run(CameraControlListener listener) {
                                    listener.onZoom(focusX, focusY, factor);
                                }
                            });
                            return true;
                        }
                    });
        }
    });
}

From source file:divya.myvision.TessActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 *
 * <p>Capture options are read from the launching intent. The String extras
 * (FPS, font size, orientation, language) may be absent — {@code
 * getStringExtra} returns {@code null} in that case — so they are handled
 * null-safely instead of crashing with a NullPointerException.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);

    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<TessGraphic>) findViewById(R.id.graphicOverlay);

    // Read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
    String fps = getIntent().getStringExtra(FPS);
    String fontSize = getIntent().getStringExtra(FontSize);
    String orientation = getIntent().getStringExtra(Orientation);
    String lang = getIntent().getStringExtra(Lang);

    // Constant-first comparison: orientation may be null, and
    // orientation.equals(...) would throw a NullPointerException.
    if ("Landscape".equals(orientation)) {
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
    } else if ("Portrait".equals(orientation)) {
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    } else {
        // Missing or unrecognized value: follow the device sensor.
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR);
    }

    // Apply the requested font size only when a parseable value was supplied.
    if (fontSize != null) {
        try {
            Settings.setFontSize(Float.parseFloat(fontSize));
        } catch (NumberFormatException ignored) {
            // Malformed input: keep the current font size.
        }
    }

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash, fps);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, R.string.info_msg, Snackbar.LENGTH_LONG).show();

    setLang(lang);
}

From source file:com.stfalcon.frescoimageviewer.ImageViewerView.java

/**
 * Inflates the viewer layout and wires up swipe-to-dismiss, pinch-to-zoom,
 * single-tap handling, swipe-direction tracking, and page-change callbacks.
 */
private void init() {
    Context context = getContext();
    inflate(context, R.layout.image_viewer, this);

    backgroundView = findViewById(R.id.backgroundView);
    pager = (MultiTouchViewPager) findViewById(R.id.pager);

    // Dragging the dismiss view dismisses the whole viewer.
    dismissContainer = (ViewGroup) findViewById(R.id.container);
    swipeDismissListener = new SwipeToDismissListener(findViewById(R.id.dismissView), this, this);
    dismissContainer.setOnTouchListener(swipeDismissListener);

    scaleDetector = new ScaleGestureDetector(context,
            new ScaleGestureDetector.SimpleOnScaleGestureListener());

    // A confirmed single tap is reported only once the pager has settled.
    gestureDetector = new GestureDetectorCompat(context, new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onSingleTapConfirmed(MotionEvent e) {
            if (pager.isScrolled()) {
                onClick(e, isOverlayWasClicked);
            }
            return false;
        }
    });

    // Remember the latest swipe direction so touch dispatch can decide
    // whether a gesture belongs to the pager or to dismissal.
    directionDetector = new SwipeDirectionDetector(context) {
        @Override
        public void onDirectionDetected(Direction direction) {
            ImageViewerView.this.direction = direction;
        }
    };

    pager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
        @Override
        public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            // Stop playback as soon as the user starts swiping away.
            adapter.stopVideoPlayback();
        }

        @Override
        public void onPageSelected(int position) {
            // Intentionally empty: selection itself needs no handling.
        }

        @Override
        public void onPageScrollStateChanged(int state) {
            scrollState = state;
            if (state == ViewPager.SCROLL_STATE_IDLE) {
                modifyData();
            }
        }
    });
}

From source file:org.mklab.mikity.android.CanvasFragment.java

/**
 * {@inheritDoc}
 *
 * <p>Inflates the canvas layout, configures the GL surface and renderer,
 * and installs this fragment as both the touch listener and the
 * scale-gesture listener.
 *
 * @return the inflated fragment view
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    super.onCreateView(inflater, container, savedInstanceState);

    this.view = inflater.inflate(R.layout.fragment_canvas, container, false);
    this.view.setOnTouchListener(this);

    // 8/8/8/8 RGBA with a 16-bit depth buffer and 8-bit stencil.
    this.glView = (GLSurfaceView) this.view.findViewById(R.id.glview1);
    this.glView.setEGLConfigChooser(8, 8, 8, 8, 16, 8);

    // (Removed a dead `getResources();` call whose result was discarded.)

    // Default scene: eye position, look-at point, and light position.
    final ConfigurationModel configuration = new ConfigurationModel();
    configuration.setEye(new EyeModel(5.0f, 0.0f, 0.0f));
    configuration.setLookAtPoiint(new LookAtPointModel(0.0f, 0.0f, 0.0f));
    configuration.setLight(new LightModel(10.0f, 10.0f, 20.0f));

    this.objectRenderer = new OpenglesObjectRenderer(this.glView, configuration);
    this.modeler = new OpenglesModeler(this.objectRenderer);

    // Render on demand only; callers must request a redraw after changes.
    this.glView.setRenderer(this.objectRenderer);
    this.glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    this.isInitialScreenSize = false;

    createNewModelData();

    // Pinch gestures are delivered back to this fragment.
    this.gestureDetector = new ScaleGestureDetector(this.getActivity(), this);

    return this.view;
}

From source file:com.nice295.fridgeplease.OcrCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline: toolbar, camera
 * preview, runtime camera-permission check, tag views, gesture detectors,
 * the Firebase database reference, and a handler that publishes collected
 * tags back onto the tag view.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.activity_ocr_capture);

    Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    mTempItemArray = new ArrayList<String>();
    mTags = new ArrayList<Tag>();
    mTagsView = (TagView) findViewById(R.id.tags);

    // The tag click/delete/long-click listeners below were disabled; kept
    // for reference.
    /*
    //set click listener
    tags.setOnTagClickListener(new TagView.OnTagClickListener() {
    @Override
    public void onTagClick(Tag tag, int position) {
        Log.d(TAG, "onTagClick");
    }
    });
            
    //set delete listener
    tags.setOnTagDeleteListener(new TagView.OnTagDeleteListener() {
    @Override
    public void onTagDeleted(final TagView view, final Tag tag, final int position) {
        Log.d(TAG, "onTagDeleted");
    }
    });
            
    //set long click listener
    tags.setOnTagLongClickListener(new TagView.OnTagLongClickListener() {
    @Override
    public void onTagLongClick(Tag tag, int position) {
        Log.d(TAG, "onTagLongClick");
    }
    });
    */

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    /*
    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
        Snackbar.LENGTH_LONG)
        .show();
    */

    //Paper.init(this);

    mDatabase = FirebaseDatabase.getInstance().getReference();

    // Publishes accumulated tags to the tag view and re-enables the "add"
    // menu item. NOTE(review): this no-arg Handler binds to the creating
    // thread's looper (here: the main thread), and it assumes mMenu has
    // been populated by the time a message arrives — confirm the sender
    // only fires after onCreateOptionsMenu().
    handler = new Handler() {
        public void handleMessage(Message msg) {
            // Guard against concurrent mutation of mTags by the sender.
            synchronized (mTags) {
                mTagsView.addTags(mTags);
            }
            mMenu.findItem(R.id.action_add).setEnabled(true);
        }
    };
}

From source file:com.acceleratedio.pac_n_zoom.AnimActivity.java

/**
 * Sets up the animation editor: loads the picked bitmap into the main
 * ImageView, installs a touch listener that supports one-finger drag and
 * two-finger pinch zoom, wires the "save" button that snapshots and tears
 * down the animation views, and kicks off an async load of the animation.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_anm);
    orgnlImageView = (ImageView) findViewById(R.id.imageView);
    orgnlImageView.setMaxHeight(800);
    orgnlImageView.setMaxWidth(600);
    crt_ctx = this;

    BitmapFactory.Options bmp_opt = new BitmapFactory.Options();
    bmp_opt.inTargetDensity = DisplayMetrics.DENSITY_DEFAULT;

    // - Now we need to set the GUI ImageView data with data read from the picked file.
    DcodRszdBmpFil dcodRszdBmpFil = new DcodRszdBmpFil();
    Bitmap bmp = dcodRszdBmpFil.DcodRszdBmpFil(SelectImageActivity.orgFil, bmp_opt);

    // Now we need to set the GUI ImageView data with the orginal file selection.
    orgnlImageView.setImageBitmap(bmp);
    orgnl_iv_wdth = bmp.getWidth();
    orgnl_iv_hght = bmp.getHeight();
    final RelativeLayout rel_anm_lo = (RelativeLayout) findViewById(R.id.activity_anm_lo);
    scaleGestureDetector = new ScaleGestureDetector(this, new simpleOnScaleGestureListener());

    orgnlImageView.setOnTouchListener(new View.OnTouchListener() {

        @Override
        public boolean onTouch(View v, MotionEvent event) {

            // Multi-touch (or an in-progress scale) is routed to the scale
            // detector; single-finger events below implement dragging.
            if (event.getPointerCount() > 1 || flgInScale) {

                scaleGestureDetector.onTouchEvent(event);
                return true;
            }

            int end_hrz;
            int end_vrt;
            final int pointerIndex;

            // NOTE(review): no default branch; unhandled actions fall through
            // to the invalidate() below.
            switch (event.getAction()) {

            case MotionEvent.ACTION_DOWN:

                // Record where the drag started, both in touch coordinates
                // and in the view's current layout margins.
                pointerIndex = MotionEventCompat.getActionIndex(event);
                bgn_hrz = (int) MotionEventCompat.getX(event, pointerIndex);
                bgn_vrt = (int) MotionEventCompat.getY(event, pointerIndex);

                String log_str = "Beginning coordinates: Horz = " + String.valueOf(bgn_hrz) + "; Vert = "
                        + String.valueOf(bgn_vrt);

                Log.d("OnTouchListener", log_str);
                orlp = (RelativeLayout.LayoutParams) orgnlImageView.getLayoutParams();
                bgn_top = (int) orlp.topMargin;
                bgn_lft = (int) orlp.leftMargin;

                // To prevent an initial jump of the magnifier, aposX and aPosY must
                // have the values from the magnifier frame
                if (aPosX == 0)
                    aPosX = orgnlImageView.getX();
                if (aPosY == 0)
                    aPosY = orgnlImageView.getY();
                break;

            case MotionEvent.ACTION_MOVE:

                // Translate the image by the finger's delta since ACTION_DOWN.
                // (log_str is the switch-scoped variable declared above.)
                pointerIndex = MotionEventCompat.getActionIndex(event);
                float crt_hrz = MotionEventCompat.getX(event, pointerIndex);
                float crt_vrt = MotionEventCompat.getY(event, pointerIndex);
                final float dx = crt_hrz - bgn_hrz;
                final float dy = crt_vrt - bgn_vrt;
                aPosX += dx;
                aPosY += dy;
                orgnlImageView.setX(aPosX);
                orgnlImageView.setY(aPosY);

                log_str = "Current Position: Horz = " + String.valueOf(crt_hrz) + "; Vert = "
                        + String.valueOf(crt_vrt);

                Log.d("OnTouchListener", log_str);

                break;

            case MotionEvent.ACTION_UP:

                // NOTE(review): the end coordinates are computed but never
                // used; last case, so no break is required.
                pointerIndex = MotionEventCompat.getActionIndex(event);
                end_hrz = (int) MotionEventCompat.getX(event, pointerIndex);
                end_vrt = (int) MotionEventCompat.getY(event, pointerIndex);
            }

            rel_anm_lo.invalidate();
            return true;
        }
    });

    sav_anm_btn = (Button) findViewById(R.id.sav_btn);

    sav_anm_btn.setOnClickListener(new View.OnClickListener() {

        public void onClick(View vw) {

            // Snapshot a thumbnail, cancel and tear down every animation
            // view (recycling their bitmaps), snapshot the background,
            // then move on to the save screen.
            onClickFlg = 1;
            RelativeLayout rel_anm_lo = (RelativeLayout) findViewById(R.id.activity_anm_lo);
            rel_anm_lo.removeView(vw);
            Bitmap tnBmp = getWrtBmp("thumbnail", rel_anm_lo, 40);
            tnBmp.recycle();
            int vw_nmbr = anmViews.size();

            // NOTE(review): iteration starts at 1 — presumably index 0 is
            // the background view; confirm against anmViews' producer.
            for (int vw_mbr = 1; vw_mbr < vw_nmbr; vw_mbr += 1) {

                anim_view = anmViews.get(vw_mbr);

                if (anim_view != null) {

                    Animation crt_anm = anim_view.getAnimation();

                    if (crt_anm != null)
                        crt_anm.cancel();

                    anim_view.setAnimation(null);
                    rel_anm_lo.removeView(anim_view);

                    // Garbage collect the bitmap
                    Drawable drawable = anim_view.getDrawable();

                    if (drawable instanceof BitmapDrawable) {
                        BitmapDrawable bitmapDrawable = (BitmapDrawable) drawable;
                        Bitmap anim_bmp = bitmapDrawable.getBitmap();
                        anim_bmp.recycle();
                    }
                }
            }

            Bitmap orgnlImageBmp = getWrtBmp("bgimg", rel_anm_lo, 90);
            orgnlImageWdth = Integer.toString(orgnlImageBmp.getWidth());
            orgnlImageHght = Integer.toString(orgnlImageBmp.getHeight());
            anmViews.clear();
            unbindDrawables(rel_anm_lo);
            ((RelativeLayout) rel_anm_lo).removeAllViews();
            orgnlImageBmp.recycle();
            crt_ctx = null;
            orgnlImageView = null;

            Intent intent = new Intent(AnimActivity.this, com.acceleratedio.pac_n_zoom.SaveAnmActivity.class);

            startActivity(intent);
        }
    });

    // Show a modal progress dialog while the animation loads in the background.
    progress = ProgressDialog.show(crt_ctx, "Loading the animation", "dialog message", true);
    GetRequest get_svg_img = new GetRequest();
    get_svg_img.execute("");
}

From source file:com.masseyhacks.sjam.cheqout.ScannerActivity.java

/**
 * Initializes the scanner UI: the cart shortcut button, the camera preview,
 * the runtime camera-permission check, and the tap/pinch gesture detectors.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_scanner);

    // The floating action button opens the cart with everything scanned so far.
    FloatingActionButton cartButton = (FloatingActionButton) findViewById(R.id.fab);
    cartButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            Intent cartIntent = new Intent(getApplicationContext(), CartActivity.class);
            cartIntent.putExtra("items", items);
            startActivity(cartIntent);
        }
    });

    items = new LinkedHashMap();

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // Camera options forwarded by the launching intent.
    boolean focusEnabled = getIntent().getBooleanExtra(AutoFocus, true);
    boolean flashEnabled = getIntent().getBooleanExtra(UseFlash, false);

    // Build the camera source right away when the CAMERA permission is
    // already held; otherwise ask the user for it first.
    boolean granted = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED;
    if (granted) {
        createCameraSource(focusEnabled, flashEnabled);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());
}