hr.abunicic.angular.CameraActivity.java Source code

Introduction

Here is the source code for hr.abunicic.angular.CameraActivity.java

Source

package hr.abunicic.angular;

import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Vibrator;
import android.support.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.CardView;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.method.ScrollingMovementMethod;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.Button;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.Toast;

import com.konifar.fab_transformation.FabTransformation;
import com.microblink.directApi.DirectApiErrorListener;
import com.microblink.directApi.Recognizer;
import com.microblink.hardware.orientation.Orientation;
import com.microblink.image.Image;
import com.microblink.recognition.FeatureNotSupportedException;
import com.microblink.recognition.InvalidLicenceKeyException;
import com.microblink.recognizers.BaseRecognitionResult;
import com.microblink.recognizers.RecognitionResults;
import com.microblink.recognizers.blinkocr.BlinkOCRRecognitionResult;
import com.microblink.recognizers.blinkocr.BlinkOCRRecognizerSettings;
import com.microblink.recognizers.blinkocr.parser.generic.RawParserSettings;
import com.microblink.recognizers.settings.RecognitionSettings;
import com.microblink.recognizers.settings.RecognizerSettings;
import com.microblink.results.ocr.OcrChar;
import com.microblink.results.ocr.OcrResult;
import com.microblink.view.recognition.ScanResultListener;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import hr.abunicic.angular.databasehandler.DatabaseHandler;
import hr.abunicic.angular.databasehandler.ShapeInDatabase;
import hr.abunicic.angular.engine.JobScheduler;
import hr.abunicic.angular.engine.PrepareTask;
import hr.abunicic.angular.geometryobjects.Line;
import hr.abunicic.angular.heuristics.shapes.Shape;
import hr.abunicic.angular.engine.RecognitionMethods;
import hr.abunicic.angular.heuristics.ShapeHeuristic;
import hr.abunicic.angular.engine.VisionEngineResults;
import hr.abunicic.angular.heuristics.shapes.polygon.DefaultPolygon;
import hr.abunicic.angular.heuristics.shapes.polygon.Polygon;

/**
 * Created by Antonio Buničić on 03/03/16.
 * Angular's main Activity.
 * Contains methods that initialize the camera, its preview and its responsiveness to touch,
 * methods that set up all custom views, populate the drawer with data from the database, etc.
 * Processing of camera preview frames and detection and recognition of geometric shapes are also started from this Activity.
 */

public class CameraActivity extends AppCompatActivity implements SensorEventListener, ScanResultListener {
    static {
        System.loadLibrary("opencv_java3");
    }

    public static Bitmap croppedBitmap = null;

    //Instance of Camera used for getting pictures from the phone's camera hardware
    private Camera mCamera = null;
    //Preview of input from camera.
    private static CameraView mCameraView = null;
    //Layout that contains camera preview
    FrameLayout preview = null;
    //Camera parameters
    static Camera.Parameters params;
    //Flag that says whether detection is stopped to show the preview of the currently detected shape
    private boolean startPreview = false;

    //Focus parameters
    private int FOCUS_AREA_SIZE = 400;

    //Touch coordinates
    private float touchX;
    private float touchY;

    //Recognized number coordinates
    private static float numberX;
    private static float numberY;

    //Line length
    public static String lineLength;
    //Selected line
    public static Line selectedLine;
    //Scheduler of tasks
    public static int inScheduler = 0;

    //Instance of VisionEngineResults where the results of the analysis of every frame are stored in order to be shown on screen
    public static VisionEngineResults rp;

    //CornersView is this app's custom view that's used for selecting and cropping the interesting part of the frame,
    //but also for informing the user if the phone is not aligned correctly
    private CornersView cornersView;
    private FrameLayout shadow;

    //Screen size parameters
    public static int screenHeight;
    public static int screenWidth;

    //Image size parameters
    public static int imageHeight;
    public static int imageWidth;

    //FAB button for stopping detection and focusing on current shape
    FloatingActionButton fabCapture;

    //Card with most important info about currently detected shape
    private CardView cardBottom;

    //Title and info text inside the card at the bottom
    private static TextView tvCardTitle;
    private static TextView tvShape;

    //Left drawer, RecyclerView and its adapter for showing data from the database in the history drawer
    private DrawerLayout drawerLayout;
    private RecyclerView recyclerView;
    private CustomAdapter adapter;
    TextView infoNothingSaved;
    ArrayList historyItems;

    //Sensor parameters
    private SensorManager mSensorManager;
    private Sensor mGyroSensor;
    double angularXSpeed = 0;
    double angularYSpeed = 0;

    //Database handler instance
    private DatabaseHandler db = null;

    private Recognizer mRecognizer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        ApplicationContext.getInstance().init(getApplicationContext());
        setContentView(R.layout.activity_camera);

        //Left drawer
        drawerLayout = (DrawerLayout) findViewById(R.id.camera_activity_drawer_layout);

        //Binding the RecyclerView
        recyclerView = (RecyclerView) findViewById(R.id.r_list);
        recyclerView.setHasFixedSize(true);

        //Informative text when database is empty
        infoNothingSaved = (TextView) findViewById(R.id.infoNothing);

        //Getting all saved shapes from the database and populating the RecyclerView
        db = new DatabaseHandler(CameraActivity.this);
        historyItems = (ArrayList) db.getAllShapes();
        setHistoryItems();

        //Card at the bottom
        cardBottom = (CardView) findViewById(R.id.cardBottom);
        cardBottom.bringToFront();
        //Elements inside the CardView
        tvShape = (TextView) findViewById(R.id.tvGeomLik);
        tvCardTitle = (TextView) findViewById(R.id.titleText);
        tvShape.setMovementMethod(new ScrollingMovementMethod());
        final ImageButton imgSave = (ImageButton) findViewById(R.id.imgSave);

        //Getting instance of the sensor service
        mSensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
        mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);

        //Initializing the camera and setting up screen size
        initCamera();
        DisplayMetrics displayMetrics = new DisplayMetrics();
        this.getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
        screenHeight = displayMetrics.heightPixels;
        screenWidth = displayMetrics.widthPixels;

        //Flash button
        final ImageButton imgFlash = (ImageButton) findViewById(R.id.imgFlash);
        if (this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
            imgFlash.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    if (params.getFlashMode().equals(Camera.Parameters.FLASH_MODE_TORCH)) {
                        params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);

                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                            imgFlash.setBackground(getResources().getDrawable(R.drawable.ic_flash_on_white_36dp));
                        }
                    } else {
                        params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);

                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                            imgFlash.setBackground(getResources().getDrawable(R.drawable.ic_flash_off_white_36dp));
                        }
                    }
                    mCamera.setParameters(params);
                }
            });
        } else {
            imgFlash.setVisibility(View.GONE);
        }

        //Delete all button in the drawer view
        ImageButton imgDeleteAll = (ImageButton) findViewById(R.id.imgDeleteAll);
        imgDeleteAll.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                db.deleteAllShapes();
                historyItems.clear();

                setHistoryItems();
            }
        });

        //Menu icon for opening the drawer
        ImageButton imgMenu = (ImageButton) findViewById(R.id.imgMenu);
        imgMenu.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                drawerLayout.openDrawer(Gravity.LEFT);
            }
        });

        //Fab button functionality
        fabCapture = (FloatingActionButton) findViewById(R.id.fab);
        fabCapture.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startDetection();
                fabCapture.setClickable(false);
                FabTransformation.with(fabCapture).transformTo(cardBottom);

                imgSave.setVisibility(View.VISIBLE);

                imgSave.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        Animation animation = AnimationUtils.loadAnimation(CameraActivity.this, R.anim.anim);
                        mCameraView.startAnimation(animation);

                        Toast toast = Toast.makeText(getApplicationContext(),
                                getResources().getString(R.string.toast_saved), Toast.LENGTH_LONG); //TODO: add this string to strings.xml and to the English version
                        toast.setGravity(Gravity.TOP | Gravity.CENTER_HORIZONTAL, 0, 0);
                        toast.show();

                        ShapeInDatabase shapeToAdd = new ShapeInDatabase(mCameraView.getBitmap(),
                                tvCardTitle.getText().toString(), tvShape.getText().toString());
                        db.addShape(shapeToAdd);
                        historyItems.add(0, shapeToAdd);

                        setHistoryItems();

                    }
                });

                if (rp != null) {
                    Shape shape = ShapeHeuristic.getShape(rp);
                    if (shape != null) {
                        tvCardTitle.setText(shape.getName());
                        tvShape.setText(shape.toString());
                        if (shape instanceof DefaultPolygon && ((DefaultPolygon) shape).getN() == 5) {
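                            //Hardcoded Croatian UI text: "Pravilni peterokut" = "Regular pentagon";
                            //the description reads "All sides of a regular pentagon have equal length", with a = 5 cm, P (area) = 43.01 and O (perimeter) = 25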
                            tvCardTitle.setText("Pravilni peterokut");
                            tvShape.setText(
                                    "Sve stranice pravilnog peterokuta su jednake duljine. \n     a = 5 cm \n     P = 43.01 \n     O = 25");
                        }
                    }
                }

            }
        });

        //Button inside the card for going back
        Button buttonBack = (Button) findViewById(R.id.buttonBack);
        buttonBack.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                startDetection();

                FabTransformation.with(fabCapture).transformFrom(cardBottom);
                fabCapture.setClickable(true);

                imgSave.setVisibility(View.INVISIBLE);
            }
        });

        //Button inside the card for opening the ResultActivity with more info about the shape
        Button buttonMore = (Button) findViewById(R.id.buttonMore);
        buttonMore.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Shape shape = ShapeHeuristic.getShape(rp);
                if (shape instanceof DefaultPolygon && ((DefaultPolygon) shape).getN() == 5) {
                    Intent intentPeterokut = new Intent(CameraActivity.this, PeterokutActivity.class);
                    startActivity(intentPeterokut);
                } else {
                    Intent intent = new Intent(CameraActivity.this, ResultActivity.class);
                    intent.putExtra("RESULT_TITLE", tvCardTitle.getText().toString());
                    intent.putExtra("RESULT_INFO", tvShape.getText().toString());

                    ByteArrayOutputStream bos = new ByteArrayOutputStream();
                    mCameraView.getBitmap().compress(Bitmap.CompressFormat.PNG, 100, bos);
                    byte[] byteArray = bos.toByteArray();

                    intent.putExtra("RESULT_IMAGE", byteArray);
                    startActivity(intent);
                }

            }
        });

        //Corners View
        DisplayMetrics displaymetrics = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
        cornersView = (CornersView) findViewById(R.id.cornersView);
        shadow = (FrameLayout) findViewById(R.id.shadowLayout);
        cornersView.setShadow(shadow);

        //Starting the detection process
        startDetection();

        //Microblink OCR
        try {
            mRecognizer = Recognizer.getSingletonInstance();
        } catch (FeatureNotSupportedException e) {
            Toast.makeText(CameraActivity.this, "Feature not supported! Reason: " + e.getReason().getDescription(),
                    Toast.LENGTH_LONG).show();
            finish();
            return;
        }
        try {
            // set license key
            mRecognizer.setLicenseKey(CameraActivity.this,
                    "Y5352CQ5-A7KVPD26-UOAUEX4P-D2GQM63S-J6TCRGNH-T5WFKI24-QQZJRAXL-AT55KX4N");
        } catch (InvalidLicenceKeyException exc) {
            finish();
            return;
        }
        RecognitionSettings settings = new RecognitionSettings();
        // setupSettingsArray method is described in the chapter "Recognition settings and results"
        settings.setRecognizerSettingsArray(setupSettingsArray());
        mRecognizer.initialize(CameraActivity.this, settings, new DirectApiErrorListener() {
            @Override
            public void onRecognizerError(Throwable t) {
                Toast.makeText(CameraActivity.this,
                        "There was an error in initialization of Recognizer: " + t.getMessage(), Toast.LENGTH_SHORT)
                        .show();
                finish();
            }
        });

    }

    @Override
    protected void onPause() {
        super.onPause();

        //Unregistering the sensor when the activity is paused
        mSensorManager.unregisterListener(this);

        //Turning off flash if it's on when the activity is paused
        if (params.getFlashMode().equals(Camera.Parameters.FLASH_MODE_TORCH)) {
            params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
        }
    }

    @Override
    protected void onResume() {
        super.onResume();

        //Registering the sensor when the activity is resumed
        mSensorManager.registerListener(this, mGyroSensor, SensorManager.SENSOR_DELAY_NORMAL);

        //Reinitializing the camera
        preview.removeAllViews();
        initCamera();
        startPreview = false;

        //Restarting detection only if no shape card is currently shown
        if (cardBottom.getVisibility() == View.INVISIBLE)
            startDetection();
    }

    @Override
    public void onBackPressed() {
        if (cardBottom.getVisibility() == View.VISIBLE) {
            FabTransformation.with(fabCapture).transformFrom(cardBottom);
            fabCapture.setClickable(true);
            startDetection();
        }

        if (drawerLayout.isDrawerOpen(Gravity.LEFT)) {
            drawerLayout.closeDrawer(Gravity.LEFT);
        }
    }

    @Override
    public void onSensorChanged(SensorEvent event) {
        //Getting orientation of the device
        angularXSpeed = event.values[0] * 180;
        angularYSpeed = event.values[1] * 180;

        //If both the X and Y axes are tilted, only one corner turns red; choose which one
        if (-Conf.maxTiltOffSet > angularXSpeed && -Conf.maxTiltOffSet > angularYSpeed) {
            cornersView.setCornersColor(Conf.CORNER_COLOR_GOOD);
            cornersView.setSingleCornerColor(Conf.TOPLEFT, Conf.CORNER_COLOR_BAD);
        } else if (-Conf.maxTiltOffSet > angularXSpeed && angularYSpeed > Conf.maxTiltOffSet) {
            cornersView.setCornersColor(Conf.CORNER_COLOR_GOOD);
            cornersView.setSingleCornerColor(Conf.TOPRIGHT, Conf.CORNER_COLOR_BAD);
        } else if (Conf.maxTiltOffSet < angularXSpeed && angularYSpeed > Conf.maxTiltOffSet) {
            cornersView.setCornersColor(Conf.CORNER_COLOR_GOOD);
            cornersView.setSingleCornerColor(Conf.BOTTOMRIGHT, Conf.CORNER_COLOR_BAD);
        } else if (Conf.maxTiltOffSet < angularXSpeed && -Conf.maxTiltOffSet > angularYSpeed) {
            cornersView.setCornersColor(Conf.CORNER_COLOR_GOOD);
            cornersView.setSingleCornerColor(Conf.BOTTOMLEFT, Conf.CORNER_COLOR_BAD);

            //If Y axis is ok, but X is tilted, two corners will be red to show it
        } else if (-Conf.maxTiltOffSet > angularXSpeed && -Conf.maxTiltOffSet < angularYSpeed
                && angularYSpeed < Conf.maxTiltOffSet) {
            cornersView.setSingleCornerColor(Conf.TOPLEFT, Conf.CORNER_COLOR_BAD);
            cornersView.setSingleCornerColor(Conf.TOPRIGHT, Conf.CORNER_COLOR_BAD);
        } else if (Conf.maxTiltOffSet < angularXSpeed && -Conf.maxTiltOffSet < angularYSpeed
                && angularYSpeed < Conf.maxTiltOffSet) {
            cornersView.setSingleCornerColor(Conf.BOTTOMLEFT, Conf.CORNER_COLOR_BAD);
            cornersView.setSingleCornerColor(Conf.BOTTOMRIGHT, Conf.CORNER_COLOR_BAD);

            //If X axis is ok, but Y is tilted, two corners will be red to show it
        } else if (-Conf.maxTiltOffSet < angularXSpeed && angularXSpeed < Conf.maxTiltOffSet
                && -Conf.maxTiltOffSet > angularYSpeed) {
            cornersView.setSingleCornerColor(Conf.TOPLEFT, Conf.CORNER_COLOR_BAD);
            cornersView.setSingleCornerColor(Conf.BOTTOMLEFT, Conf.CORNER_COLOR_BAD);
        } else if (-Conf.maxTiltOffSet < angularXSpeed && angularXSpeed < Conf.maxTiltOffSet
                && angularYSpeed > Conf.maxTiltOffSet) {
            cornersView.setSingleCornerColor(Conf.TOPRIGHT, Conf.CORNER_COLOR_BAD);
            cornersView.setSingleCornerColor(Conf.BOTTOMRIGHT, Conf.CORNER_COLOR_BAD);

        } else {
            cornersView.setCornersColor(Conf.CORNER_COLOR_GOOD);
        }

        cornersView.setInvalidate();

    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

    /**
     * Method that populates and updates the recyclerView
     */
    public void setHistoryItems() {
        infoNothingSaved.setVisibility(historyItems.size() == 0 ? View.VISIBLE : View.GONE);
        adapter = new CustomAdapter(CameraActivity.this, historyItems);
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        recyclerView.setAdapter(adapter);
    }

    /**
     * Method that starts detection of shapes.
     */
    public void startDetection() {
        startPreview = !startPreview;

        mCamera.startPreview();
        mCamera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {

                //Getting engine results
                VisionEngineResults res = JobScheduler.getEngineResults();
                if (res != null) {
                    rp = res;

                    updateDescription();
                }

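                //Process this frame only while fewer than three frames are queued for processing (inScheduler) and detection is active (startPreview)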
                if (inScheduler < 3 && startPreview) {

                    inScheduler++;

                    cornersView.setVisibility(View.VISIBLE);

                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    params = mCamera.getParameters();
                    Camera.Size size = params.getPreviewSize();

                    //Compressing frame image to JPEG and then to byte array
                    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
                    Rect rectangle = new Rect();
                    rectangle.bottom = size.height;
                    rectangle.top = 0;
                    rectangle.left = 0;
                    rectangle.right = size.width;
                    yuvImage.compressToJpeg(rectangle, 20, out);
                    byte[] imageBytes = out.toByteArray();

                    //Starting the PrepareTask for this frame
                    new PrepareTask().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, imageBytes);
                    /*
                    Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
                    try {
                        Matrix matrix = new Matrix();
                        matrix.postRotate(90);

                        Bitmap scaledBitmap = Bitmap.createScaledBitmap(bitmap, bitmap.getWidth(), bitmap.getHeight(), true);
                        Bitmap rotatedBitmap = Bitmap.createBitmap(scaledBitmap, 0, 0, scaledBitmap.getWidth(), scaledBitmap.getHeight(), matrix, true);
                        //storeImage(rotatedBitmap);
                        if (mRecognizer.getCurrentState().equals(Recognizer.State.READY)) {
                            mRecognizer.recognizeBitmap(rotatedBitmap, Orientation.ORIENTATION_LANDSCAPE_RIGHT, CameraActivity.this);
                        }
                    } catch (IllegalStateException e) {
                    }
                    */

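                    //If a cropped bitmap of the current shape is available and the recognizer is idle, save it and run Microblink OCR on it (results arrive in onScanningDone)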
                    if (croppedBitmap != null) {
                        if (mRecognizer.getCurrentState().equals(Recognizer.State.READY)) {
                            storeImage(croppedBitmap);
                            mRecognizer.recognizeBitmap(croppedBitmap, Orientation.ORIENTATION_LANDSCAPE_RIGHT,
                                    CameraActivity.this);
                        }
                    }

                    camera.startPreview();

                } else if (!startPreview) {
                    cornersView.setVisibility(View.GONE);
                }

            }
        });
    }

    private RecognizerSettings[] setupSettingsArray() {
        BlinkOCRRecognizerSettings sett = new BlinkOCRRecognizerSettings();

        RawParserSettings rawSett = new RawParserSettings();

        // add raw parser with name "Raw" to default parser group
        // parser name is important for obtaining results later
        sett.addParser("Raw", rawSett);

        // now add sett to recognizer settings array that is used to configure
        // recognition
        return new RecognizerSettings[] { sett };
    }

    /**
     * Method that initializes the camera.
     */
    void initCamera() {
        try {
            mCamera = Camera.open();
        } catch (Exception e) {
            Log.d("ERROR", "Failed to get camera: " + e.getMessage());
        }

        if (mCamera != null) {
            //Creating a CameraView instance to show camera data
            mCameraView = new CameraView(this, mCamera);
            preview = (FrameLayout) findViewById(R.id.camera_view);
            //Adding the CameraView to the layout
            preview.addView(mCameraView);
            params = mCamera.getParameters();

            List<Camera.Size> ls = params.getSupportedPreviewSizes();
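            //Use the second reported preview size (index 1); this assumes the device supports at least two preview sizes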
            Camera.Size size = ls.get(1);
            params.setPreviewSize(size.width, size.height);

            //Setting focus mode
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
            mCamera.setParameters(params);
            mCamera.setDisplayOrientation(90);
        }

        mCameraView.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                Vibrator vib = (Vibrator) getApplicationContext().getSystemService(Context.VIBRATOR_SERVICE);
                vib.vibrate(60);

                //Determining which line is selected
                if (rp != null) {
                    try {
                        selectedLine = getTouchedLine(touchX, touchY);
                        selectedLine.color = Color.BLUE;
                    } catch (Exception e) {
                    }
                }

                //AlertDialog for changing the length of the line
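                //(Dialog strings are Croatian: "Duljina stranice:" = "Side length:", "U redu" = "OK")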
                AlertDialog.Builder alert = new AlertDialog.Builder(CameraActivity.this);
                final EditText edittext = new EditText(CameraActivity.this);

                alert.setMessage("Duljina stranice: ");
                alert.setView(edittext);

                alert.setPositiveButton("U redu", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int whichButton) {
                        lineLength = edittext.getText().toString();
                        selectedLine.color = Color.CYAN;
                        Log.d("ocr", "Nova duljina linije: " + lineLength + " ");
                        selectedLine = getTouchedLine(numberX, numberY);
                        RecognitionMethods.refreshLines(rp.getLineSegments(), selectedLine, lineLength);
                        updateDescription();
                    }
                });

                alert.show();

                return true;
            }
        });

        mCameraView.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                Camera camera = mCamera;
                camera.cancelAutoFocus();

                touchX = event.getX();
                touchY = event.getY();

                return false;
            }

        });

    }

    /**
     * Method which is called to determine which line was selected.
     * @param touchX X coordinate
     * @param touchY Y coordinate
     * @return line
     */
    public static Line getTouchedLine(float touchX, float touchY) {
        double distance;
        double[] pointA;
        double[] pointB;
        double[] pointC = { (double) touchX, (double) touchY };

        //Width and height are reversed due to rotation
        float ratioX = params.getPreviewSize().height / (float) screenWidth;
        float ratioY = params.getPreviewSize().width / (float) screenHeight;

        double minDist = 99999;
        Line minLine = null;

        for (Line line : rp.getLineSegments()) {
            float x1 = (float) (CornersView.mLeftTopPosX + (line.p1.x + rp.getStartPoint().x) * ratioX);
            float y1 = (float) (CornersView.mLeftTopPosY + (line.p1.y + rp.getStartPoint().y) * ratioY);
            float x2 = (float) (CornersView.mLeftTopPosX + (line.p2.x + rp.getStartPoint().x) * ratioX);
            float y2 = (float) (CornersView.mLeftTopPosY + (line.p2.y + rp.getStartPoint().y) * ratioY);

            pointA = new double[] { x1, y1 };
            pointB = new double[] { x2, y2 };

            distance = RecognitionMethods.LineToPointDistance2D(pointA, pointB, pointC, true);

            if (distance < minDist) {
                minDist = distance;
                minLine = line;
            }
        }

        mCameraView.setDrawing(rp.getLineSegments(), rp.getAngles(), rp.getStartPoint(), screenWidth, screenHeight);
        return minLine;
    }

    /**
     * Method that updates the drawing.
     */
    public static void update() {
        if (CameraActivity.rp != null) {

            mCameraView.setDrawing(CameraActivity.rp.getLineSegments(), CameraActivity.rp.getAngles(),
                    CameraActivity.rp.getStartPoint(), imageWidth, imageHeight);
        }
    }

    /**
     * Method that updates the drawing.
     * @param rp engine result
     */
    public static void update(VisionEngineResults rp) {
        try {
            selectedLine = getTouchedLine(numberX, numberY);
            RecognitionMethods.refreshLines(rp.getLineSegments(), selectedLine, lineLength);
            Log.d("ocr", "line " + lineLength + " " + numberX + " " + numberY + " " + selectedLine);
        } catch (Exception e) {
            Log.d("ocr", "greska");
        }
        for (Line l : rp.getLineSegments()) {
            Log.d("ocr", "l..." + l.toString() + " " + l.length);
        }
        mCameraView.setDrawing(rp.getLineSegments(), rp.getAngles(), rp.getStartPoint(), imageWidth, imageHeight);
    }

    /**
     * Method that updates the description in the card at the bottom of the screen.
     */
    public static void updateDescription() {
        if (rp != null) {
            Shape shape = ShapeHeuristic.getShape(rp);
            if (shape != null) {
                tvCardTitle.setText(shape.getName());
                tvShape.setText(shape.toString());
                if (shape instanceof DefaultPolygon && ((DefaultPolygon) shape).getN() == 5) {
                    tvCardTitle.setText("Pravilni peterokut");
                    tvShape.setText(
                            "Sve stranice pravilnog peterokuta su jednake duljine. \n     a = 5 cm \n     P = 43.01 \n     O = 25");
                }
            }
            update(rp);
        }
    }

    @Override
    public void onScanningDone(@Nullable RecognitionResults results) {

        BaseRecognitionResult[] dataArray = null;
        if (results != null) {
            dataArray = results.getRecognitionResults();
        }

        if (dataArray != null && dataArray.length == 1) {
            if (dataArray[0] instanceof BlinkOCRRecognitionResult) {
                BlinkOCRRecognitionResult result = (BlinkOCRRecognitionResult) dataArray[0];

                String scanned = result.getParsedResult("Raw");
                OcrResult ocrResult = result.getOcrResult();

                // method 'findString' is not optimized for speed
                OcrResultIterator iter = findString(" ", ocrResult);
                Log.d("ocr", "Scanned: " + scanned + "!!!");

                String num = "";

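                //Walk the OCR result character by character, collecting digits and decimal points into num.
                //When a non-numeric character ends the number, its position is used to find the nearest
                //detected line (getTouchedLine) and that line's length is updated via refreshLines.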
                if (iter != null) {
                    while (iter.hasNext()) {
                        iter.moveToNext();

                        if (isNumber(iter) || (!num.equals("") && !num.substring(num.length() - 1).equals(".")
                                && String.valueOf(iter.getCurrentChar().getValue()).equals("."))) {

                            String s = String.valueOf(iter.getCurrentChar().getValue());
                            num = num + s;
                            Log.d("ocr", "Znak je broj: " + s + " ");
                            Log.d("ocr", "Appendano: " + num + " ");

                        } else if (!num.equals("")) {
                            OcrChar ch = iter.getCurrentChar();
                            numberX = ch.getPosition().getX();
                            numberY = ch.getPosition().getY();

                            try {

                                lineLength = num;

                                Log.d("ocr", "Nova duljina linije: " + lineLength + " ");
                                selectedLine = getTouchedLine(numberX, numberY);
                                RecognitionMethods.refreshLines(rp.getLineSegments(), selectedLine, lineLength);
                                updateDescription();

                                break;

                            } catch (Exception e) {
                            }
                        }

                    }
                }

                // if(scanned != null && !scanned.isEmpty()) {
                //ocrResult = result.getOcrResult();

                // OcrResultIterator iter = stringMatch("i",ocrResult);
                // int i = 0;
                /*
                while(iter.hasNext()) {
                    i++;
                    String s = String.valueOf(iter.getCurrentChar().getValue());
                    Log.d("ocr", "Trenutni iter " + s+" "+i);
                    
                    if (s != null) {
                    
                        s = s.trim();
                    
                        if (!s.equals("")) {
                            if (isNumber(iter)) {
                    
                                Log.d("ocr", "iter=daa");
                    
                                OcrChar ch = iter.getCurrentChar();
                    
                                final float numberX = ch.getPosition().getX();
                                final float numberY = ch.getPosition().getY();
                    
                                selectedLine = getTouchedLine(numberX,numberY);
                                lineLength = s;
                    
                                RecognitionMethods.refreshLines(rp.getLineSegments(),selectedLine,lineLength);
                                updateDescription();
                            }
                        }
                    
                    
                    }
                }
                    
                    
                if (iterCM != null) { // we found "Carat Weight"
                    // first display the result in text (from found "Carat Weight" position onwards)
                    //mOcrResult.setText("Horizontal text: " + iteratorToString(iterCM));
                    
                   // if(mLastScannedImage != null) {
                        OcrChar ch = iterCM.getCurrentChar();
                    
                        Log.d("ocr","ch="+ ch+"!!!");
                        // now determine position of image where vertical text is expected
                        // let's say it is the whole vertical strip of image ending with
                        // first letter of string "Carat Weight"
                    
                        final float charX = ch.getPosition().getX();
                        final float charY = ch.getPosition().getY();
                         Log.d("ocr","ch="+ charX+" "+charY+"!!!");
                    
                        // determine region that will be scanned for vertical text as vertical
                        // strip left of string "Carat Weight"
                    
                       Rect roi = new Rect((int) charX-150, (int)charY-150, (int) charX+150, (int)charY+150);
                    
                        // set the calculated ROI to image that we will scan
                        try{
                            mLastScannedImage.setROI(roi);
                        } catch(Exception e){
                    
                        }
                    
                        // rotate image's orientation by 90 degrees clockwise (this is fast operation, just rotation info
                        // is updated, no pixels are moved)
                        // mLastScannedImage.setImageOrientation(mLastScannedImage.getImageOrientation().rotate90Clockwise());
                    
                        // check if recognizer is still active (required if onScanningDone was called after activity was destroyed)
                        if(mRecognizer != null) {
                            // pause scanning to prevent arrival of new results while DirectAPI processes image
                            //mRecognizerView.pauseScanning();
                            // finally, perform recognition of image
                            mRecognizer.recognizeImage(mLastScannedImage, new ScanResultListener() {
                                @Override
                                public void onScanningDone(RecognitionResults results) {
                                    BaseRecognitionResult[] baseRecognitionResults = results.getRecognitionResults();
                                    if (baseRecognitionResults != null && baseRecognitionResults.length == 1) {
                                        if (baseRecognitionResults[0] instanceof BlinkOCRRecognitionResult) {
                                            BlinkOCRRecognitionResult result = (BlinkOCRRecognitionResult) baseRecognitionResults[0];
                                            final String verticalText = result.getParsedResult("Raw");
                                            runOnUiThread(new Runnable() {
                                                @Override
                                                public void run() {
                                                    String res = verticalText;
                                                    String before = res.split("cm")[0];
                                                    String str = before.replaceAll("[^0-9?!\\.]","");
                                                    Double l = Double.parseDouble(str);
                    
                                                    Log.d("ocr","Broj=" + Double.toString(l) +" na lokaciji: "+charX+" "+charY);
                                                }
                                            });
                                        }
                                    }
                                    // resume video scanning loop and reset internal state
                                    //mRecognizerView.resumeScanning(true);
                                }
                            });
                        }
                        // if bitmap cannot be recognized, this means stop() and destroy() have been called, so resuming scanning makes no sense
                  //  }
                } */

            }
        }
        //}

    }

    /**
     * Simple OcrResultIterator implementation. Not optimized for speed.
     */
    private class OcrResultIterator {
        private int mBlock = 0;
        private int mLine = 0;
        private int mChar = 0;
        private OcrResult mOcrResult = null;

        public OcrResultIterator(OcrResult result) {
            mOcrResult = result;
        }

        /**
         * copy constructor
         */
        public OcrResultIterator(OcrResultIterator other) {
            mOcrResult = other.mOcrResult;
            mBlock = other.mBlock;
            mChar = other.mChar;
            mLine = other.mLine;
        }

        public OcrChar getCurrentChar() {
            return mOcrResult.getBlocks()[mBlock].getLines()[mLine].getChars()[mChar];
        }

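        /** Returns true unless the iterator is at the last character of the last line of the last block. */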
        boolean hasNext() {
            return (mBlock < mOcrResult.getBlocks().length - 1)
                    || (mLine < mOcrResult.getBlocks()[mBlock].getLines().length - 1)
                    || (mChar < mOcrResult.getBlocks()[mBlock].getLines()[mLine].getChars().length - 1);
        }

        /** moves to next char and returns true if new line is crossed */
        public boolean moveToNext() {
            boolean newLine = false;
            mChar++;
            if (mChar == mOcrResult.getBlocks()[mBlock].getLines()[mLine].getChars().length) {
                mChar = 0;
                mLine++;
                newLine = true;
                if (mLine == mOcrResult.getBlocks()[mBlock].getLines().length) {
                    mLine = 0;
                    mBlock++;
                }
            }
            return newLine;
        }
    }

    /**
     * very inefficient implementation of string search
     */
    private OcrResultIterator findString(String str, OcrResult result) {
        OcrResultIterator iter = new OcrResultIterator(result);
        while (iter.hasNext()) {
            if (stringMatch(str, iter))
                return iter;
            iter.moveToNext();
        }
        return null;
    }

    /**
     * Returns true if string matches OcrResult from position pointed by iterator.
     */
    private boolean stringMatch(String str, OcrResultIterator iter) {
        OcrResultIterator iterCopy = new OcrResultIterator(iter);
        for (int pos = 0; pos < str.length(); ++pos) {
            if (!charMatch(str.charAt(pos), iterCopy.getCurrentChar()))
                return false;
            if (pos != str.length() - 1 && !iterCopy.hasNext())
                return false;
            iterCopy.moveToNext();
        }
        return true;
    }

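    /** Returns true if the iterator's current character parses as a number. */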
    private boolean isNumber(OcrResultIterator iter) {
        OcrResultIterator iterCopy = new OcrResultIterator(iter);
        OcrChar c = iterCopy.getCurrentChar();
        String s = c.toString();
        if (isNumeric(s)) {
            return true;
        }

        return false;
    }

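    /** Returns true if the given string can be parsed as a double. */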
    public static boolean isNumeric(String str) {
        try {
            double d = Double.parseDouble(str);
        } catch (NumberFormatException nfe) {
            return false;
        }
        return true;
    }

    /**
     * Returns true if char matches given OcrChar or any of its recognition alternatives
     */
    private boolean charMatch(char c, OcrChar ocrC) {
        if (c == ocrC.getValue())
            return true;
        // check alternatives
        OcrChar[] variants = ocrC.getRecognitionVariants();
        if (variants != null) { // some chars do not have alternatives
            for (OcrChar var : variants) {
                if (c == var.getValue())
                    return true;
            }
        }
        return false;
    }

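    /**
     * Builds a timestamped .jpg File in the public Pictures/Angular directory, appending the given
     * suffix to the file name. Returns null if the directory cannot be created.
     */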
    private static File getOutputMediaFile(String ime) {
        File mediaStorageDir = new File(
                Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), "Angular");
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                Log.d(Conf.TAG, "failed to create directory");
                return null;
            }
        }

        // Create a media file name
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());

        return new File(mediaStorageDir.getPath() + File.separator + "IMG_" + timeStamp + ime + ".jpg");
    }

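    /**
     * Writes the given bitmap to external storage using getOutputMediaFile.
     * Note: the image is compressed as PNG even though the file name ends in .jpg.
     */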
    private void storeImage(Bitmap image) {
        File pictureFile = getOutputMediaFile("slikson");
        if (pictureFile == null) {
            Log.d(Conf.TAG, "Error creating media file, check storage permissions: ");// e.getMessage());
            return;
        }
        try {
            FileOutputStream fos = new FileOutputStream(pictureFile);
            image.compress(Bitmap.CompressFormat.PNG, 90, fos);
            fos.close();
        } catch (FileNotFoundException e) {
            Log.d(Conf.TAG, "File not found: " + e.getMessage());
        } catch (IOException e) {
            Log.d(Conf.TAG, "Error accessing file: " + e.getMessage());
        }

    }

}