Example usage for org.opencv.core Mat Mat

List of usage examples for org.opencv.core Mat Mat

Introduction

On this page you can find example usages of the org.opencv.core Mat constructor, Mat().

Prototype

public Mat() 

Source Link

Usage

From source file:com.ibm.streamsx.edgevideo.device.FaceDetector.java

License:Open Source License

/**
 * Converts a BGRA camera frame to an equalized single-channel grey image.
 *
 * @param rgbFrame the source frame in BGRA channel order
 * @return a new Mat holding the histogram-equalized greyscale image
 */
public Mat toGrayscale(Mat rgbFrame) {
    Mat grey = new Mat();
    Imgproc.cvtColor(rgbFrame, grey, Imgproc.COLOR_BGRA2GRAY);
    // Equalize to spread out intensity values; improves downstream detection contrast.
    Imgproc.equalizeHist(grey, grey);
    return grey;
}

From source file:com.joowon.returnA.classifier.cv.PdfPageDivider.java

License:Open Source License

/**
 * Runs the page-division pipeline: grey-scale, blur, edge detection, and a
 * probabilistic Hough transform whose line segments are stored in {@code lines}.
 *
 * @return this divider, to allow call chaining
 */
public PdfPageDivider divide() {
    // Stage 1: grey-scale the page and smooth it with a 3x3 box blur.
    Mat grey = new Mat();
    Imgproc.cvtColor(img, grey, Imgproc.COLOR_BGR2GRAY);
    Imgproc.blur(grey, grey, new Size(3, 3));

    // Stage 2: Canny edge detection; upper threshold is 3x the lower one.
    final int lowThreshold = 50;
    final int thresholdRatio = 3;
    Mat edges = new Mat();
    Imgproc.Canny(grey, edges, lowThreshold, lowThreshold * thresholdRatio);

    // Stage 3: probabilistic Hough transform over the edge map.
    lines = new Mat();
    Imgproc.HoughLinesP(edges, lines, 10, Math.PI / 180, 50, 50, 10);
    return this;
}

From source file:com.joravasal.keyface.CameraAccessView.java

License:Open Source License

/**
 * Rotates and/or mirrors a camera frame so it matches the current screen
 * orientation, flipping differently for the front and rear cameras.
 *
 * NOTE(review): except in the ROTATION_0 case, the input Mat is flipped
 * in place and returned; callers should not assume the argument is untouched.
 *
 * @param image the captured frame
 * @return the orientation-corrected frame (a new Mat only for ROTATION_0)
 */
public Mat correctCameraImage(Mat image) {
    //Log.i(tag, "Correcting image rotation");
    //Check rotation of device
    int rotation = ((KeyFaceActivity) this.getContext()).getWindowManager().getDefaultDisplay().getRotation();
    switch (rotation) {
    case Surface.ROTATION_0:
        int degrees = 90;
        //Mirror (y axis) if front camera and rotation in any case
        Mat imageResult = new Mat();
        //For some reason to rotate the image properly, we have to set the center like this
        // NOTE(review): both coordinates use width()/2 — the author claims this is
        // deliberate, but height()/2 would be the usual y term; confirm on device.
        Point center = new Point(image.width() / 2, image.width() / 2);
        Mat transform = Imgproc.getRotationMatrix2D(center, degrees, 1.0);
        try {
            // Destination size swaps width/height because of the 90-degree rotation.
            Imgproc.warpAffine(image, imageResult, transform, new Size(image.height(), image.width()));
        } catch (CvException e) {
            System.err.println(e.getMessage());
        }
        // flip codes: 1 = around y axis, 0 = around x axis, -1 = both axes
        if (KeyFaceActivity.cameraRearActive)
            Core.flip(imageResult, imageResult, -1);
        else
            Core.flip(imageResult, imageResult, 1);
        return imageResult;
    case Surface.ROTATION_90:
        //Mirror on y axis if front camera
        if (!KeyFaceActivity.cameraRearActive)
            Core.flip(image, image, 1);
        break;
    case Surface.ROTATION_180:
        //Never gets here but just in case:
        break;
    case Surface.ROTATION_270:
        //Mirror on the x axis if rear camera, both axis if front camera
        if (KeyFaceActivity.cameraRearActive)
            Core.flip(image, image, -1);
        else
            Core.flip(image, image, 0);
        break;
    default:
        break;
    }

    return image;
}

From source file:com.joravasal.keyface.EigenFacesActivity.java

License:Open Source License

@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i("eigenFacesActivity::", "OnCreate");
    super.onCreate(savedInstanceState);

    setContentView(R.layout.eigenfaces);
    setTitle("Eigenfaces");
    // Pull the mean face and the eigenface matrix from the trained PCA model.
    Mat aver = ((PCAfaceRecog) KeyFaceActivity.recogAlgorithm).getAverage();
    Mat faces = ((PCAfaceRecog) KeyFaceActivity.recogAlgorithm).getEigenFaces();

    // FIX: use Integer.parseInt instead of the deprecated boxing constructor
    // new Integer(String); the resulting int is the saved face edge length in px.
    int size = Integer.parseInt(KeyFaceActivity.prefs.getString("savedFaceSize", "200"));
    Mat aux = new Mat();

    // The average face is stored as a flat row; reshape it into a size x size image.
    aver = aver.reshape(1, size);
    aver = toGrayscale(aver);
    average = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
    Imgproc.cvtColor(aver, aux, Imgproc.COLOR_GRAY2RGBA, 4);
    Utils.matToBitmap(aux, average);
    LinearLayout layout = (LinearLayout) findViewById(id.eigenFacesHorizontalLayout);

    TextView avrgImgTV = new TextView(getApplicationContext());
    avrgImgTV.setText("Average image:");
    avrgImgTV.setPadding(5, 10, 10, 20);
    avrgImgTV.setGravity(Gravity.CENTER);

    TextView eigenfacesImgsTV = new TextView(getApplicationContext());
    eigenfacesImgsTV.setText("Eigenfaces:");
    eigenfacesImgsTV.setPadding(5, 10, 10, 20);
    eigenfacesImgsTV.setGravity(Gravity.CENTER);

    ImageView imgV = new ImageView(getApplicationContext());

    imgV.setClickable(false);
    imgV.setVisibility(0); // 0 == View.VISIBLE
    imgV.setPadding(0, 10, 10, 20);
    imgV.setImageBitmap(average);

    layout.addView(avrgImgTV);
    layout.addView(imgV);
    layout.addView(eigenfacesImgsTV);

    // One ImageView + Bitmap per eigenface, appended to the horizontal layout.
    LinkedList<ImageView> variables = new LinkedList<ImageView>();
    eigenfacesList = new LinkedList<Bitmap>();
    for (int i = 0; i < faces.rows(); i++) {
        variables.add(new ImageView(getApplicationContext()));
        eigenfacesList.add(Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888));

        // FIX: dropped a dead "aux = new Mat()" here — the reference was
        // immediately overwritten by the reshape result on the next line.
        aux = faces.row(i).reshape(1, size);
        aux = toGrayscale(aux);
        Mat auxGreyC4 = new Mat();
        Imgproc.cvtColor(aux, auxGreyC4, Imgproc.COLOR_GRAY2RGBA, 4);
        Utils.matToBitmap(auxGreyC4, eigenfacesList.get(i));

        variables.get(i).setClickable(false);
        variables.get(i).setVisibility(0); // 0 == View.VISIBLE
        variables.get(i).setPadding(0, 10, 10, 20);
        variables.get(i).setImageBitmap(eigenfacesList.get(i));
        layout.addView(variables.get(i));
    }

    Button save = (Button) findViewById(id.saveEigenfacesB);
    save.setOnClickListener(this);
}

From source file:com.joravasal.keyface.FindFacesView.java

License:Open Source License

/**
 * Grabs one camera frame, detects faces, tries to recognize each one, draws
 * overlays (ellipse, name banner, optional PCA debug panel), and returns the
 * annotated frame as a Bitmap — or null if the Mat-to-Bitmap conversion fails.
 *
 * Side effects: updates mRgba/mGray, may persist a new face to the DB and
 * bump the "savedFaces" preference, and resets KeyFaceActivity.addingFaces.
 */
@Override
protected Bitmap processFrame(VideoCapture camera) {
    //Log.i(tag,"Processing frame for our delight");

    // Fetch the frame twice: RGBA for on-screen drawing, grey for detection.
    Mat mRgbaAux = new Mat();
    Mat mGrayAux = new Mat();
    camera.retrieve(mRgbaAux, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    camera.retrieve(mGrayAux, Highgui.CV_CAP_ANDROID_GREY_FRAME);
    //Correct the direction of the image
    mRgba = correctCameraImage(mRgbaAux);
    mGray = correctCameraImage(mGrayAux);

    AlgorithmReturnValue resExample = null;
    //We look for faces in the captured images
    if (cascade != null) {
        // Minimum detectable face edge, as a fraction of the frame height.
        int faceSize = Math.round(mGray.rows() * KeyFaceActivity.minFaceSize);
        List<Rect> faces = new LinkedList<Rect>();
        try {
            cascade.detectMultiScale(mGray, faces, 1.1, 2, 2, new Size(faceSize, faceSize));
        } catch (CvException e) {
            System.err.println(e.getMessage());
        }
        for (Rect r : faces) { //For each face

            //The Rectangle commented is the area that will be used to check the face,
            //but an ellipse is shown instead, I think it looks better.
            //Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0,0,255,100), 3);

            // "nombre" is Spanish for "name"; null means the face was not recognized.
            String nombre = null;

            // We try to recognize it
            AlgorithmReturnValue res = KeyFaceActivity.recogAlgorithm.recognizeFace(mGray.submat(r));
            resExample = res;
            if (res.getResult() != -1) {
                //if it worked, we find the name
                nombre = findName(res.getResult());
            }
            Point center = new Point(r.x + (r.width / 2), r.y + (r.height / 2));
            //If nombre is null we have no name, thus is unrecognized and draw a red circle, together with the text "Unknown"
            if (nombre == null) {
                Core.ellipse(mRgba, center, new Size(r.width / 2 - 5, r.height / 2 + 20), 0, 0, 360,
                        new Scalar(255, 0, 0, 30), 3);
                Core.rectangle(mRgba, new Point(r.x + 45, r.y + r.height + 20),
                        new Point(r.x + 200, r.y + r.height + 60), new Scalar(70, 50, 50, 255), Core.FILLED);
                Core.putText(mRgba, "Unknown", new Point(r.x + 50, r.y + r.height + 50),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(200, 200, 200, 100));

                //Check if the user is tryaing to save a new face
                if (KeyFaceActivity.addingFaces && faces.size() == 1) {
                    //All is in order, we save a new image and update our account of faces. We update the recognizer data as well.
                    addFaceToDB(mGray, r, savedFaces);

                    // Persist the new face count on the UI thread via the toast handler.
                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            KeyFaceActivity.prefs.edit()
                                    .putInt("savedFaces", KeyFaceActivity.prefs.getInt("savedFaces", 0) + 1)
                                    .apply();
                        }
                    });

                    if (!KeyFaceActivity.recogAlgorithm.updateData(false)) {
                        System.err.println("Couldn't update the recognition algorithm with the new picture.");
                    }
                    KeyFaceActivity.addingFaces = false;

                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext, "Face saved successfully!",
                                    Toast.LENGTH_SHORT).show();
                        }
                    });
                }
                //The user tried to save a face when there was more than one, it fails and sends a message to the user.
                else if (KeyFaceActivity.addingFaces && faces.size() > 1) {
                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext,
                                    "Make sure there is only one face!", Toast.LENGTH_SHORT).show();
                        }
                    });
                    KeyFaceActivity.addingFaces = false;
                }
            }

            else { //We know this face!
                // Green ellipse + banner with the recognized name.
                Core.ellipse(mRgba, center, new Size(r.width / 2 - 5, r.height / 2 + 20), 0, 0, 360,
                        new Scalar(0, 255, 0, 100), 3);
                Core.rectangle(mRgba, new Point(r.x + 45, r.y + r.height + 20),
                        new Point(r.x + 200, r.y + r.height + 60), new Scalar(50, 70, 50, 255), Core.FILLED);
                Core.putText(mRgba, nombre, new Point(r.x + 50, r.y + r.height + 50), Core.FONT_HERSHEY_PLAIN,
                        2, new Scalar(0, 255, 0, 100));
                if (KeyFaceActivity.addingFaces && faces.size() == 1) {
                    //If the user tries to save a face when it is already known we don let him.
                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext, "This face is already known!",
                                    Toast.LENGTH_SHORT).show();
                        }
                    });
                    KeyFaceActivity.addingFaces = false;
                }
            }
        }
        //If there is no face we tell the user there was a mistake
        if (KeyFaceActivity.addingFaces && faces.size() <= 0) {
            KeyFaceActivity.toastHandler.post(new Runnable() {
                public void run() {
                    Toast.makeText(KeyFaceActivity.globalappcontext, "No face found!", Toast.LENGTH_SHORT)
                            .show();
                }
            });
            KeyFaceActivity.addingFaces = false;
        }
    }

    savedFaces = KeyFaceActivity.prefs.getInt("savedFaces", savedFaces);

    // Optional debug overlay: PCA distances for closest / second / farthest
    // matches plus the image count and threshold, drawn along the bottom edge.
    if (KeyFaceActivity.prefs.getBoolean("showData", false)) {
        try {
            if (resExample != null) {
                //background rectangle for extra info on PCA
                Core.rectangle(mRgba, new Point(0, mRgba.height() - 100),
                        new Point(mRgba.width(), mRgba.height()), new Scalar(50, 50, 50, 50), Core.FILLED);
                //Data for closest image 
                Core.putText(mRgba, "1st", new Point(5, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getClosestImage()),
                        new Point(5, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Double.toString(resExample.getDistClosestImage() / 100000).substring(0, 6),
                        new Point(5, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Data for second closest image
                Core.putText(mRgba, "2nd", new Point(180, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getSecondClosestImage()),
                        new Point(180, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba,
                        Double.toString(resExample.getDistSecondClosestImage() / 100000).substring(0, 6),
                        new Point(180, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Data for farthest image
                Core.putText(mRgba, "Last", new Point(355, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getFarthestImage()),
                        new Point(355, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Double.toString(resExample.getDistFarthestImage() / 100000).substring(0, 6),
                        new Point(355, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Num images and threshold
                Core.putText(mRgba, "Images:" + savedFaces, new Point(15, mRgba.height() - 5),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(250, 250, 250, 200));
                Core.putText(mRgba,
                        "Th:" + Double.toString(resExample.getThreshold() / 100000).substring(0,
                                Math.min(6, Double.toString(resExample.getThreshold() / 100000).length())),
                        new Point(240, mRgba.height() - 5), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
            } else {
                Core.rectangle(mRgba, new Point(0, mRgba.height() - 30), new Point(200, mRgba.height()),
                        new Scalar(50, 50, 50, 50), Core.FILLED);
                Core.putText(mRgba, "Images:" + savedFaces, new Point(15, mRgba.height() - 5),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(250, 250, 250, 200));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // Convert the annotated frame for display; recycle and return null on failure.
    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    if (Utils.matToBitmap(mRgba, bmp))
        return bmp;

    bmp.recycle();
    return null;
}

From source file:com.joravasal.keyface.FindFacesView.java

License:Open Source License

@Override
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
    super.surfaceChanged(_holder, format, width, height);
    Log.i(tag, "Surfance changed");
    // Re-allocate both frame buffers under the object lock so a concurrent
    // reader never observes a half-initialized pair.
    synchronized (this) {
        mRgba = new Mat();
        mGray = new Mat();
    }
}

From source file:com.joravasal.keyface.PCAfaceRecog.java

License:Open Source License

/**
 * Creates a PCA-based face recognizer backed by the images in the given
 * directory, each of the given size, and trains it immediately.
 *
 * @param address directory holding the saved face images
 * @param size    width/height of each training image
 */
public PCAfaceRecog(String address, Size size) {
    imagesDir = address;
    imageSize = size;

    // Pixel count of one flattened training image (one PCA sample vector).
    imgLength = (int) (size.height * size.width);

    eigenfaces = new Mat();
    average = new Mat();

    // Build the initial model from whatever images are already on disk.
    updateData(false);
}

From source file:com.minio.io.alice.MainActivity.java

License:Open Source License

/**
 * Camera-view lifecycle callback: allocates the working matrices once the
 * preview dimensions are known.
 */
public void onCameraViewStarted(int width, int height) {
    blackMat = new Mat();
    srcMat = new Mat();
}

From source file:com.mycompany.linedetection.LineDetector.java

/**
 * Loads the image to analyze and precomputes the two diagonal reference
 * angles used later to separate diagonal segments from ordinary ones.
 *
 * @param fileName path of the image file to load
 */
public LineDetector(String fileName) {
    img = Imgcodecs.imread(fileName);
    edgeDetectedImg = new Mat();
    // FIX: raw ArrayList() replaced with the diamond form for type safety.
    lineList = new ArrayList<>();
    diagonalLineList = new ArrayList<>();

    // Horizontal baseline plus the two image diagonals (top-left -> bottom-right
    // and top-right -> bottom-left) define the reference angles.
    horizontalLine = new Line(0, 0, img.width() - 1, 0);
    Line l2 = new Line(0, 0, img.width() - 1, img.height() - 1);
    Line l3 = new Line(img.width() - 1, 0, 0, img.height() - 1);

    diagAngle1 = horizontalLine.getAngle(l2);
    diagAngle2 = horizontalLine.getAngle(l3);
}

From source file:com.mycompany.linedetection.LineDetector.java

/**
 * Detects line segments via Canny + probabilistic Hough, routes near-diagonal
 * segments into {@code diagonalLineList} (drawn yellow), then keeps only the
 * longest few remaining segments in {@code lineList} (drawn red).
 */
public void findLines() {
    Imgproc.Canny(img, edgeDetectedImg, 100, 200, 3, true);
    Mat lines = new Mat();

    int width = img.width();
    int height = img.height();
    double diagonal = Math.sqrt(width * width + height * height);
    int minOfWidthHeight = (width < height) ? width : height;

    // Thresholds scale with the image: min votes ~10% of the short edge,
    // min length ~25% of the diagonal, max gap ~4% of the diagonal.
    Imgproc.HoughLinesP(edgeDetectedImg, lines, 1, Math.PI / 180, minOfWidthHeight * 10 / 100,
            diagonal * 25 / 100, diagonal * 4 / 100);

    // Keep at most 5 of the longest non-diagonal segments.
    int firstN = (lines.rows() < 5) ? lines.rows() : 5;

    for (int x = 0; x < lines.rows(); x++) {
        double[] vec = lines.get(x, 0);
        double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        Point startPoint = new Point(x1, y1);
        Point endPoint = new Point(x2, y2);

        // Angle is direction-sensitive, so test the segment in both orientations.
        double angle_inv = horizontalLine.getAngle(new Line(x1, y1, x2, y2));
        double angle = horizontalLine.getAngle(new Line(x2, y2, x1, y1));
        if ((angle >= diagAngle1 - DIAGONAL_TRESHOLD && angle <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle >= diagAngle2 - DIAGONAL_TRESHOLD && angle <= diagAngle2 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle1 - DIAGONAL_TRESHOLD && angle_inv <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle2 - DIAGONAL_TRESHOLD
                        && angle_inv <= diagAngle2 + DIAGONAL_TRESHOLD)) {
            diagonalLineList.add(new Line(x1, y1, x2, y2));
            Imgproc.line(img, startPoint, endPoint, new Scalar(255, 255, 0), 4);
        } else {
            lineList.add(new Line(x1, y1, x2, y2));
        }

    }

    // Sort by length, longest first.
    // FIX: the original comparator returned (int)(l2.getLength() - l1.getLength()),
    // which truncates fractional differences (lengths differing by < 1 compared
    // as equal) — the classic subtraction-comparator bug. Use Double.compare.
    Collections.sort(lineList, new Comparator<Line>() {
        @Override
        public int compare(Line l1, Line l2) {
            return Double.compare(l2.getLength(), l1.getLength());
        }

    });

    // FIX: raw ArrayList given its element type; the loop-invariant size check
    // was hoisted out of the loop (lineList is not modified inside it).
    ArrayList<Line> arr = new ArrayList<>();

    if (lineList.size() >= firstN + 1) {
        for (int i = 0; i < firstN + 1; i++) {
            double x1 = lineList.get(i).getX1(), y1 = lineList.get(i).getY1(), x2 = lineList.get(i).getX2(),
                    y2 = lineList.get(i).getY2();
            Point startPoint = new Point(x1, y1);
            Point endPoint = new Point(x2, y2);
            arr.add(lineList.get(i));
            Imgproc.line(img, startPoint, endPoint, new Scalar(0, 0, 255), 3);
        }
    }
    lineList = arr;
}