Example usage for the org.opencv.core Mat() constructor

List of usage examples for the org.opencv.core Mat() constructor

Introduction

On this page you can find example usage for the org.opencv.core Mat() constructor.

Prototype

public Mat() 
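
The default constructor creates an empty Mat: it has no rows, columns, or allocated pixel data until something fills it, which is why the examples below pass a fresh Mat to OpenCV calls as an output container. A minimal sketch of that behaviour, assuming the OpenCV Java bindings are on the classpath and the native library can be loaded (the class name is illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatConstructorSketch {
    public static void main(String[] args) {
        // Load the native OpenCV library before any Mat is used.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A default-constructed Mat owns no pixel data yet.
        Mat m = new Mat();
        System.out.println("empty: " + m.empty()); // true

        // Functions that take an output Mat allocate it for you;
        // create() does the same thing explicitly.
        m.create(240, 320, CvType.CV_8UC3);
        System.out.println("size: " + m.rows() + "x" + m.cols());

        // Release native memory when the Mat is no longer needed.
        m.release();
    }
}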

Usage

From source file:edu.sust.cse.analysis.image.ImageReader.java

public static void main(String[] args) {

    // Load an image file and display it in a window.
    Mat m1 = Highgui.imread("C:\\Users\\Eaiman\\Downloads\\test2\\Thesis\\test5.jpg");
    imshow("Original", m1);

    // Do some image processing on the image and display in another window.
    Mat m2 = new Mat();
    Imgproc.bilateralFilter(m1, m2, -1, 50, 10);
    Imgproc.Canny(m2, m2, 10, 200);
    imshow("Edge Detected", m2);
    detectLetter(m1);

}

From source file:edu.sust.cse.analysis.image.ThirdTry.java

public static void main(String[] args) {

    // Load an image file and display it in a window.
    Mat m1 = Highgui.imread("C:\\Users\\Eaiman\\Downloads\\test2\\Thesis\\test5.jpg");
    //imshow("Original", m1);

    // Do some image processing on the image and display in another window.
    Mat m2 = new Mat();
    Imgproc.bilateralFilter(m1, m2, -1, 50, 10);
    Imgproc.Canny(m2, m2, 10, 200);
    imshow("Edge Detected", m2);
    detectLetter(m1, m2);

}

From source file:edu.sust.cse.analysis.news.NewsAnalysis.java

public static void main(String[] args) throws IOException {

    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-01.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-01-145.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-02.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-03.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-04.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-05.jpg");
    //                 Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-01.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-04_resized.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\Camscanner Output\\normal_output_scan0007.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\Camscanner Output\\normal_output_scan0007-01.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\Camscanner Output\\normal_output_scan0001-01.bmp");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0007-300.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0007-145.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0007-145.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0007-96.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0001-145.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Thesis-4-1\\Previous Work\\OPenCv2\\eProthomAlo Sample I-O\\e-5-12.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Thesis-4-1\\Previous Work\\OPenCv2\\eProthomAlo Sample I-O\\e-6-12.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\06-12-2015\\sc-03-145.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\06-12-2015\\sc-03-145.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\06-12-2015\\sc-03-300B.jpg");
    Mat inputImageMat = Highgui
            .imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\06-12-2015\\sc-03-145B.jpg");
    if (inputImageMat.empty()) { // imread returns an empty Mat (never null) on failure
        System.out.println("[INPUT IMAGE EMPTY]");
        return;
    }
    Mat image = new Mat();//normal_output_scan0002.jpg
    double ratio = 150 / 72.0; // ~2.083
    System.out.println("WIDTH: " + inputImageMat.width() + " HEIGHT:" + inputImageMat.height());
    int inputWidth = (int) (inputImageMat.width() * ratio);
    int inputHeight = (int) (inputImageMat.height() * ratio);
    System.out.println("WIDTH: " + inputWidth + " HEIGHT:" + inputHeight);

    //        inputImageMat = image;
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-02.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-03.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-04.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-05.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\web001.png");
    Debug.debugLog("[Image [Cols, Rows]: [" + inputImageMat.cols() + ", " + inputImageMat.rows() + "]]");
    //        imshow("Original", inputImageMat);
    ViewerUI.show("Original", inputImageMat, ViewableUI.SHOW_ORIGINAL);
    //        ViewerUI.show("Original-Histogram", Histogram.getHistogram(inputImageMat), ViewableUI.SHOW_HISTOGRAM_ORIGINAL);

    // Do some image processing on the image and display in another window.
    Mat filteredImage = new Mat();
    /**
     * Some filters' main goal is to smooth an input image. However, such
     * filters sometimes not only remove the noise but also smooth away the
     * edges; the bilateral filter below reduces noise while preserving edges.
     */
    //        Imgproc.bilateralFilter(inputImageMat, m2, -1, 50, 10); /*Previous line for noise filtering*/
    Imgproc.bilateralFilter(inputImageMat, filteredImage, -1, 50, 10);
    //        Imgproc.bilateralFilter(inputImageMat, filteredImage, -1, 150, 11);

    ViewerUI.show("Noise Filter", filteredImage, ViewableUI.SHOW_NOISE_FILTER);
    //        ViewerUI.show("Noise Filter-Histogram", Histogram.getHistogram(filteredImage), ViewableUI.SHOW_HISTOGRAM_NOISE_FILTER);
    Imgproc.Canny(filteredImage, filteredImage, 10, 150);
    //        Imgproc.bilateralFilter(filteredImage, filteredImage, -1, 50, 10);
    //        Imgproc.threshold(filteredImage, filteredImage, 250, 300,Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C );
    //Imgproc.cvtColor(m1, m1, Imgproc.COLOR_RGB2GRAY, 0);
    //        imshow("Edge Detected", m2);
    ViewerUI.show("Edge Detected", filteredImage, ViewableUI.SHOW_EDGE_DETECTION);
    //        ViewerUI.show("Edge Detected-Histogram", Histogram.getHistogram(filteredImage), ViewableUI.SHOW_HISTOGRAM_EDGE_DETECTION);

    Size sizeA = filteredImage.size();
    System.out.println("Width: " + sizeA.width + " Height: " + sizeA.height);
    int width = (int) sizeA.width;
    int height = (int) sizeA.height;
    int[][][] pointLength = new int[height][width][2];
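    // For each black (zero) pixel of the edge map, pointLength[i][j][0] approximates the
    // length of the horizontal run of black pixels containing (i, j) and pointLength[i][j][1]
    // the length of the vertical run; the count is computed once at the start of a run and
    // copied along it, while edge (non-zero) pixels get 0 in both slots.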
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            //double[] data = m2.get(i, j);
            if (filteredImage.get(i, j)[0] != 0) {
                pointLength[i][j][0] = 0;
                pointLength[i][j][1] = 0;
                continue;
            }
            if (j != 0 && filteredImage.get(i, j - 1)[0] == 0) {
                pointLength[i][j][0] = pointLength[i][j - 1][0];
            } else {
                int count = 0;
                for (int k = j + 1; k < width; k++) {
                    if (filteredImage.get(i, k)[0] == 0) {
                        count++;
                    } else {
                        break;
                    }
                }
                pointLength[i][j][0] = count;
            }
            if (i != 0 && filteredImage.get(i - 1, j)[0] == 0) {
                pointLength[i][j][1] = pointLength[i - 1][j][1];
            } else {
                int count = 0;
                for (int k = i + 1; k < height; k++) {
                    if (filteredImage.get(k, j)[0] == 0) {
                        count++;
                    } else {
                        break;
                    }
                }
                pointLength[i][j][1] = count;
            }
        }
    }
    String temp = "";
    Mat convertArea = filteredImage.clone();

    int[][] blackWhite = new int[height][width];
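    // Mark image borders and pixels sitting in long runs of black (horizontal run > 150 with
    // vertical run > 6, or horizontal run > 7 with vertical run > 200) as separators: they are
    // set to 1 in blackWhite and painted white in convertArea; everything else is set to 0
    // and painted black.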

    for (int i = 0; i < height; i++) {
        temp = "";
        for (int j = 0; j < width; j++) {
            if (i == 0 || j == 0 || i == height - 1 || j == width - 1) {
                temp = temp + "@";
                blackWhite[i][j] = 1;

                double[] data = filteredImage.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else if (pointLength[i][j][0] > 150 && pointLength[i][j][1] > 6) {
                temp = temp + "@";
                blackWhite[i][j] = 1;

                double[] data = filteredImage.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else if (pointLength[i][j][0] > 7 && pointLength[i][j][1] > 200) {
                temp = temp + "@";
                blackWhite[i][j] = 1;

                double[] data = filteredImage.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else {
                temp = temp + " ";
                blackWhite[i][j] = 0;

                double[] data = filteredImage.get(i, j);
                data[0] = 0.0;
                convertArea.put(i, j, data);
            }

        }
    }
    ViewerUI.show("Convertion", convertArea, ViewableUI.SHOW_CONVERSION);
    //        ViewerUI.show("Convertion-Histogram", Histogram.getHistogram(convertArea), ViewableUI.SHOW_HISTOGRAM_CONVERSION);

    ImageDetection isImage = new ImageDetection();
    HeadlineDetection isHeadline = new HeadlineDetection();

    ImageBorderDetectionBFS imgBFS = new ImageBorderDetectionBFS();
    ArrayList<BorderItem> borderItems = imgBFS.getBorder(blackWhite, width, height, filteredImage,
            inputImageMat);
    // Mat[] subMat = new Mat[borderItems.size()];
    //        for (int i = 0; i < borderItems.size(); i++) {
    //            subMat[i] = m2.submat(borderItems.get(i).getMinX(), borderItems.get(i).getMaxX(),
    //                    borderItems.get(i).getMinY(), borderItems.get(i).getMaxY());
    //            if (isImage.isImage(subMat[i])) {
    //                System.out.println("subMat" + i + " is an image");
    //                imshow("Image" + i, subMat[i]);
    //
    //            }else if(isHeadline.isHeadLine(subMat[i])){
    //                System.out.println("subMat" + i + " is an Headline");
    //                imshow("Headline" + i, subMat[i]);
    //            }else{
    //                System.out.println("subMat" + i + " is an Column");
    //                imshow("Column" + i, subMat[i]);
    //            }
    //            //imshow("subMat" + i, subMat[i]);
    //            bw.close();
    //
    //        }

    boolean[] imageIndexer = new boolean[borderItems.size()];
    int[] lineHeight = new int[borderItems.size()];
    int highestLineHeight = -1, lowestLineHeight = 10000;
    int totalHeight = 0, notImage = 0;

    for (int i = 0; i < borderItems.size(); i++) {
        lineHeight[i] = 0;
        BorderItem borderItem = borderItems.get(i);
        //            subMat[i] = m2.submat(borderItems.get(i).getMinX(), borderItems.get(i).getMaxX(),
        //                    borderItems.get(i).getMinY(), borderItems.get(i).getMaxY());
        //            if (isImage.isImage(subMat[i])) {
        //                System.out.println("subMat" + i + " is an image");
        //                imshow("Image" + i, subMat[i]);
        //                imageIndexer[i] = true;
        //                continue;
        //            }else{
        //                notImage++;
        //                imageIndexer[i] = false;
        //            }
        if (borderItem.getIsImage()) {
            System.out.println("subMat" + i + " is an image");
            //                imshow("Image" + i, borderItem.getBlock());
            ViewerUI.show("Image" + i, borderItem.getBlock(), ViewableUI.SHOW_IMAGE);
            //                ViewerUI.show("Image-Histogram" + i, Histogram.getHistogram(borderItem.getBlock()), ViewableUI.SHOW_HISTOGRAM_IMAGE);

            imageIndexer[i] = true;
            continue;
        } else {
            notImage++;
            imageIndexer[i] = false;
        }

        //            totalHeight += lineHeight[i] = getLineHeight(subMat[i]);
        Mat fake = new Mat();
        Imgproc.cvtColor(borderItem.getBlock(), fake, Imgproc.COLOR_RGB2GRAY, 0);
        totalHeight += lineHeight[i] = getLineHeight(fake);
        fake.release();
        System.out.println("line height " + i + ": " + lineHeight[i]);
        //            imshow("" + i, borderItems.get(i).getBlock());
        if (lineHeight[i] > highestLineHeight) {
            highestLineHeight = lineHeight[i];
        }
        if (lineHeight[i] < lowestLineHeight) {
            lowestLineHeight = lineHeight[i];
        }

        //            if(i==7)
        //                break;
    }

    int avgLineHeight = totalHeight / notImage;
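    // Classify each non-image block by its measured line height: blocks at least 45 px tall
    // (and more than 13 px above the smallest line height) are shown as headlines, blocks of
    // 21-44 px sufficiently above the minimum as sub-headlines, and the rest as ordinary columns.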

    for (int i = 0; i < borderItems.size(); i++) {
        if (!imageIndexer[i]) {
            if (lineHeight[i] - lowestLineHeight > 13 && lineHeight[i] >= 45) {
                //                    imshow("Headline" + i, subMat[i]);
                //                    imshow("Headline" + i, borderItems.get(i).getBlock());
                ViewerUI.show("Headline" + i, borderItems.get(i).getBlock(), ViewableUI.SHOW_HEADING);
                //                    ViewerUI.show("Headline-Histogram" + i, Histogram.getHistogram(borderItems.get(i).getBlock()), ViewableUI.SHOW_HISTOGRAM_HEADING);

            } else if (lineHeight[i] - lowestLineHeight > 8 && lineHeight[i] >= 21 && lineHeight[i] < 45) {
                //                    imshow("Sub Headline" + i, borderItems.get(i).getBlock());
                ViewerUI.show("Sub Headline" + i, borderItems.get(i).getBlock(), ViewableUI.SHOW_SUB_HEADING);
                //                    ViewerUI.show("Sub Headline-Histogram" + i, Histogram.getHistogram(borderItems.get(i).getBlock()), ViewableUI.SHOW_HISTOGRAM_SUB_HEADING);

            } else {
                //                    imshow("Column" + i, subMat[i]);
                //                    imshow("Column" + i, borderItems.get(i).getBlock());
                ViewerUI.show("Column" + i, borderItems.get(i).getBlock(), ViewableUI.SHOW_COLUMN);
                //                    ViewerUI.show("Column-Histogram" + i, Histogram.getHistogram(borderItems.get(i).getBlock()), ViewableUI.SHOW_HISTOGRAM_COLUMN);

            }
        }
    }

}

From source file:edu.ucue.tfc.Modelo.VideoProcessor.java

public Image getImageAtPos(int pos) throws IOException {
    if (video.isOpened()) {
        if (pos < getFrameCount() && pos > 0) {
            setFramePos(pos);
            Mat tmp = new Mat();
            video.retrieve(tmp);
            setFramePos(0);
            try {
                Imgproc.resize(tmp, tmp, frameSize);
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
            return convertCvMatToImage(tmp);
        } else {
            return getImageAtPos(1);
        }
    } else {
        System.out.println("VideoCapture no est abierto!");
        return null;
    }
}

From source file:edu.ucue.tfc.Modelo.VideoProcessor.java

public void processVideo() {
    do {
        Mat tmp = new Mat();
        video.read(tmp);
        if (!tmp.empty()) {
            frame = tmp.clone();
            tmp.release();
            if (frameCounter < (getFrameCount() / 2) - 1) {
                frameCounter++;
                if (getMinutes() > 0) {
                    carsPerMinute = getDetectedCarsCount() / getMinutes();
                }

                processFrame(getFrame());
            } else {
                frameCounter = 0;
                finished = true;

                System.out.println("Reiniciando..");
                setFramePos(1);
            }
        } else {
            System.out.println("Imagen Vaca");
            frameCounter = 0;
            finished = true;

            System.out.println("Reiniciando..");
            setFramePos(1);
        }
    } while (frameCounter > (getFrameCount() / 2) - 2);
}

From source file:edu.wpi.first.wpilibj.examples.axiscamera.Robot.java

License:Open Source License

@Override
public void robotInit() {
    m_visionThread = new Thread(() -> {
        // Get the Axis camera from CameraServer
        AxisCamera camera = CameraServer.getInstance().addAxisCamera("axis-camera.local");
        // Set the resolution
        camera.setResolution(640, 480);

        // Get a CvSink. This will capture Mats from the camera
        CvSink cvSink = CameraServer.getInstance().getVideo();
        // Setup a CvSource. This will send images back to the Dashboard
        CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Mats are very memory expensive. Let's reuse this Mat.
        Mat mat = new Mat();

        // This cannot be 'true'. The program will never exit if it is. This
        // lets the robot stop this thread when restarting robot code or
        // deploying.
        while (!Thread.interrupted()) {
            // Tell the CvSink to grab a frame from the camera and put it
            // in the source mat.  If there is an error notify the output.
            if (cvSink.grabFrame(mat) == 0) {
                // Send the output the error.
                outputStream.notifyError(cvSink.getError());
                // skip the rest of the current iteration
                continue;
            }
            // Put a rectangle on the image
            Imgproc.rectangle(mat, new Point(100, 100), new Point(400, 400), new Scalar(255, 255, 255), 5);
            // Give the output stream a new image to display
            outputStream.putFrame(mat);
        }
    });
    m_visionThread.setDaemon(true);
    m_visionThread.start();
}

From source file:edu.wpi.first.wpilibj.examples.intermediatevision.Robot.java

License:Open Source License

@Override
public void robotInit() {
    m_visionThread = new Thread(() -> {
        // Get the UsbCamera from CameraServer
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        // Set the resolution
        camera.setResolution(640, 480);

        // Get a CvSink. This will capture Mats from the camera
        CvSink cvSink = CameraServer.getInstance().getVideo();
        // Setup a CvSource. This will send images back to the Dashboard
        CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Mats are very memory expensive. Let's reuse this Mat.
        Mat mat = new Mat();

        // This cannot be 'true'. The program will never exit if it is. This
        // lets the robot stop this thread when restarting robot code or
        // deploying.
        while (!Thread.interrupted()) {
            // Tell the CvSink to grab a frame from the camera and put it
            // in the source mat.  If there is an error notify the output.
            if (cvSink.grabFrame(mat) == 0) {
                // Send the output the error.
                outputStream.notifyError(cvSink.getError());
                // skip the rest of the current iteration
                continue;
            }
            // Put a rectangle on the image
            Imgproc.rectangle(mat, new Point(100, 100), new Point(400, 400), new Scalar(255, 255, 255), 5);
            // Give the output stream a new image to display
            outputStream.putFrame(mat);
        }
    });
    m_visionThread.setDaemon(true);
    m_visionThread.start();
}

From source file:emotion.Eyebrow.java

public static void Harris(Mat img, boolean rightEyeFlag) {
    //Harris point extraction
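    // The region is binarized and cleaned with a morphological open, contours shorter than
    // the image width or lying below the vertical midpoint are discarded, and the leftmost
    // and rightmost remaining corner features are taken as the eyebrow end points.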
    Mat harrisTestimg;
    harrisTestimg = img.clone();
    cvtColor(harrisTestimg, harrisTestimg, Imgproc.COLOR_BGR2GRAY);
    threshold(harrisTestimg, harrisTestimg, 200, 255, Imgproc.THRESH_BINARY_INV);
    Mat struct = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    erode(harrisTestimg, harrisTestimg, struct);
    dilate(harrisTestimg, harrisTestimg, struct);
    imwrite("intermediateHaaris.jpg", harrisTestimg);
    harrisTestimg.convertTo(harrisTestimg, CV_8UC1);
    ArrayList<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();

    Imgproc.findContours(harrisTestimg, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);

    //System.out.println("Average Y for contours:");
    float[] averageY = new float[contours.size()];
    for (int i = 0; i < contours.size(); ++i) {
        //We calculate mean of Y coordinates for each contour
        for (int j = 0; j < contours.get(i).total(); ++j) {
            int val = (int) contours.get(i).toArray()[j].y;
            averageY[i] += val;
        }
        averageY[i] /= contours.get(i).total();
        //System.out.println(i+") "+averageY[i]);

        if (averageY[i] <= img.height() / 2 && // consider only the upper half of the image
                contours.get(i).total() >= img.width()) // and contours longer than the width threshold
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(255, 255, 255));
        else
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(0, 0, 0));
    }

    MatOfPoint features = new MatOfPoint();
    Imgproc.goodFeaturesToTrack(harrisTestimg, features, 100, 0.00001, 0);

    //We draw just 2 extreme points- first and last
    Point[] eyebrowsPoints = new Point[2];
    for (int i = 0; i < features.toList().size(); i++) {
        if (i == 0) {
            eyebrowsPoints[0] = new Point(harrisTestimg.width() / 2, 0);
            eyebrowsPoints[1] = new Point(harrisTestimg.width() / 2, 0);
        }
        if (features.toArray()[i].x < eyebrowsPoints[0].x
                && features.toArray()[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[0] = features.toArray()[i];
        }
        if (features.toArray()[i].x > eyebrowsPoints[1].x
                && features.toArray()[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[1] = features.toArray()[i];
        }
    }
    StaticFunctions.drawCross(img, eyebrowsPoints[1], StaticFunctions.Features.EYEBROWS_ENDS);
    StaticFunctions.drawCross(img, eyebrowsPoints[0], StaticFunctions.Features.EYEBROWS_ENDS);
    imwrite("testHaris.jpg", img);
    if (rightEyeFlag) {
        EyeRegion.rightInnerEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.rightInnerEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightInnerEyebrowsCorner.y += Eye.rightRect.y;

        EyeRegion.rightOuterEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.rightOuterEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightOuterEyebrowsCorner.y += Eye.rightRect.y;
    } else {
        EyeRegion.leftInnerEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.leftInnerEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftInnerEyebrowsCorner.y += Eye.leftRect.y;

        EyeRegion.leftOuterEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.leftOuterEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftOuterEyebrowsCorner.y += Eye.leftRect.y;
    }
}

From source file:eu.fpetersen.robobrain.behavior.followobject.ColorBlobDetector.java

License:Open Source License

public void process(Mat rgbaImage) {
    Imgproc.pyrDown(rgbaImage, mPyrDownMat);
    Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

    Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);

    Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
    Imgproc.dilate(mMask, mDilatedMask, new Mat());

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // Find max contour area
    maxArea = 0;
    Iterator<MatOfPoint> each = contours.iterator();
    Mat biggestContour = null;
    while (each.hasNext()) {
        MatOfPoint wrapper = each.next();
        double area = Imgproc.contourArea(wrapper);
        if (area > maxArea) {
            maxArea = area;
            biggestContour = wrapper.clone();
        }
    }
    if (biggestContour != null) {
        Core.multiply(biggestContour, new Scalar(4, 4), biggestContour);
        Moments mo = Imgproc.moments(biggestContour);
        centroidOfMaxArea = new Point(mo.get_m10() / mo.get_m00(), mo.get_m01() / mo.get_m00());
    } else {
        centroidOfMaxArea = null;
    }

    // Filter contours by area and resize to fit the original image size
    mContours.clear();
    each = contours.iterator();
    while (each.hasNext()) {
        MatOfPoint contour = each.next();
        if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) {
            Core.multiply(contour, new Scalar(4, 4), contour);
            mContours.add(contour);
        }
    }

    Imgproc.drawContours(rgbaImage, mContours, -1, CONTOUR_COLOR);
}

From source file:eu.fpetersen.robobrain.behavior.followobject.OrbObjectDetector.java

License:Open Source License

private Mat extractDescriptors(MatOfKeyPoint keypoints, Mat image) {
    Mat descriptors = new Mat();
    descriptorExtractor.compute(image, keypoints, descriptors);
    return descriptors;
}
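
For context, a hedged sketch of how a descriptor-extraction helper like this is typically driven: detect keypoints first, then compute their descriptors into an initially empty Mat. This assumes the same OpenCV 2.4-era Java API (Highgui, FeatureDetector, DescriptorExtractor) used elsewhere on this page; the file name and class name are illustrative.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.DescriptorExtractor;
import org.opencv.features2d.FeatureDetector;
import org.opencv.highgui.Highgui;

public class OrbDescriptorSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Load the target image in grayscale (path is illustrative).
        Mat image = Highgui.imread("object.jpg", Highgui.CV_LOAD_IMAGE_GRAYSCALE);

        // Detect ORB keypoints, then describe them.
        FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
        DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        detector.detect(image, keypoints);

        // The descriptor Mat starts empty and is filled by compute().
        Mat descriptors = new Mat();
        extractor.compute(image, keypoints, descriptors);

        System.out.println("keypoints: " + keypoints.total()
                + ", descriptor rows: " + descriptors.rows());
    }
}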