Example usage for org.opencv.core Mat width()

List of usage examples for org.opencv.core Mat width()

Introduction

On this page you can find example usages of the org.opencv.core Mat width() method.

Prototype

public int width() 
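
Before the project examples below, a minimal self-contained sketch (not taken from any of the files on this page) showing what width() returns; loading the OpenCV native library beforehand is an assumption of the snippet.

    // Minimal sketch: width() is the number of columns, the same value as cols().
    // Assumes the native library has been loaded, e.g.
    // System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat m = Mat.zeros(480, 640, CvType.CV_8UC1); // 480 rows x 640 columns
    System.out.println(m.width());   // 640 (same as m.cols())
    System.out.println(m.height());  // 480 (same as m.rows())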

Usage

From source file:edu.sust.cse.analysis.news.NewsAnalysis.java

public static void main(String[] args) throws IOException {

    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-01.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-01-145.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-02.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-03.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-04.jpg");
    //                Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\e-05.jpg");
    //                 Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-01.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-04_resized.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\Camscanner Output\\normal_output_scan0007.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\Camscanner Output\\normal_output_scan0007-01.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\Camscanner Output\\normal_output_scan0001-01.bmp");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0007-300.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0007-145.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0007-96.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Google\\Thesis Work\\scan-01-dec\\scan0001-145.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Thesis-4-1\\Previous Work\\OPenCv2\\eProthomAlo Sample I-O\\e-5-12.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\Thesis-4-1\\Previous Work\\OPenCv2\\eProthomAlo Sample I-O\\e-6-12.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\06-12-2015\\sc-03-145.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\06-12-2015\\sc-03-300B.jpg");
    Mat inputImageMat = Highgui
            .imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\06-12-2015\\sc-03-145B.jpg");
    if (inputImageMat.empty()) { // Highgui.imread() never returns null; it returns an empty Mat when the file cannot be read
        System.out.println("[INPUT IMAGE EMPTY]");
        return;
    }
    Mat image = new Mat();//normal_output_scan0002.jpg
    double ratio = 150 / 72.0; // ~2.083
    System.out.println("WIDTH: " + inputImageMat.width() + " HEIGHT:" + inputImageMat.height());
    int inputWidth = (int) (inputImageMat.width() * ratio);
    int inputHeight = (int) (inputImageMat.height() * ratio);
    System.out.println("WIDTH: " + inputWidth + " HEIGHT:" + inputHeight);

    //        inputImageMat = image;
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-02.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-03.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-04.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\data1\\sc-05.jpg");
    //        Mat inputImageMat = Highgui.imread("D:\\OpenCV_Library\\resources\\Scan_Img\\image\\web001.png");
    Debug.debugLog("[Image [Cols, Rows]: [" + inputImageMat.cols() + ", " + inputImageMat.rows() + "]]");
    //        imshow("Original", inputImageMat);
    ViewerUI.show("Original", inputImageMat, ViewableUI.SHOW_ORIGINAL);
    //        ViewerUI.show("Original-Histogram", Histogram.getHistogram(inputImageMat), ViewableUI.SHOW_HISTOGRAM_ORIGINAL);

    // Do some image processing on the image and display in another window.
    Mat filteredImage = new Mat();
    /**
     * Smoothing filters reduce noise, but they can also smooth away edges.
     * The bilateral filter below removes noise while preserving edges.
     */
    //        Imgproc.bilateralFilter(inputImageMat, m2, -1, 50, 10); /*Previous line for noise filtering*/
    Imgproc.bilateralFilter(inputImageMat, filteredImage, -1, 50, 10);
    //        Imgproc.bilateralFilter(inputImageMat, filteredImage, -1, 150, 11);

    ViewerUI.show("Noise Filter", filteredImage, ViewableUI.SHOW_NOISE_FILTER);
    //        ViewerUI.show("Noise Filter-Histogram", Histogram.getHistogram(filteredImage), ViewableUI.SHOW_HISTOGRAM_NOISE_FILTER);
    Imgproc.Canny(filteredImage, filteredImage, 10, 150);
    //        Imgproc.bilateralFilter(filteredImage, filteredImage, -1, 50, 10);
    //        Imgproc.threshold(filteredImage, filteredImage, 250, 300,Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C );
    //Imgproc.cvtColor(m1, m1, Imgproc.COLOR_RGB2GRAY, 0);
    //        imshow("Edge Detected", m2);
    ViewerUI.show("Edge Detected", filteredImage, ViewableUI.SHOW_EDGE_DETECTION);
    //        ViewerUI.show("Edge Detected-Histogram", Histogram.getHistogram(filteredImage), ViewableUI.SHOW_HISTOGRAM_EDGE_DETECTION);

    Size sizeA = filteredImage.size();
    System.out.println("Width: " + sizeA.width + " Height: " + sizeA.height);
    int width = (int) sizeA.width;
    int height = (int) sizeA.height;
    int pointLength[][][] = new int[height][width][2];
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            //double[] data = m2.get(i, j);
            if (filteredImage.get(i, j)[0] != 0) {
                pointLength[i][j][0] = 0;
                pointLength[i][j][1] = 0;
                continue;
            }
            if (j != 0 && filteredImage.get(i, j - 1)[0] == 0) {
                pointLength[i][j][0] = pointLength[i][j - 1][0];
            } else {
                int count = 0;
                for (int k = j + 1; k < width; k++) {
                    if (filteredImage.get(i, k)[0] == 0) {
                        count++;
                    } else {
                        break;
                    }
                }
                pointLength[i][j][0] = count;
            }
            if (i != 0 && filteredImage.get(i - 1, j)[0] == 0) {
                pointLength[i][j][1] = pointLength[i - 1][j][1];
            } else {
                int count = 0;
                for (int k = i + 1; k < height; k++) {
                    if (filteredImage.get(k, j)[0] == 0) {
                        count++;
                    } else {
                        break;
                    }
                }
                pointLength[i][j][1] = count;
            }
        }
    }
    String temp = "";
    Mat convertArea = filteredImage.clone();

    int[][] blackWhite = new int[height][width];

    for (int i = 0; i < height; i++) {
        temp = "";
        for (int j = 0; j < width; j++) {
            if (i == 0 || j == 0 || i == height - 1 || j == width - 1) {
                temp = temp + "@";
                blackWhite[i][j] = 1;

                double[] data = filteredImage.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else if (pointLength[i][j][0] > 150 && pointLength[i][j][1] > 6) {
                temp = temp + "@";
                blackWhite[i][j] = 1;

                double[] data = filteredImage.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else if (pointLength[i][j][0] > 7 && pointLength[i][j][1] > 200) {
                temp = temp + "@";
                blackWhite[i][j] = 1;

                double[] data = filteredImage.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else {
                temp = temp + " ";
                blackWhite[i][j] = 0;

                double[] data = filteredImage.get(i, j);
                data[0] = 0.0;
                convertArea.put(i, j, data);
            }

        }
    }
    ViewerUI.show("Convertion", convertArea, ViewableUI.SHOW_CONVERSION);
    //        ViewerUI.show("Convertion-Histogram", Histogram.getHistogram(convertArea), ViewableUI.SHOW_HISTOGRAM_CONVERSION);

    ImageDetection isImage = new ImageDetection();
    HeadlineDetection isHeadline = new HeadlineDetection();

    ImageBorderDetectionBFS imgBFS = new ImageBorderDetectionBFS();
    ArrayList<BorderItem> borderItems = imgBFS.getBorder(blackWhite, width, height, filteredImage,
            inputImageMat);
    // Mat[] subMat = new Mat[borderItems.size()];
    //        for (int i = 0; i < borderItems.size(); i++) {
    //            subMat[i] = m2.submat(borderItems.get(i).getMinX(), borderItems.get(i).getMaxX(),
    //                    borderItems.get(i).getMinY(), borderItems.get(i).getMaxY());
    //            if (isImage.isImage(subMat[i])) {
    //                System.out.println("subMat" + i + " is an image");
    //                imshow("Image" + i, subMat[i]);
    //
    //            }else if(isHeadline.isHeadLine(subMat[i])){
    //                System.out.println("subMat" + i + " is an Headline");
    //                imshow("Headline" + i, subMat[i]);
    //            }else{
    //                System.out.println("subMat" + i + " is an Column");
    //                imshow("Column" + i, subMat[i]);
    //            }
    //            //imshow("subMat" + i, subMat[i]);
    //            bw.close();
    //
    //        }

    boolean[] imageIndexer = new boolean[borderItems.size()];
    int[] lineHeight = new int[borderItems.size()];
    int highestLineHeight = -1, lowestLineHeight = 10000;
    int totalHeight = 0, notImage = 0;

    for (int i = 0; i < borderItems.size(); i++) {
        lineHeight[i] = 0;
        BorderItem borderItem = borderItems.get(i);
        //            subMat[i] = m2.submat(borderItems.get(i).getMinX(), borderItems.get(i).getMaxX(),
        //                    borderItems.get(i).getMinY(), borderItems.get(i).getMaxY());
        //            if (isImage.isImage(subMat[i])) {
        //                System.out.println("subMat" + i + " is an image");
        //                imshow("Image" + i, subMat[i]);
        //                imageIndexer[i] = true;
        //                continue;
        //            }else{
        //                notImage++;
        //                imageIndexer[i] = false;
        //            }
        if (borderItem.getIsImage()) {
            System.out.println("subMat" + i + " is an image");
            //                imshow("Image" + i, borderItem.getBlock());
            ViewerUI.show("Image" + i, borderItem.getBlock(), ViewableUI.SHOW_IMAGE);
            //                ViewerUI.show("Image-Histogram" + i, Histogram.getHistogram(borderItem.getBlock()), ViewableUI.SHOW_HISTOGRAM_IMAGE);

            imageIndexer[i] = true;
            continue;
        } else {
            notImage++;
            imageIndexer[i] = false;
        }

        //            totalHeight += lineHeight[i] = getLineHeight(subMat[i]);
        Mat fake = new Mat();
        Imgproc.cvtColor(borderItem.getBlock(), fake, Imgproc.COLOR_RGB2GRAY, 0);
        totalHeight += lineHeight[i] = getLineHeight(fake);
        fake.release();
        System.out.println("line height " + i + ": " + lineHeight[i]);
        //            imshow("" + i, borderItems.get(i).getBlock());
        if (lineHeight[i] > highestLineHeight) {
            highestLineHeight = lineHeight[i];
        }
        if (lineHeight[i] < lowestLineHeight) {
            lowestLineHeight = lineHeight[i];
        }

        //            if(i==7)
        //                break;
    }

    int avgLineHeight = notImage > 0 ? totalHeight / notImage : 0; // guard against division by zero when every block is an image

    for (int i = 0; i < borderItems.size(); i++) {
        if (!imageIndexer[i]) {
            if (lineHeight[i] - lowestLineHeight > 13 && lineHeight[i] >= 45) {
                //                    imshow("Headline" + i, subMat[i]);
                //                    imshow("Headline" + i, borderItems.get(i).getBlock());
                ViewerUI.show("Headline" + i, borderItems.get(i).getBlock(), ViewableUI.SHOW_HEADING);
                //                    ViewerUI.show("Headline-Histogram" + i, Histogram.getHistogram(borderItems.get(i).getBlock()), ViewableUI.SHOW_HISTOGRAM_HEADING);

            } else if (lineHeight[i] - lowestLineHeight > 8 && lineHeight[i] >= 21 && lineHeight[i] < 45) {
                //                    imshow("Sub Headline" + i, borderItems.get(i).getBlock());
                ViewerUI.show("Sub Headline" + i, borderItems.get(i).getBlock(), ViewableUI.SHOW_SUB_HEADING);
                //                    ViewerUI.show("Sub Headline-Histogram" + i, Histogram.getHistogram(borderItems.get(i).getBlock()), ViewableUI.SHOW_HISTOGRAM_SUB_HEADING);

            } else {
                //                    imshow("Column" + i, subMat[i]);
                //                    imshow("Column" + i, borderItems.get(i).getBlock());
                ViewerUI.show("Column" + i, borderItems.get(i).getBlock(), ViewableUI.SHOW_COLUMN);
                //                    ViewerUI.show("Column-Histogram" + i, Histogram.getHistogram(borderItems.get(i).getBlock()), ViewableUI.SHOW_HISTOGRAM_COLUMN);

            }
        }
    }

}
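
The preprocessing above boils down to edge-preserving smoothing followed by edge detection. A condensed, hedged sketch of that pipeline (the input path is a placeholder, and a grayscale conversion is added so Canny receives a single-channel image):

    // Sketch only: mirrors the bilateral-filter + Canny steps of main() above.
    Mat input = Highgui.imread("path/to/scanned-page.jpg"); // placeholder path
    if (input.empty()) {
        throw new IllegalStateException("Could not read input image");
    }
    Mat gray = new Mat();
    Imgproc.cvtColor(input, gray, Imgproc.COLOR_BGR2GRAY);
    Mat smoothed = new Mat();
    Imgproc.bilateralFilter(gray, smoothed, -1, 50, 10); // d = -1: diameter derived from sigmaSpace
    Mat edges = new Mat();
    Imgproc.Canny(smoothed, edges, 10, 150);
    System.out.println("Edges: " + edges.width() + " x " + edges.height());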

From source file:edu.sust.cse.analysis.news.NewsAnalysis.java

private static int getLineHeight(Mat subMat) {
    int lineHeight = 0;
    float width = subMat.width();
    float height = subMat.height();

    if (height < 5 || width < 5) {
        return lineHeight;
    }

    int start = -1, end = -1, biggest = -1;
    //        String blacks= "";

    for (int i = 0; i < height; i++) {
        int darkPixels = 0; // number of dark (text) pixels in this row
        for (int j = 0; j < width; j++) {

            if (subMat.get(i, j)[0] <= 140) {
                darkPixels++;
                if (start == -1) {
                    start = i;
                }
                //                    blacks +="1";
                //                    break;
            } else {
                //                    blacks +="0";
            }
        }
        //            blacks += "\n";

        //            if(white==0){
        //                for(int j=0; j<width; j++){
        //                    double[] data = subMat.get(i, j);
        //                    if(data != null){
        //                        data[0] = 0.0;
        //                        subMat.put(i, j, data);
        //                    }
        //                }
        //            }
        //            if(biggest < white){
        //                biggest = white;
        //            }
        //            System.out.println(blacks);
        if (darkPixels == 0 && start != -1) {
            if ((i - 1 - start) < 5) {
                lineHeight = i - 1 - start;
                start = -1;
                continue;
            }

            if (end == -1) {
                end = i - 1;
            }
            lineHeight = end - start;
            break;
        }

        if (i == height - 1 && end == -1) {
            end = i;
            lineHeight = end - start;
        }
    }
    //        System.out.println("start: "+start);
    //            System.out.println("end: "+end);
    //        if(lineHeight == 50){
    //        filewrile(blacks);
    //        filewrile("\n\n\n\n\n\n\n\n");
    //        }
    return lineHeight;

    // Read image as before
    //        Mat rgba = subMat.clone();
    ////        Imgproc.cvtColor(rgba, rgba, Imgproc.COLOR_RGB2GRAY, 0);
    //
    //        // Create an empty image in matching format
    //        BufferedImage gray = new BufferedImage(rgba.width(), rgba.height(), BufferedImage.TYPE_BYTE_GRAY);
    //
    //        // Get the BufferedImage's backing array and copy the pixels directly into it
    //        byte[] data = ((DataBufferByte) gray.getRaster().getDataBuffer()).getData();
    //        rgba.get(0, 0, data);
    //
    //  return largestBlackBatch1(gray)[1];
}
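
A hedged usage sketch mirroring the call site in main() above: getLineHeight() scans a single-channel block, so a color block (named block here purely for illustration) is converted to grayscale first.

    // Sketch only: 'block' stands for any BGR sub-image (e.g. a BorderItem block).
    Mat grayBlock = new Mat();
    Imgproc.cvtColor(block, grayBlock, Imgproc.COLOR_RGB2GRAY, 0);
    int estimatedLineHeight = getLineHeight(grayBlock);
    grayBlock.release();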

From source file:edu.sust.cse.util.Histogram.java

public static Mat getHistogram(Mat image) {

    try {
        Mat src = new Mat(image.height(), image.width(), CvType.CV_8UC1); // single-channel destination for the grayscale image
        Imgproc.cvtColor(image, src, Imgproc.COLOR_RGB2GRAY);
        ArrayList<Mat> bgr_planes = new ArrayList<>();
        Core.split(src, bgr_planes);

        MatOfInt histSize = new MatOfInt(256);

        final MatOfFloat histRange = new MatOfFloat(0f, 256f);

        boolean accumulate = false;

        Mat b_hist = new Mat();

        Imgproc.calcHist(bgr_planes, new MatOfInt(0), new Mat(), b_hist, histSize, histRange, accumulate);

        int hist_w = 512;
        int hist_h = 600;
        long bin_w;
        bin_w = Math.round((double) hist_w / 256);

        Mat histImage = new Mat(hist_h, hist_w, CvType.CV_8UC1);

        Core.normalize(b_hist, b_hist, 3, histImage.rows(), Core.NORM_MINMAX);

        for (int i = 1; i < 256; i++) {

            Core.line(histImage, new Point(bin_w * (i - 1), hist_h - Math.round(b_hist.get(i - 1, 0)[0])),
                    new Point(bin_w * (i), hist_h - Math.round(Math.round(b_hist.get(i, 0)[0]))),
                    new Scalar(255, 0, 0), 2, 8, 0);

        }

        return histImage;
    } catch (Exception ex) {
        System.out.println("[HISTOGRAM][ERROR][" + ex.getMessage() + "]");
        return null;
    }
}
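
A hedged usage sketch; the input Mat and the ViewerUI/ViewableUI helpers are the project classes already used in NewsAnalysis above, and this particular call is an assumption lifted from the commented-out lines there.

    // Sketch only: 'inputImageMat' stands for any BGR image loaded elsewhere.
    Mat hist = Histogram.getHistogram(inputImageMat);
    if (hist != null) { // getHistogram() returns null on failure
        ViewerUI.show("Original-Histogram", hist, ViewableUI.SHOW_HISTOGRAM_ORIGINAL);
    }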

From source file:edu.wpi.cscore.RawCVMatSource.java

License: Open Source License

/**
 * Put an OpenCV image and notify sinks.
 *
 * <p>Only 8-bit single-channel or 3-channel (with BGR channel order) images
 * are supported. If the format, depth or channel order is different, use
 * Mat.convertTo() and/or cvtColor() to convert it first.
 *
 * @param image OpenCV image
 */
public void putFrame(Mat image) {
    int channels = image.channels();
    if (channels != 1 && channels != 3) {
        throw new VideoException("Unsupported Image Type");
    }
    int imgType = channels == 1 ? PixelFormat.kGray.getValue() : PixelFormat.kBGR.getValue();
    CameraServerJNI.putRawSourceFrame(m_handle, image.dataAddr(), image.width(), image.height(), imgType,
            (int) image.total() * channels);
}
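
A hedged sketch of what the Javadoc above suggests for other formats: convert to 8-bit BGR (or grayscale) before calling putFrame(). The RGBA input and the source variable are assumptions for illustration.

    // Sketch only: 'source' is a RawCVMatSource created elsewhere.
    Mat rgba = new Mat(480, 640, CvType.CV_8UC4); // e.g. a frame delivered in RGBA
    Mat bgr = new Mat();
    Imgproc.cvtColor(rgba, bgr, Imgproc.COLOR_RGBA2BGR); // reorder channels to BGR
    source.putFrame(bgr);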

From source file:emotion.Eye.java

public Eye(Mat _face) {
    reg = null;
    CascadeClassifier eyes_cascade;
    eyes_cascade = new CascadeClassifier(
            "E:\\Studia\\OpenCV\\opencv\\sources\\data\\haarcascades\\haarcascade_eye.xml");

    // Detect eyes and collect their bounding rectangles
    MatOfRect eyes = new MatOfRect();
    eyes_cascade.detectMultiScale(_face, eyes);

    if (eyes.toArray().length == 0) {
        Logger.getLogger("No face found in the image!");
        return;
    }
    for (int i = 0; i < eyes.toList().size(); ++i) {
        Rect tempRect = eyes.toArray()[i].clone();

        if (tempRect.x < _face.width() / 2) {
            Eye.leftRect = recalculate(tempRect, _face);
            Eye.leftEye = new Mat(_face, Eye.leftRect);
            imwrite("leftEYe.jpg", Eye.leftEye);
        } else {
            Eye.rightRect = recalculate(tempRect, _face);
            Eye.rightEye = new Mat(_face, Eye.rightRect);
            imwrite("rightEye.jpg", Eye.rightEye);
        }
    }
    templatingOuterCorner(Eye.leftEye, false);
    templatingInnerCorner(Eye.leftEye, false);
    templatingOuterCorner(Eye.rightEye, true);
    templatingInnerCorner(Eye.rightEye, true);
}
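
A hedged construction sketch (the image path is a placeholder; the constructor expects a BGR face image as produced by Highgui.imread):

    // Sketch only: run the eye detector on a face image loaded from a placeholder path.
    Mat face = Highgui.imread("path/to/face.jpg");
    if (!face.empty()) {
        Eye eye = new Eye(face); // fills Eye.leftEye / Eye.rightEye and the eye-corner points in EyeRegion
    }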

From source file:emotion.Eye.java

private void templatingOuterCorner(Mat eyeRegion, boolean rightEyeFlag) {
    //        Mat template=imread("E:\\Studia\\II YEAR\\Team Project\\"
    //                + "Face database\\eyecorners\\rightOuter.jpg",CV_8UC1);
    Mat template = imread("src\\Templates\\rightOuter.jpg", CV_8UC1);
    Mat temp = new Mat(eyeRegion.height(), eyeRegion.width(), CV_8UC1); // Mat(rows, cols, type)
    cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY);
    temp = rightEyeFlag
            ? new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height()))
            : new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height()));
    Mat result = new Mat(eyeRegion.height(), eyeRegion.width(), eyeRegion.type());

    //(9,9)- coordinates of eye outerCorner in the template
    if (rightEyeFlag) {
        imwrite("rightEyeForOuterTemplating.jpg", temp);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);
        //(9,9)- coordinates of eye outerCorner in the template
        Point outerCorner = new Point(maxVal.maxLoc.x + 9, maxVal.maxLoc.y + 9);

        //Adjust coordinates according to whole face
        outerCorner.y += Eye.rightRect.y;
        outerCorner.x += Eye.rightRect.x;
        outerCorner.x += temp.width(); //We examine just right half on the right eye
        ////////////////////////////////////////////
        EyeRegion.rightOuterEyeCorner = outerCorner;
    } else {
        imwrite("leftEyeForOuterTemplating.jpg", temp);
        Core.flip(template, template, 1);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);

        Point outerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 9);
        //Adjust coordinates according to whole face
        outerCorner.y += Eye.leftRect.y;
        outerCorner.x += Eye.leftRect.x;
        ////////////////////////////////////////////
        EyeRegion.leftOuterEyeCorner = outerCorner;
    }
    //Mat tempw=reg._face.clone();
    //Face.drawCross(tempw, outerCorner);
    //imwrite("checkcorner.png",tempw);

}

From source file:emotion.Eye.java

private void templatingInnerCorner(Mat eyeRegion, boolean rightEyeFlag) {
    //        Mat template=imread("E:\\Studia\\II YEAR\\Team Project\\"
    //                + "Face database\\eyecorners\\rightInner.jpg",CV_8UC1);
    Mat template = imread("src\\Templates\\rightInner.jpg", CV_8UC1);
    Mat temp = new Mat(eyeRegion.height(), eyeRegion.width(), CV_8UC1); // Mat(rows, cols, type)
    cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY);
    temp = rightEyeFlag ? new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height()))
            : new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height()));
    Mat result = new Mat(eyeRegion.height(), eyeRegion.width(), eyeRegion.type());

    //(4,7)- coordinates of eye innerCorner in the template
    if (rightEyeFlag) {
        imwrite("template4righteye.jpg", template);
        imwrite("rightEyeForInnerTemplating.jpg", temp);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);
        //(4,7)- coordinates of eye innerCorner in the template
        Point innerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 7);

        StaticFunctions.drawCross(temp, innerCorner, StaticFunctions.Features.EYE_CORNERS);
        imwrite("rightEyeForInnerTemplating.jpg", temp);
        //Adjust coordinates according to whole face
        innerCorner.y += Eye.rightRect.y;
        innerCorner.x += Eye.rightRect.x;
        //We examine just left half on the right eye
        ////////////////////////////////////////////
        EyeRegion.rightInnerEyeCorner = innerCorner;
    } else {
        imwrite("leftEyeForInnerTemplating.jpg", temp);
        Core.flip(template, template, 1);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);

        Point innerCorner = new Point(maxVal.maxLoc.x + 8, maxVal.maxLoc.y + 7);

        //Adjust coordinates according to whole face
        innerCorner.y += Eye.leftRect.y;
        innerCorner.x += Eye.leftRect.x;
        //We examine just right half on the left eye
        innerCorner.x += temp.width();
        ////////////////////////////////////////////
        EyeRegion.leftInnerEyeCorner = innerCorner;
    }
}

From source file:emotion.Eye.java

public void examineEyeOpeness(boolean rightEyeFlag) {
    Rect pureEyeRegion;
    //We take a narrow vertical strip at the middle of the eye region
    //determined by the localized eye corners
    if (rightEyeFlag) {
        double regionWidth = EyeRegion.rightOuterEyeCorner.x - EyeRegion.rightInnerEyeCorner.x;
        pureEyeRegion = new Rect((int) (EyeRegion.rightInnerEyeCorner.x + regionWidth / 2 - 2),
                (int) (Eye.rightRect.y), (4), Eye.rightRect.height);
        imwrite("strictEyeRegRight.jpg", new Mat(EyeRegion._face, pureEyeRegion));
        //Setting x coordinates of eyelids
        EyeRegion.rightLowerEyelid.x = (EyeRegion.rightOuterEyeCorner.x + EyeRegion.rightInnerEyeCorner.x) / 2;
        EyeRegion.rightUpperEyelid.x = EyeRegion.rightLowerEyelid.x;
        EyeRegion.rightEyeOpeness = (EyeRegion.rightUpperEyelid.y - EyeRegion.rightLowerEyelid.y);
    } else {
        double regionWidth;
        regionWidth = EyeRegion.leftInnerEyeCorner.x - EyeRegion.leftOuterEyeCorner.x;
        pureEyeRegion = new Rect((int) (regionWidth / 2 + EyeRegion.leftOuterEyeCorner.x - 2),
                (int) (Eye.leftRect.y), (4), Eye.leftRect.height);
        imwrite("leftEyeReg.jpg", new Mat(EyeRegion._face, pureEyeRegion));
        //Setting x coordinates of eyelids
        EyeRegion.leftLowerEyelid.x = (EyeRegion.leftInnerEyeCorner.x + EyeRegion.leftOuterEyeCorner.x) / 2;
        EyeRegion.leftUpperEyelid.x = EyeRegion.leftLowerEyelid.x;
        EyeRegion.leftEyeOpeness = (EyeRegion.leftUpperEyelid.y - EyeRegion.leftLowerEyelid.y);
    }

    Mat strictEyeRegion = new Mat(EyeRegion._face, pureEyeRegion);
    Mat result = new Mat();

    strictEyeRegion.convertTo(strictEyeRegion, CvType.CV_32F);
    Core.pow(strictEyeRegion, 1.27, strictEyeRegion);
    cvtColor(strictEyeRegion, strictEyeRegion, Imgproc.COLOR_BGR2GRAY);
    imwrite("improved.jpg", strictEyeRegion);

    threshold(strictEyeRegion, result, 100, 255, Imgproc.THRESH_BINARY_INV);

    Mat strEl = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 1));
    dilate(result, result, strEl, new Point(1, 0), 3);

    for (int i = 0; i < result.width(); i++) {
        for (int j = 0; j < result.height() * 0.4; j++) {
            result.put(j, i, new double[] { 0, 0, 0 });
        }
    }
    for (int j = result.height() - 1; j >= 0; j--) {
        if (result.get(j, 0)[0] == 255) {
            if (rightEyeFlag) {

                if (EyeRegion.rightLowerEyelid.y == 0) {
                    EyeRegion.rightLowerEyelid.y = j + 3;
                    EyeRegion.rightLowerEyelid.y += Eye.rightRect.y;
                }
                EyeRegion.rightUpperEyelid.y = j;
                EyeRegion.rightUpperEyelid.y += Eye.rightRect.y;
            } else {
                if (EyeRegion.leftLowerEyelid.y == 0) {
                    EyeRegion.leftLowerEyelid.y = j + 3;
                    EyeRegion.leftLowerEyelid.y += Eye.leftRect.y;
                }
                EyeRegion.leftUpperEyelid.y = j;
                EyeRegion.leftUpperEyelid.y += Eye.leftRect.y;
            }
        }
    }
    imwrite("openessResult.jpg", result);
}

From source file:emotion.Eye.java

private Rect recalculate(Rect _input, Mat canvas) {
    Rect output = new Rect();
    int width = (int) (_input.width * 1.3);
    int height = (int) (_input.height * 1.3);
    output.x = _input.x - (width - _input.width) / 2;
    output.y = _input.y - (height) / 4;
    if (output.x < 0) {
        output.x = 0;
    } else if (output.x >= canvas.width()) {
        output.x = canvas.width() - 1;
    }
    if (output.y < 0) {
        output.y = 0;
    } else if (output.y >= canvas.height()) {
        output.y = canvas.height() - 1;
    }
    output.width = width;
    output.height = height;
    return output;
}

From source file:emotion.Eyebrow.java

public static void Harris(Mat img, boolean rightEyeFlag) {
    //Harris point extraction
    Mat harrisTestimg;
    harrisTestimg = img.clone();
    cvtColor(harrisTestimg, harrisTestimg, Imgproc.COLOR_BGR2GRAY);
    threshold(harrisTestimg, harrisTestimg, 200, 255, Imgproc.THRESH_BINARY_INV);
    Mat struct = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    erode(harrisTestimg, harrisTestimg, struct);
    dilate(harrisTestimg, harrisTestimg, struct);
    imwrite("intermediateHaaris.jpg", harrisTestimg);
    harrisTestimg.convertTo(harrisTestimg, CV_8UC1);
    ArrayList<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();

    Imgproc.findContours(harrisTestimg, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);

    //System.out.println("Average Y for contours:");
    float[] averageY = new float[contours.size()];
    for (int i = 0; i < contours.size(); ++i) {
        //We calculate mean of Y coordinates for each contour
        for (int j = 0; j < contours.get(i).total(); ++j) {
            int val = (int) contours.get(i).toArray()[j].y;
            averageY[i] += val;
        }
        averageY[i] /= contours.get(i).total();
        //System.out.println(i+") "+averageY[i]);

        if (averageY[i] <= img.height() / 2 && //We consider just the upper half of the image
                contours.get(i).total() >= img.width()) //and only contours at least as long as the image width
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(255, 255, 255));
        else
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(0, 0, 0));
    }

    MatOfPoint features = new MatOfPoint();
    Imgproc.goodFeaturesToTrack(harrisTestimg, features, 100, 0.00001, 0);

    //We keep and draw just the two extreme points: the leftmost and the rightmost
    Point eyebrowsPoints[] = new Point[2];
    for (int i = 0; i < features.toList().size(); i++) {
        if (i == 0) {
            eyebrowsPoints[0] = new Point(harrisTestimg.width() / 2, 0);
            eyebrowsPoints[1] = new Point(harrisTestimg.width() / 2, 0);
        }
        if (features.toArray()[i].x < eyebrowsPoints[0].x
                && features.toArray()[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[0] = features.toArray()[i];
        }
        if (features.toArray()[i].x > eyebrowsPoints[1].x
                && features.toArray()[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[1] = features.toArray()[i];
        }
    }
    StaticFunctions.drawCross(img, eyebrowsPoints[1], StaticFunctions.Features.EYEBROWS_ENDS);
    StaticFunctions.drawCross(img, eyebrowsPoints[0], StaticFunctions.Features.EYEBROWS_ENDS);
    imwrite("testHaris.jpg", img);
    if (rightEyeFlag) {
        EyeRegion.rightInnerEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.rightInnerEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightInnerEyebrowsCorner.y += Eye.rightRect.y;

        EyeRegion.rightOuterEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.rightOuterEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightOuterEyebrowsCorner.y += Eye.rightRect.y;
    } else {
        EyeRegion.leftInnerEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.leftInnerEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftInnerEyebrowsCorner.y += Eye.leftRect.y;

        EyeRegion.leftOuterEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.leftOuterEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftOuterEyebrowsCorner.y += Eye.leftRect.y;
    }
}
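
A hedged call-site sketch: Harris() is applied to the eye-region crops produced by the Eye constructor above, and the flag selects which EyeRegion eyebrow-corner fields are filled.

    // Sketch only: assumed call sites, not taken from this file.
    Eyebrow.Harris(Eye.rightEye, true);  // fills EyeRegion.rightInner/rightOuterEyebrowsCorner
    Eyebrow.Harris(Eye.leftEye, false);  // fills EyeRegion.leftInner/leftOuterEyebrowsCorner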