Example usage for org.opencv.core Mat cols


Introduction

On this page you can find example usage for org.opencv.core Mat cols.

Prototype

public int cols() 
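
Below is a minimal sketch of how cols() is typically combined with rows() and channels() to size a pixel buffer. The image path is a placeholder, and the snippet assumes OpenCV 3.x (where imread lives in org.opencv.imgcodecs.Imgcodecs) with the native library available on the library path.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class ColsExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat img = Imgcodecs.imread("input.jpg"); // placeholder path
        // cols() is the image width in pixels, rows() is the height
        System.out.println("Size: " + img.cols() + " x " + img.rows());
        // allocate a buffer large enough for every channel of every pixel
        byte[] buffer = new byte[img.channels() * img.cols() * img.rows()];
        img.get(0, 0, buffer);
    }
}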


Usage

From source file:LetsStart.utils.ImageViewer.java

public Image toBufferedImage(Mat matrix) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (matrix.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = matrix.channels() * matrix.cols() * matrix.rows();
    byte[] buffer = new byte[bufferSize];
    matrix.get(0, 0, buffer); // get all the pixels
    BufferedImage image = new BufferedImage(matrix.cols(), matrix.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(buffer, 0, targetPixels, 0, buffer.length);
    return image;
}

From source file:logic.featurepointextractor.EyeBrowsFPE.java

/**
 * getSkeleton  obtains a thin, 1-pixel-wide skeleton from a binary contour image.
 * @param src   input binary image
 * @return      binary skeleton image
 */

private Mat getSkeleton(Mat src) {
    Mat skel = new Mat(src.rows(), src.cols(), CV_8UC1, new Scalar(0));
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    Mat tmp = new Mat();
    Mat eroded = new Mat();
    boolean done = false;

    do {
        Imgproc.morphologyEx(src, eroded, Imgproc.MORPH_ERODE, element);
        Imgproc.morphologyEx(eroded, tmp, Imgproc.MORPH_DILATE, element);
        Core.subtract(src, tmp, tmp);
        Core.bitwise_or(skel, tmp, skel);
        eroded.copyTo(src);

        done = (Core.countNonZero(src) == 0);
    } while (!done);

    return skel;
}

From source file:logic.featurepointextractor.MouthFPE.java

/**
 * Detect mouth feature points.
 * Algorithm:           equalize the histogram of the mouth rect,
 *                      apply a horizontal Sobel filter,
 *                      find corners,
 *                      invert colors and binarize,
 *                      find the upper and lower lip points.
 * @param mc container holding the original frame and the detected mouth rect
 * @return   detected mouth feature points (may be null)
 */
@Override
public Point[] detect(MatContainer mc) {
    /**Algorithm
     *                  find pix(i) = (R-G)/R
     *                  normalize: 2arctan(pix(i))/pi
     */

    //find pix(i) = (R-G)/R
    Mat mouthRGBMat = mc.origFrame.submat(mc.mouthRect);
    List<Mat> mouthSplitChannelsList = new ArrayList<Mat>();
    Core.split(mouthRGBMat, mouthSplitChannelsList);
    //extract R-channel
    Mat mouthR = mouthSplitChannelsList.get(2);
    mouthR.convertTo(mouthR, CvType.CV_64FC1);
    //extract G-channel
    Mat mouthG = mouthSplitChannelsList.get(1);
    mouthG.convertTo(mouthG, CvType.CV_64FC1);
    //calculate (R-G)/R
    Mat dst = new Mat(mouthR.rows(), mouthR.cols(), CvType.CV_64FC1);
    mc.mouthProcessedMat = new Mat(mouthR.rows(), mouthR.cols(), CvType.CV_64FC1);

    Core.absdiff(mouthR, mouthG, dst);
    //        Core.divide(dst, mouthR, mc.mouthProcessedMat);
    mc.mouthProcessedMat = dst;
    mc.mouthProcessedMat.convertTo(mc.mouthProcessedMat, CvType.CV_8UC1);
    Imgproc.equalizeHist(mc.mouthProcessedMat, mc.mouthProcessedMat);
    //       Imgproc.blur(mc.mouthProcessedMat, mc.mouthProcessedMat, new Size(4,4));
    //        Imgproc.morphologyEx(mc.mouthProcessedMat, mc.mouthProcessedMat, Imgproc.MORPH_OPEN, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(4,4)));
    Imgproc.threshold(mc.mouthProcessedMat, mc.mouthProcessedMat, 230, 255, THRESH_BINARY);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(mc.mouthProcessedMat, contours, new Mat(), Imgproc.RETR_TREE,
            Imgproc.CHAIN_APPROX_SIMPLE);

    //find the biggest contour
    int maxSize = -1;
    int tmpSize = -1;
    int index = -1;

    Rect centMouthRect = new Rect(mc.mouthRect.x + mc.mouthRect.width / 4,
            mc.mouthRect.y + mc.mouthRect.height / 4, mc.mouthRect.width / 2, mc.mouthRect.height / 2);
    //if no contours were found, there is nothing to detect
    if (contours.isEmpty())
        return null;

    maxSize = contours.get(0).toArray().length;
    tmpSize = 0;
    index = 0;

    //find max contour
    for (int j = 0; j < contours.size(); ++j) {
        //if contour is vertical, exclude it 
        Rect boundRect = Imgproc.boundingRect(contours.get(j));
        int centX = mc.mouthRect.x + boundRect.x + boundRect.width / 2;
        int centY = mc.mouthRect.y + boundRect.y + boundRect.height / 2;
        //                LOG.info("Center = " + centX + "; " + centY);
        //                LOG.info("Rect = " + centMouthRect.x + "; " + centMouthRect.y);
        if (!centMouthRect.contains(new Point(centX, centY)))
            continue;

        tmpSize = contours.get(j).toArray().length;

        LOG.info("Contour " + j + "; size = " + tmpSize);

        if (tmpSize > maxSize) {
            maxSize = tmpSize;
            index = j;
        }
    }

    //approximate curve
    Point[] p1 = contours.get(index).toArray();
    MatOfPoint2f p2 = new MatOfPoint2f(p1);
    MatOfPoint2f p3 = new MatOfPoint2f();
    Imgproc.approxPolyDP(p2, p3, 1, true);

    p1 = p3.toArray();

    MatOfInt tmpMatOfPoint = new MatOfInt();
    Imgproc.convexHull(new MatOfPoint(p1), tmpMatOfPoint);

    Rect boundRect = Imgproc.boundingRect(new MatOfPoint(p1));
    if (boundRect.area() / mc.mouthRect.area() > 0.3)
        return null;

    int size = (int) tmpMatOfPoint.size().height;
    Point[] _p1 = new Point[size];
    int[] a = tmpMatOfPoint.toArray();

    _p1[0] = new Point(p1[a[0]].x + mc.mouthRect.x, p1[a[0]].y + mc.mouthRect.y);
    Core.circle(mc.origFrame, _p1[0], 3, new Scalar(0, 0, 255), -1);
    for (int i = 1; i < size; i++) {
        _p1[i] = new Point(p1[a[i]].x + mc.mouthRect.x, p1[a[i]].y + mc.mouthRect.y);
        Core.circle(mc.origFrame, _p1[i], 3, new Scalar(0, 0, 255), -1);
        Core.line(mc.origFrame, _p1[i - 1], _p1[i], new Scalar(255, 0, 0), 2);
    }
    Core.line(mc.origFrame, _p1[size - 1], _p1[0], new Scalar(255, 0, 0), 2);

    /*        contours.set(index, new MatOfPoint(_p1));

            mc.mouthProcessedMat.setTo(new Scalar(0));

            Imgproc.drawContours(mc.mouthProcessedMat, contours, index, new Scalar(255), -1);
    */
    mc.mouthMatOfPoint = _p1;

    MatOfPoint matOfPoint = new MatOfPoint(_p1);
    mc.mouthBoundRect = Imgproc.boundingRect(matOfPoint);
    mc.features.mouthBoundRect = mc.mouthBoundRect;

    /**extract feature points:  1 most left
     *                          2 most right
     *                          3,4 up
     *                          5,6 down
     */

    //        mc.mouthMatOfPoint = extractFeaturePoints(contours.get(index));

    return null;
}

From source file:logic.helpclass.Util.java

/**
 * Track a template within the image by searching the neighborhood of the previous location.
 * @param grayFrame grayscale frame to search in
 * @param rect      previous location of the template
 * @param temp      template image
 * @return          updated location, or null if the search window falls outside the frame
 */
static public Rect trackTemplate(Mat grayFrame, Rect rect, Mat temp) {
    Rect searchRect = new Rect(new Point(rect.x - rect.width / 2, rect.y - rect.height / 2),
            new Point(rect.x + rect.width * 3 / 2, rect.y + rect.height * 3 / 2));

    // matchTemplate result size is (H - h + 1) x (W - w + 1)
    Mat dst = new Mat(searchRect.height - temp.height() + 1, searchRect.width - temp.width() + 1, CV_32FC1);

    if ((searchRect.x < 0 || searchRect.y < 0) || (searchRect.x + searchRect.width > grayFrame.cols()
            || searchRect.y + searchRect.height > grayFrame.rows()))
        return null;

    Imgproc.matchTemplate(grayFrame.submat(searchRect), temp, dst, Imgproc.TM_SQDIFF_NORMED);

    Core.MinMaxLocResult result = Core.minMaxLoc(dst);

    //check the new location: if the coordinates change too drastically, keep the previous location
    if (true) {
        rect.x = (int) (searchRect.x + result.minLoc.x);
        rect.y = (int) (searchRect.y + result.minLoc.y);
        return rect;
    } else {
        return null;
    }
}

From source file:Main.Camera.CameraController.java

public static BufferedImage matToBufferedImage(Mat matrix, BufferedImage bimg) {
    if (matrix != null) {
        int cols = matrix.cols();
        int rows = matrix.rows();
        int elemSize = (int) matrix.elemSize();
        byte[] data = new byte[cols * rows * elemSize];
        int type;
        matrix.get(0, 0, data);
        switch (matrix.channels()) {
        case 1:
            type = BufferedImage.TYPE_BYTE_GRAY;
            break;
        case 3:
            type = BufferedImage.TYPE_3BYTE_BGR;
            // bgr to rgb  
            byte b;
            for (int i = 0; i < data.length; i = i + 3) {
                b = data[i];
                data[i] = data[i + 2];
                data[i + 2] = b;
            }
            break;
        default:
            return null;
        }

        // Reuse existing BufferedImage if possible
        if (bimg == null || bimg.getWidth() != cols || bimg.getHeight() != rows || bimg.getType() != type) {
            bimg = new BufferedImage(cols, rows, type);
        }
        bimg.getRaster().setDataElements(0, 0, cols, rows, data);
    } else { // mat was null
        bimg = null;
    }
    return bimg;
}
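
As a usage illustration, here is a short sketch of a capture loop that reuses the returned BufferedImage between frames. VideoCapture is assumed from org.opencv.videoio (OpenCV 3.x) and the device index 0 is a placeholder.

public static void previewLoop() {
    // Hypothetical capture loop; device index 0 is a placeholder.
    VideoCapture capture = new VideoCapture(0);
    Mat frame = new Mat();
    BufferedImage img = null;
    while (capture.read(frame)) {
        // The previous BufferedImage is reused when its size and type still match.
        img = matToBufferedImage(frame, img);
        // ... hand img to the UI for display ...
    }
    capture.release();
}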

From source file:main.Utils.java

public BufferedImage convertMatToImage(Mat mat) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (mat.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }

    int bufferSize = mat.channels() * mat.cols() * mat.rows();
    byte[] bytes = new byte[bufferSize];
    mat.get(0, 0, bytes);
    BufferedImage imagem = new BufferedImage(mat.cols(), mat.rows(), type);
    byte[] targetPixels = ((DataBufferByte) imagem.getRaster().getDataBuffer()).getData();
    System.arraycopy(bytes, 0, targetPixels, 0, bytes.length);
    return imagem;
}

From source file:model.grayscaleClass.java

/**
 * Converts the uploaded image to grayscale and writes the result to disk.
 * @param image file name of the uploaded image
 * @param ex    file extension of the uploaded image
 * @return      the grayscale image file
 */
public File imagePreprocessing(String image, String ex) {
    BufferedImage bImge = null;
    BufferedImage bImage2 = null;
    File grayscle = null;

    try {

        // loadOpenCV_Lib();
        //String path = "opencv\\build\\java\\x64\\opencv_java300.dll";
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        FileInputStream fileName = new FileInputStream(
                "C:\\bimla\\Dev\\java\\OCRSystem\\WebContent\\uploadedImage\\" + image);
        InputStream input = fileName;
        bImge = ImageIO.read(input);
        byte[] imgeByte = ((DataBufferByte) bImge.getRaster().getDataBuffer()).getData();
        Mat mat1 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC3);
        mat1.put(0, 0, imgeByte);
        Mat mat2 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC1);
        Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGB2GRAY);
        byte[] imageData = new byte[mat2.rows() * mat2.cols() * (int) (mat2.elemSize())];
        mat2.get(0, 0, imageData);
        bImage2 = new BufferedImage(mat2.cols(), mat2.rows(), BufferedImage.TYPE_BYTE_GRAY);
        bImage2.getRaster().setDataElements(0, 0, mat2.cols(), mat2.rows(), imageData);

        String extn = null;
        /*   switch (extnsn) {
               case ".jpg":
           extn = "jpg";
           break;
               case ".png":
           extn = "png";
           break;
               case ".pdf":
           extn = "pdf";
           break;
               case ".tiff":
           extn = "tif";
           break;
                
           }*/
        //writing the grayscale image to the folder
        grayscle = new File(
                "C:\\bimla\\Dev\\java\\OCRSystem\\WebContent\\uploadedImage\\grayscale" + "." + "jpg");
        ImageIO.write(bImage2, "jpg", grayscle);
    } catch (IOException ex1) {
        System.out.println("" + ex1.getMessage());
    } catch (Exception ex1) {
        Logger.getLogger(grayscaleClass.class.getName()).log(Level.SEVERE, null, ex1);
    }
    return grayscle;

}

From source file:model.JointPDF.java

public JointPDF(Mat imgR, Mat imgO) {
    int x, y;
    double count_red, count_green, count_blue, total_red = 0, total_green = 0, total_blue = 0;
    PDF_red = new double[256][256];
    PDF_green = new double[256][256];
    PDF_blue = new double[256][256];

    // Reference Image = x, Other Image = y
    // Make Joint Histogram
    for (int i = 0; i < imgR.rows(); i++) {
        for (int j = 0; j < imgR.cols(); j++) {
            double[] rgbR = imgR.get(i, j);
            double[] rgbO = imgO.get(i, j);

            // Search for Blue PDF
            y = (int) rgbO[0];
            x = (int) rgbR[0];
            PDF_blue[y][x] += 1;

            // Search for Green PDF
            y = (int) rgbO[1];
            x = (int) rgbR[1];
            PDF_green[y][x] += 1;

            // Search for Red PDF
            y = (int) rgbO[2];
            x = (int) rgbR[2];
            PDF_red[y][x] += 1;
        }
    }

    //        System.out.println("ORIGINAL");
    //        for (int i = 0; i < 256; i++) {
    //            for (int j = 0; j < 256; j++) {
    //                if (PDF_blue[i][j] > 0) {
    //                    System.out.println("(" + i + "," + j + "):" + PDF_blue[i][j]);
    //                }
    //            }
    //        }
    // Divide each bin by the total number of pixels
    for (int i = 0; i < 256; i++) {
        for (int j = 0; j < 256; j++) {
            count_blue = PDF_blue[i][j];
            count_green = PDF_green[i][j];
            count_red = PDF_red[i][j];

            if (count_blue != 0) {
                PDF_blue[i][j] = count_blue / imgR.total();
                total_blue += PDF_blue[i][j];
            }
            if (count_green != 0) {
                PDF_green[i][j] = count_green / imgR.total();
                total_green += PDF_green[i][j];
            }
            if (count_red != 0) {
                PDF_red[i][j] = count_red / imgR.total();
                total_red += PDF_red[i][j];
            }
        }
    }

    // Normalize each channel so its bins sum to 1
    for (int i = 0; i < 256; i++) {
        for (int j = 0; j < 256; j++) {
            count_blue = PDF_blue[i][j];
            count_green = PDF_green[i][j];
            count_red = PDF_red[i][j];

            if (count_blue != 0) {
                PDF_blue[i][j] = count_blue / total_blue;
            }
            if (count_green != 0) {
                PDF_green[i][j] = count_green / total_green;
            }
            if (count_red != 0) {
                PDF_red[i][j] = count_red / total_red;
            }
        }
    }
    //        System.out.println("NORMALIZE");
    //        for (int i = 0; i < 256; i++) {
    //            for (int j = 0; j < 256; j++) {
    //                if (PDF_red[i][j] > 0) {
    //                    System.out.println("(" + i + "," + j + "):" + String.format("%.4f",PDF_red[i][j]));
    //                }
    //            }
    //        }
}
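
A brief usage sketch for the constructor above. The file paths are placeholders, imread is assumed from org.opencv.imgcodecs.Imgcodecs (OpenCV 3.x), and both images must share the same dimensions since the constructor compares pixels position by position.

System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
Mat reference = Imgcodecs.imread("reference.png"); // placeholder path
Mat other = Imgcodecs.imread("other.png");         // placeholder path
JointPDF pdf = new JointPDF(reference, other);
// the PDF_red, PDF_green and PDF_blue arrays now hold the normalized joint histograms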

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * @param imagePath   name of the original image
 * @param mapFilePath name of the original image's map file
 * @param isBlackBg   if true, each region is extracted on a black background
 * @return            list of segmented regions as Mat objects
 */
public static List<Mat> getSegmentedRegions(String imagePath, String mapFilePath, boolean isBlackBg) {

    Mat org = getImageMat(imagePath);
    RegionMap regionMap = RegionMapFactory.getRegionMap(imagePath, mapFilePath);

    List<Mat> result = new ArrayList<Mat>();

    Mat map = regionMap.getMap();

    for (Integer label : regionMap.getLabels()) {

        List<Point> points = new ArrayList<Point>();

        for (int i = 0; i < map.rows(); i++) {
            for (int j = 0; j < map.cols(); j++) {

                double[] temp = map.get(i, j);
                if (temp[0] == label) {
                    // Warning! col=x=j , row=y=i
                    points.add(new Point(j, i));
                }
            }
        }

        Point[] arr = points.toArray(new Point[points.size()]);
        Rect rect = Imgproc.boundingRect(new MatOfPoint(arr));

        Mat region;
        if (isBlackBg) {
            region = getImageWithBlackBg(org, points).submat(rect);
        } else {
            region = org.submat(rect);
        }
        result.add(region);
    }

    return result;
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * @param imagePath   name of the original image
 * @param mapFilePath name of the original image's map file
 * @param isBlackBg   if true, each region is extracted on a black background
 * @return            list of segmented regions wrapped in ImageModel objects
 */
public static List<ImageModel> getSegmentedRegionsAsImageModels(String imagePath, String mapFilePath,
        boolean isBlackBg) {

    Mat org = getImageMat(imagePath);
    RegionMap regionMap = RegionMapFactory.getRegionMap(imagePath, mapFilePath);

    List<ImageModel> result = new ArrayList<ImageModel>();

    Mat map = regionMap.getMap();

    for (Integer label : regionMap.getLabels()) {

        List<Point> points = new ArrayList<Point>();

        for (int i = 0; i < map.rows(); i++) {
            for (int j = 0; j < map.cols(); j++) {

                double[] temp = map.get(i, j);
                if (temp[0] == label) {
                    // Warning! col=x=j , row=y=i
                    points.add(new Point(j, i));
                }
            }
        }

        Point[] arr = points.toArray(new Point[points.size()]);

        Rect rect = null;
        try {
            rect = Imgproc.boundingRect(new MatOfPoint(arr));
        } catch (Exception ex) {
            logger.error("", ex);
            continue;
        }

        Mat region;
        if (isBlackBg) {
            region = getImageWithBlackBg(org, points).submat(rect);
        } else {
            region = org.submat(rect);
        }

        ImageModel imgModel = new ImageModel();
        imgModel.setMat(region);
        imgModel.setRelativeToOrg(rect);

        result.add(imgModel);
    }

    return result;
}