Example usage for org.opencv.core Mat rows

Introduction

On this page you can find example usages of org.opencv.core.Mat.rows().

Prototype

public int rows() 
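
Before the collected examples, a minimal, self-contained sketch of the call itself (the class name MatRowsDemo and the 640x480 size are illustrative only): rows() returns the number of rows in the matrix, i.e. the image height, and is typically combined with cols() and elemSize() to size a pixel buffer, as most of the examples below do.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatRowsDemo {
    public static void main(String[] args) {
        // Assumes the OpenCV native library is available on java.library.path
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A 480-row by 640-column, 3-channel 8-bit matrix (a 640x480 BGR image)
        Mat mat = Mat.zeros(480, 640, CvType.CV_8UC3);

        // rows() is the height, cols() is the width; with elemSize() they
        // determine how large a byte buffer is needed to hold all pixels
        int rows = mat.rows();
        int cols = mat.cols();
        byte[] buffer = new byte[rows * cols * (int) mat.elemSize()];
        mat.get(0, 0, buffer);

        System.out.println(rows + " x " + cols + ", buffer length = " + buffer.length);
    }
}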

Usage

From source file:logic.helpclass.Util.java

/**
 * Track template within the image
 * @param grayFrame
 * @param rect
 * @param temp
 * @return 
 */
static public Rect trackTemplate(Mat grayFrame, Rect rect, Mat temp) {
    Rect searchRect = new Rect(new Point(rect.x - rect.width / 2, rect.y - rect.height / 2),
            new Point(rect.x + rect.width * 3 / 2, rect.y + rect.height * 3 / 2));

    // Mat(rows, cols, type): the matchTemplate result map is (H - h + 1) x (W - w + 1)
    Mat dst = new Mat(searchRect.height - temp.height() + 1, searchRect.width - temp.width() + 1,
            CvType.CV_32FC1);

    if ((searchRect.x < 0 || searchRect.y < 0) || (searchRect.x + searchRect.width > grayFrame.cols()
            || searchRect.y + searchRect.height > grayFrame.rows()))
        return null;

    Imgproc.matchTemplate(grayFrame.submat(searchRect), temp, dst, Imgproc.TM_SQDIFF_NORMED);
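    // With TM_SQDIFF_NORMED, lower values mean better matches, so the best
    // match position is the minimum location reported by minMaxLoc below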

    Core.MinMaxLocResult result = Core.minMaxLoc(dst);

    // Check the new location: if the coordinates jump too far, keep the previous location
    // (the condition below is a placeholder that always accepts the new location)
    if (true) {
        rect.x = (int) (searchRect.x + result.minLoc.x);
        rect.y = (int) (searchRect.y + result.minLoc.y);
        return rect;
    } else {
        return null;
    }
}

From source file:Main.Camera.CameraController.java

public static BufferedImage matToBufferedImage(Mat matrix, BufferedImage bimg) {
    if (matrix != null) {
        int cols = matrix.cols();
        int rows = matrix.rows();
        int elemSize = (int) matrix.elemSize();
        byte[] data = new byte[cols * rows * elemSize];
        int type;
        matrix.get(0, 0, data);
        switch (matrix.channels()) {
        case 1:
            type = BufferedImage.TYPE_BYTE_GRAY;
            break;
        case 3:
            type = BufferedImage.TYPE_3BYTE_BGR;
            // bgr to rgb  
            byte b;
            for (int i = 0; i < data.length; i = i + 3) {
                b = data[i];
                data[i] = data[i + 2];
                data[i + 2] = b;
            }
            break;
        default:
            return null;
        }

        // Reuse existing BufferedImage if possible
        if (bimg == null || bimg.getWidth() != cols || bimg.getHeight() != rows || bimg.getType() != type) {
            bimg = new BufferedImage(cols, rows, type);
        }
        bimg.getRaster().setDataElements(0, 0, cols, rows, data);
    } else { // mat was null
        bimg = null;
    }
    return bimg;
}

From source file:main.Utils.java

public BufferedImage convertMatToImage(Mat mat) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (mat.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }

    int bufferSize = mat.channels() * mat.cols() * mat.rows();
    byte[] bytes = new byte[bufferSize];
    mat.get(0, 0, bytes);
    BufferedImage imagem = new BufferedImage(mat.cols(), mat.rows(), type);
    byte[] targetPixels = ((DataBufferByte) imagem.getRaster().getDataBuffer()).getData();
    System.arraycopy(bytes, 0, targetPixels, 0, bytes.length);
    return imagem;
}

From source file:model.grayscaleClass.java

/**
 *
 * @param image
 * @return
 */
public File imagePreprocessing(String image, String ex) {
    BufferedImage bImge = null;
    BufferedImage bImage2 = null;
    File grayscle = null;

    try {

        // loadOpenCV_Lib();
        //String path = "opencv\\build\\java\\x64\\opencv_java300.dll";
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        FileInputStream fileName = new FileInputStream(
                "C:\\bimla\\Dev\\java\\OCRSystem\\WebContent\\uploadedImage\\" + image);
        InputStream input = fileName;
        bImge = ImageIO.read(input);
        byte[] imgeByte = ((DataBufferByte) bImge.getRaster().getDataBuffer()).getData();
        Mat mat1 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC3);
        mat1.put(0, 0, imgeByte);
        Mat mat2 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC1);
        Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGB2GRAY);
        byte[] imageData = new byte[mat2.rows() * mat2.cols() * (int) (mat2.elemSize())];
        mat2.get(0, 0, imageData);
        bImage2 = new BufferedImage(mat2.cols(), mat2.rows(), BufferedImage.TYPE_BYTE_GRAY);
        bImage2.getRaster().setDataElements(0, 0, mat2.cols(), mat2.rows(), imageData);

        String extn = null;
        /*   switch (extnsn) {
               case ".jpg":
           extn = "jpg";
           break;
               case ".png":
           extn = "png";
           break;
               case ".pdf":
           extn = "pdf";
           break;
               case ".tiff":
           extn = "tif";
           break;
                
           }*/
        //writing the grayscale image to the folder
        grayscle = new File(
                "C:\\bimla\\Dev\\java\\OCRSystem\\WebContent\\uploadedImage\\grayscale" + "." + "jpg");
        ImageIO.write(bImage2, "jpg", grayscle);
    } catch (IOException ex1) {
        System.out.println("" + ex1.getMessage());
    } catch (Exception ex1) {
        Logger.getLogger(grayscaleClass.class.getName()).log(Level.SEVERE, null, ex1);
    }
    return grayscle;

}

From source file:model.JointPDF.java

public JointPDF(Mat imgR, Mat imgO) {
    int x, y;
    double count_red, count_green, count_blue, total_red = 0, total_green = 0, total_blue = 0;
    PDF_red = new double[256][256];
    PDF_green = new double[256][256];
    PDF_blue = new double[256][256];

    // Reference Image = x, Other Image = y
    // Make Joint Histogram
    for (int i = 0; i < imgR.rows(); i++) {
        for (int j = 0; j < imgR.cols(); j++) {
            double[] rgbR = imgR.get(i, j);
            double[] rgbO = imgO.get(i, j);

            // Search for Blue PDF
            y = (int) rgbO[0];
            x = (int) rgbR[0];
            PDF_blue[y][x] += 1;

            // Search for Green PDF
            y = (int) rgbO[1];
            x = (int) rgbR[1];
            PDF_green[y][x] += 1;

            // Search for Red PDF
            y = (int) rgbO[2];
            x = (int) rgbR[2];
            PDF_red[y][x] += 1;
        }
    }

    //        System.out.println("ORIGINAL");
    //        for (int i = 0; i < 256; i++) {
    //            for (int j = 0; j < 256; j++) {
    //                if (PDF_blue[i][j] > 0) {
    //                    System.out.println("(" + i + "," + j + "):" + PDF_blue[i][j]);
    //                }
    //            }
    //        }
    // Divide each joint-histogram count by the total number of pixels
    for (int i = 0; i < 256; i++) {
        for (int j = 0; j < 256; j++) {
            count_blue = PDF_blue[i][j];
            count_green = PDF_green[i][j];
            count_red = PDF_red[i][j];

            if (count_blue != 0) {
                PDF_blue[i][j] = count_blue / imgR.total();
                total_blue += PDF_blue[i][j];
            }
            if (count_green != 0) {
                PDF_green[i][j] = count_green / imgR.total();
                total_green += PDF_green[i][j];
            }
            if (count_red != 0) {
                PDF_red[i][j] = count_red / imgR.total();
                total_red += PDF_red[i][j];
            }
        }
    }

    // Normalize each PDF so that its entries sum to 1
    for (int i = 0; i < 256; i++) {
        for (int j = 0; j < 256; j++) {
            count_blue = PDF_blue[i][j];
            count_green = PDF_green[i][j];
            count_red = PDF_red[i][j];

            if (count_blue != 0) {
                PDF_blue[i][j] = count_blue / total_blue;
            }
            if (count_green != 0) {
                PDF_green[i][j] = count_green / total_green;
            }
            if (count_red != 0) {
                PDF_red[i][j] = count_red / total_red;
            }
        }
    }
    //        System.out.println("NORMALIZE");
    //        for (int i = 0; i < 256; i++) {
    //            for (int j = 0; j < 256; j++) {
    //                if (PDF_red[i][j] > 0) {
    //                    System.out.println("(" + i + "," + j + "):" + String.format("%.4f",PDF_red[i][j]));
    //                }
    //            }
    //        }
}

From source file:mvision.Bhattacharyya.java

public Mat histogram(String img) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat image = Highgui.imread(img);

    //Mat image = Highgui.imread("C:\\image1.jpg");

    //Mat src = new Mat(image.height(), image.width(), CvType.CV_8UC2);

    Imgproc.cvtColor(image, image, Imgproc.COLOR_RGB2HSV);
    java.util.List<Mat> matList = new LinkedList<Mat>();
    matList.add(image);
    Mat histogram = new Mat();
    MatOfFloat ranges = new MatOfFloat(0, 256);
    MatOfInt histSize = new MatOfInt(255);
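    // Histogram over channel 0 of the converted image (hue), 255 bins over the range [0, 256)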
    Imgproc.calcHist(matList, new MatOfInt(0), new Mat(), histogram, histSize, ranges);

    // Create space for histogram image
    Mat histImage = Mat.zeros(100, (int) histSize.get(0, 0)[0], CvType.CV_8UC1);

    histogram.convertTo(histogram, CvType.CV_32F);

    // Normalize histogram      
    Core.normalize(histogram, histogram, 1, histImage.rows(), Core.NORM_MINMAX, -1, new Mat());
    // Draw lines for histogram points
    for (int i = 0; i < (int) histSize.get(0, 0)[0]; i++) {
        Core.line(histImage, new org.opencv.core.Point(i, histImage.rows()),
                new org.opencv.core.Point(i, histImage.rows() - Math.round(histogram.get(i, 0)[0])),
                new Scalar(255, 255, 255), 1, 8, 0);
    }
    return histogram;

}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * @param imagePath   name of the original image
 * @param mapFilePath name of the original image's map file
 * @return
 */
public static List<Mat> getSegmentedRegions(String imagePath, String mapFilePath, boolean isBlackBg) {

    Mat org = getImageMat(imagePath);
    RegionMap regionMap = RegionMapFactory.getRegionMap(imagePath, mapFilePath);

    List<Mat> result = new ArrayList<Mat>();

    Mat map = regionMap.getMap();

    for (Integer label : regionMap.getLabels()) {

        List<Point> points = new ArrayList<Point>();

        for (int i = 0; i < map.rows(); i++) {
            for (int j = 0; j < map.cols(); j++) {

                double[] temp = map.get(i, j);
                if (temp[0] == label) {
                    // Warning! col=x=j , row=y=i
                    points.add(new Point(j, i));
                }
            }
        }

        Point[] arr = points.toArray(new Point[points.size()]);
        Rect rect = Imgproc.boundingRect(new MatOfPoint(arr));

        Mat region;
        if (isBlackBg) {
            region = getImageWithBlackBg(org, points).submat(rect);
        } else {
            region = org.submat(rect);
        }
        result.add(region);
    }

    return result;
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * @param imagePath
 * @param mapFilePath
 * @param isBlackBg
 * @return
 */
public static List<ImageModel> getSegmentedRegionsAsImageModels(String imagePath, String mapFilePath,
        boolean isBlackBg) {

    Mat org = getImageMat(imagePath);
    RegionMap regionMap = RegionMapFactory.getRegionMap(imagePath, mapFilePath);

    List<ImageModel> result = new ArrayList<ImageModel>();

    Mat map = regionMap.getMap();

    for (Integer label : regionMap.getLabels()) {

        List<Point> points = new ArrayList<Point>();

        for (int i = 0; i < map.rows(); i++) {
            for (int j = 0; j < map.cols(); j++) {

                double[] temp = map.get(i, j);
                if (temp[0] == label) {
                    // Warning! col=x=j , row=y=i
                    points.add(new Point(j, i));
                }
            }
        }

        Point[] arr = points.toArray(new Point[points.size()]);

        Rect rect = null;
        try {
            rect = Imgproc.boundingRect(new MatOfPoint(arr));
        } catch (Exception ex) {
            logger.error("", ex);
            continue;
        }

        Mat region;
        if (isBlackBg) {
            region = getImageWithBlackBg(org, points).submat(rect);
        } else {
            region = org.submat(rect);
        }

        ImageModel imgModel = new ImageModel();
        imgModel.setMat(region);
        imgModel.setRelativeToOrg(rect);

        result.add(imgModel);
    }

    return result;
}

From source file:net.semanticmetadata.lire.imageanalysis.features.local.opencvfeatures.CvSiftExtractor.java

License:Open Source License

@Override
public void extract(BufferedImage img) {
    MatOfKeyPoint keypoints = new MatOfKeyPoint();
    Mat descriptors = new Mat();
    List<KeyPoint> myKeys;
    //        Mat img_object = Highgui.imread(image, 0); //0 = CV_LOAD_IMAGE_GRAYSCALE
    //        detector.detect(img_object, keypoints);
    byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
    Mat matRGB = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC3);
    matRGB.put(0, 0, data);
    Mat matGray = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
    Imgproc.cvtColor(matRGB, matGray, Imgproc.COLOR_BGR2GRAY); //TODO: RGB or BGR?
    byte[] dataGray = new byte[matGray.rows() * matGray.cols() * (int) (matGray.elemSize())];
    matGray.get(0, 0, dataGray);

    detector.detect(matGray, keypoints);
    extractor.compute(matGray, keypoints, descriptors);
    myKeys = keypoints.toList();

    features = new LinkedList<CvSiftFeature>();
    KeyPoint key;
    CvSiftFeature feat;
    double[] desc;
    int cols, rows = myKeys.size();
    for (int i = 0; i < rows; i++) {
        cols = (descriptors.row(i)).cols();
        desc = new double[cols];
        key = myKeys.get(i);
        for (int j = 0; j < cols; j++) {
            desc[j] = descriptors.get(i, j)[0];
        }
        feat = new CvSiftFeature(key.pt.x, key.pt.y, key.size, desc);
        features.add(feat);
    }
}

From source file:net.semanticmetadata.lire.imageanalysis.features.local.opencvfeatures.CvSiftExtractor.java

License:Open Source License

public LinkedList<CvSiftFeature> computeSiftKeypoints(BufferedImage img) {
    MatOfKeyPoint keypoints = new MatOfKeyPoint();
    List<KeyPoint> myKeys;
    //        Mat img_object = Highgui.imread(image, 0); //0 = CV_LOAD_IMAGE_GRAYSCALE
    //        detector.detect(img_object, keypoints);
    byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
    Mat matRGB = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC3);
    matRGB.put(0, 0, data);
    Mat matGray = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
    Imgproc.cvtColor(matRGB, matGray, Imgproc.COLOR_BGR2GRAY); //TODO: RGB or BGR?
    byte[] dataGray = new byte[matGray.rows() * matGray.cols() * (int) (matGray.elemSize())];
    matGray.get(0, 0, dataGray);

    detector.detect(matGray, keypoints);
    myKeys = keypoints.toList();

    LinkedList<CvSiftFeature> myKeypoints = new LinkedList<CvSiftFeature>();
    KeyPoint key;
    CvSiftFeature feat;
    for (Iterator<KeyPoint> iterator = myKeys.iterator(); iterator.hasNext();) {
        key = iterator.next();
        feat = new CvSiftFeature(key.pt.x, key.pt.y, key.size, null);
        myKeypoints.add(feat);
    }

    return myKeypoints;
}