Example usage for org.opencv.core Mat rows

List of usage examples for org.opencv.core Mat rows

Introduction

This page lists example usages of the org.opencv.core Mat rows() method.

Prototype

public int rows() 
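
rows() returns the number of rows in the matrix, which for an image is its height in pixels. A minimal sketch of the call, assuming the OpenCV native library is available under its default name:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatRowsExample {
    public static void main(String[] args) {
        // Load the native OpenCV library bundled with the Java bindings.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A 480x640, 3-channel matrix filled with black pixels.
        Mat m = new Mat(480, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));

        // rows() is the height, cols() is the width.
        System.out.println("rows = " + m.rows() + ", cols = " + m.cols());
    }
}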

Usage

From source file:i2r.snap2inspect.SamplePresentation.java

License:Apache License

public void setImageDynamic(Mat m) {
    // convert to bitmap:
    Bitmap bm = Bitmap.createBitmap(m.cols(), m.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(m, bm);
    mImageView.setImageBitmap(bm);
}

From source file:imageanalyzercv.ImageAnalyzerCV.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    System.out.println("path: " + System.getProperty("java.library.path"));
    System.loadLibrary("opencv_java300");

    Mat m = Highgui.imread("/Users/chintan/Downloads/software/image_analyis/mydata/SAM_0763.JPG");
    System.out.println("m = " + m.height());
    MatOfKeyPoint points = new MatOfKeyPoint();
    FeatureDetector.create(FeatureDetector.SURF).detect(m, points);

    Mat m2 = Highgui.imread("/Users/chintan/Downloads/software/image_analyis/mydata/SAM_0764.JPG");
    System.out.println("m = " + m2.height());
    MatOfKeyPoint points2 = new MatOfKeyPoint();
    FeatureDetector.create(FeatureDetector.SURF).detect(m2, points2);

    // BRISK descriptors are computed here, despite the "Surf" variable name
    DescriptorExtractor SurfExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
    Mat imag1Desc = new Mat();
    SurfExtractor.compute(m, points, imag1Desc);

    Mat imag2Desc = new Mat();
    SurfExtractor.compute(m2, points2, imag2Desc);

    MatOfDMatch matches = new MatOfDMatch();

    Mat imgd = new Mat();
    imag1Desc.copyTo(imgd);
    System.out.println(imgd.size());
    DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING).match(imag2Desc, imag1Desc, matches);

    double min_distance = 1000.0;
    double max_distance = 0.0;
    DMatch[] matchArr = matches.toArray();
    for (int i = 0; i < matchArr.length; i++) {
        if (matchArr[i].distance > max_distance)
            max_distance = matchArr[i].distance;
        if (matchArr[i].distance < min_distance)
            min_distance = matchArr[i].distance;
    }

    ArrayList<DMatch> good_matches = new ArrayList<DMatch>();

    System.out.println("Min Distance: " + min_distance + "  Max distance: " + max_distance);
    double totalScore = 0.0;
    for (int j = 0; j < imag1Desc.rows() && j < matchArr.length; j++) {
        if ((matchArr[j].distance <= (11 * min_distance)) && (matchArr[j].distance >= min_distance * 1)) {
            good_matches.add(matchArr[j]);
            //System.out.println(matchArr[j]);
            totalScore = totalScore + matchArr[j].distance;

        }
        //good_matches.add(matchArr[j]);

    }
    System.out.println((1 - (totalScore / (good_matches.size() * ((max_distance + min_distance) / 2)))) * 100);
    // System.out.println(matches.toList().size());
    Mat out = new Mat();
    MatOfDMatch mats = new MatOfDMatch();
    mats.fromList(good_matches);
    Features2d.drawMatches(m2, points2, m, points, mats, out);
    Highgui.imwrite("/Users/chintan/Downloads/one2.jpg", out);
}

From source file:imagegame.Camera.java

public static BufferedImage mat2BufferedImage(Mat mat) {
    //        MatOfByte buffer = new MatOfByte();
    //        Imgcodecs.imencode(".png", mat, buffer);
    int type = mat.channels() > 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;
    BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
    mat.get(0, 0, ((DataBufferByte) image.getRaster().getDataBuffer()).getData());
    return image;
    //return new Image(new ByteArrayInputStream(buffer.toArray()));
}
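
A possible call site for the helper above, assuming OpenCV 3.x (the commented-out lines already reference Imgcodecs) and a hypothetical input path:

    Mat frame = Imgcodecs.imread("frame.png"); // hypothetical path
    if (!frame.empty()) {
        BufferedImage img = mat2BufferedImage(frame);
        System.out.println("Converted a " + frame.rows() + "x" + frame.cols() + " Mat to a BufferedImage");
    }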

From source file:imageprocess.HistogramProcessor.java

public static Mat getHistogramImage(Mat image) {

    // Compute histogram first
    Mat hist = getGrayHistogram(image);
    // Get min and max bin values

    MinMaxLocResult locPeak = Core.minMaxLoc(hist);
    double maxVal = locPeak.maxVal;
    double minVal = locPeak.minVal;

    // Square canvas on which to draw the histogram; the loop below draws the
    // bins in a fixed 256x256 area regardless of the canvas size
    Mat histImg = new Mat(image.rows(), image.rows(), CV_8U, new Scalar(255));

    // set highest point at 90% of nbins
    int hpt = (int) (0.9 * 256);

    // Draw vertical line for each bin 
    for (int h = 0; h < 256; h++) {

        double[] f = hist.get(h, 0);
        float binVal = (float) f[0];
        int intensity = (int) (binVal * hpt / maxVal);
        Core.line(histImg, new Point(h, 256.0d), new Point(h, 256.0d - intensity), Scalar.all(0));
    }
    return histImg;
}
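
The getGrayHistogram helper is not part of this listing. One way it could look, assuming a single-channel 8-bit input and 256 bins, is a thin wrapper around Imgproc.calcHist:

public static Mat getGrayHistogram(Mat image) {
    // 256 bins over the intensity range [0, 256) of the single gray channel.
    Mat hist = new Mat();
    Imgproc.calcHist(Arrays.asList(image), new MatOfInt(0), new Mat(), hist,
            new MatOfInt(256), new MatOfFloat(0f, 256f));
    return hist;
}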

From source file:imageprocess.HistogramProcessor.java

public static Mat applyLookUp(Mat image, Mat lookup) {
    // Set output image (always 1-channel)
    Mat result = new Mat(image.rows(), image.cols(), CV_8U);

    //        for (int i = 0; i < image.cols(); i++) {
    //            for (int j = 0; j < image.rows(); j++) {
    //                double[] data = image.get(j, i);
    //                double newIntensity = lookup.get((int)data[0], 0)[0];
    //                result.put(j, i, newIntensity);
    //            }
    //            }
    Core.LUT(image, lookup, result);
    return result;
}
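
The lookup argument is a 1x256 single-channel Mat that maps old intensities to new ones. A minimal sketch that builds an intensity-inverting table and applies it (grayImage is a placeholder for any 8-bit single-channel Mat):

    // Inverting lookup table: new intensity = 255 - old intensity.
    Mat lookup = new Mat(1, 256, CvType.CV_8U);
    for (int i = 0; i < 256; i++) {
        lookup.put(0, i, 255 - i);
    }
    Mat inverted = applyLookUp(grayImage, lookup);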

From source file:imageprocess.ObjectFinder.java

public Mat find(final Mat image, MatOfInt channels, MatOfFloat ranges) {

    Mat result = new Mat();

    // Both branches issue the same call below: the Java bindings accept only a
    // dense Mat histogram, so the sparse case is not handled separately here.
    if (isIsSparse()) { // call the right function based on histogram type

        Imgproc.calcBackProject(Arrays.asList(image), channels, // vector specifying what histogram dimensions belong to what image channels
                ROIHistogram, // the histogram we are using
                result, // the resulting back projection image
                ranges, // the range of values, for each dimension
                255.0 // the scaling factor is chosen such that a histogram value of 1 maps to 255
        );

    } else {
        Imgproc.calcBackProject(Arrays.asList(image), channels, // vector specifying what histogram dimensions belong to what image channels
                ROIHistogram, // the histogram we are using
                result, // the resulting back projection image
                ranges, // the range of values, for each dimension
                255.0 // the scaling factor is chosen such that a histogram value of 1 maps to 255
        );
    }

    // Threshold back projection to obtain a binary image
    Mat thresholded = new Mat(result.rows(), result.cols(), result.type());
    if (getThreshold() > 0.0) {
        Imgproc.threshold(result, thresholded, 255 * getThreshold(), 255, THRESH_BINARY);
    }

    return thresholded;
}
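
A sketch of how the channels and ranges arguments might be set up for a hue-only back projection, assuming ROIHistogram was computed over channel 0 of an HSV image (finder and hsvImage are placeholders):

    MatOfInt channels = new MatOfInt(0);          // channel 0 = hue in an HSV image
    MatOfFloat ranges = new MatOfFloat(0f, 180f); // OpenCV stores hue in [0, 180)
    Mat backProjection = finder.find(hsvImage, channels, ranges);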

From source file:imageprocess.PixelProcessor.java

public void salt(Mat image, int n) {
    for (int k = 0; k < n; k++) {
        int i = (int) (Math.random() * image.cols());
        int j = (int) (Math.random() * image.rows());
        if (image.channels() == 1) {
            image.put(j, i, 255);
        } else if (image.channels() == 3) {
            image.put(j, i, new byte[] { (byte) 255, (byte) 255, (byte) 255 });
        }
    }
}

From source file:interactivespaces.service.image.vision.opencv.MatUtils.java

License:Apache License

/**
 * Converts a {@link Mat} into a {@link BufferedImage}.
 *
 * @param matrix
 *          Mat of type CV_8UC3 or CV_8UC1
 *
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 *
 * @throws SimpleInteractiveSpacesException
 *           the OpenCV Mat type is not supported
 */
public static BufferedImage matToBufferedImage(Mat matrix) throws SimpleInteractiveSpacesException {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        for (int i = 0; i < data.length; i = i + 3) {
            byte b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        throw new SimpleInteractiveSpacesException("The OpenCV Mat type is not supported");
    }

    BufferedImage image = new BufferedImage(cols, rows, type);
    image.getRaster().setDataElements(0, 0, cols, rows, data);

    return image;
}
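
A brief usage sketch, assuming imports of javax.imageio.ImageIO, java.io.File and java.io.IOException; the output path is illustrative only:

static void saveFrame(Mat frame) throws SimpleInteractiveSpacesException, IOException {
    // Convert the BGR frame and persist it as a PNG.
    BufferedImage img = MatUtils.matToBufferedImage(frame);
    ImageIO.write(img, "png", new File("frame.png"));
}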

From source file:io.appium.java_client.ScreenshotState.java

License:Apache License

/**
 * Compares two valid java bitmaps and calculates similarity score between them.
 *
 * @param refImage   reference image
 * @param tplImage   template
 * @param resizeMode one of possible enum values. Set it either to <em>TEMPLATE_TO_REFERENCE_RESOLUTION</em> or
 *                   <em>REFERENCE_TO_TEMPLATE_RESOLUTION</em> if given bitmaps have different dimensions
 * @return similarity score value in range [-1.0, 1.0]. 1.0 is returned if the images are equal
 * @throws ScreenshotComparisonError if provided images are not valid or have
 *                                   different resolution, but resizeMode has been set to <em>NO_RESIZE</em>
 */
public static double getOverlapScore(BufferedImage refImage, BufferedImage tplImage, ResizeMode resizeMode) {
    Mat ref = prepareImageForComparison(refImage);
    if (ref.empty()) {
        throw new ScreenshotComparisonError("Reference image cannot be converted for further comparison");
    }
    Mat tpl = prepareImageForComparison(tplImage);
    if (tpl.empty()) {
        throw new ScreenshotComparisonError("Template image cannot be converted for further comparison");
    }
    switch (resizeMode) {
    case TEMPLATE_TO_REFERENCE_RESOLUTION:
        tpl = resizeFirstMatrixToSecondMatrixResolution(tpl, ref);
        break;
    case REFERENCE_TO_TEMPLATE_RESOLUTION:
        ref = resizeFirstMatrixToSecondMatrixResolution(ref, tpl);
        break;
    default:
        // do nothing
    }

    if (ref.width() != tpl.width() || ref.height() != tpl.height()) {
        throw new ScreenshotComparisonError(
                "Resolutions of template and reference images are expected to be equal. "
                        + "Try different resizeMode value.");
    }

    Mat res = new Mat(ref.rows() - tpl.rows() + 1, ref.cols() - tpl.cols() + 1, CvType.CV_32FC1);
    Imgproc.matchTemplate(ref, tpl, res, Imgproc.TM_CCOEFF_NORMED);
    return Core.minMaxLoc(res).maxVal;
}
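
A possible call site, assuming both screenshots are already on disk with identical resolutions (the paths are hypothetical; NO_RESIZE is the enum value mentioned in the javadoc above):

static double compareScreenshots() throws IOException {
    BufferedImage reference = ImageIO.read(new File("expected.png")); // hypothetical paths
    BufferedImage actual = ImageIO.read(new File("actual.png"));
    return ScreenshotState.getOverlapScore(reference, actual, ResizeMode.NO_RESIZE);
}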

From source file:io.smartspaces.service.image.vision.opencv.MatUtils.java

License:Apache License

/**
 * Converts a {@link Mat} into a {@link BufferedImage}.
 *
 * @param matrix
 *          Mat of type CV_8UC3 or CV_8UC1
 *
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 *
 * @throws SimpleSmartSpacesException
 *           the OpenCV Mat type is not supported
 */
public static BufferedImage matToBufferedImage(Mat matrix) throws SimpleSmartSpacesException {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        for (int i = 0; i < data.length; i = i + 3) {
            byte b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        throw new SimpleSmartSpacesException("The OpenCV Mat type is not supported");
    }

    BufferedImage image = new BufferedImage(cols, rows, type);
    image.getRaster().setDataElements(0, 0, cols, rows, data);

    return image;
}