Example usage for org.opencv.core Mat height

List of usage examples for org.opencv.core Mat height

Introduction

This page lists example usages of the org.opencv.core.Mat method height().

Prototype

public int height() 
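
height() returns the number of pixel rows in the matrix, the same value as rows(); width() likewise mirrors cols(). A minimal sketch, assuming the OpenCV Java bindings are installed and the native library is on java.library.path:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatHeightExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // 480 rows and 640 columns: height() reports rows, width() reports columns.
        Mat m = Mat.zeros(480, 640, CvType.CV_8UC1);
        System.out.println("height = " + m.height()); // 480, same as m.rows()
        System.out.println("width  = " + m.width());  // 640, same as m.cols()
    }
}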

Source Link

Usage

From source file:imageanalyzercv.ImageAnalyzerCV.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    System.out.println("path: " + System.getProperty("java.library.path"));
    System.loadLibrary("opencv_java300");

    Mat m = Highgui.imread("/Users/chintan/Downloads/software/image_analyis/mydata/SAM_0763.JPG");
    System.out.println("m = " + m.height());
    MatOfKeyPoint points = new MatOfKeyPoint();
    FeatureDetector.create(FeatureDetector.SURF).detect(m, points);

    Mat m2 = Highgui.imread("/Users/chintan/Downloads/software/image_analyis/mydata/SAM_0764.JPG");
    System.out.println("m = " + m2.height());
    MatOfKeyPoint points2 = new MatOfKeyPoint();
    FeatureDetector.create(FeatureDetector.SURF).detect(m2, points2);

    // Note: despite the variable name, this creates a BRISK extractor (binary descriptors),
    // which is why BRUTEFORCE_HAMMING is used for matching below.
    DescriptorExtractor SurfExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
    Mat imag1Desc = new Mat();
    SurfExtractor.compute(m, points, imag1Desc);

    Mat imag2Desc = new Mat();
    SurfExtractor.compute(m2, points2, imag2Desc);

    MatOfDMatch matches = new MatOfDMatch();

    Mat imgd = new Mat();
    imag1Desc.copyTo(imgd);
    System.out.println(imgd.size());
    DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING).match(imag2Desc, imag1Desc, matches);

    double min_distance = 1000.0;
    double max_distance = 0.0;
    DMatch[] matchArr = matches.toArray();
    for (int i = 0; i < matchArr.length; i++) {
        if (matchArr[i].distance > max_distance)
            max_distance = matchArr[i].distance;
        if (matchArr[i].distance < min_distance)
            min_distance = matchArr[i].distance;
    }

    ArrayList<DMatch> good_matches = new ArrayList<DMatch>();

    System.out.println("Min Distance: " + min_distance + "  Max distance: " + max_distance);
    double totalScore = 0.0;
    for (int j = 0; j < imag1Desc.rows() && j < matchArr.length; j++) {
        if ((matchArr[j].distance <= (11 * min_distance)) && (matchArr[j].distance >= min_distance)) {
            good_matches.add(matchArr[j]);
            //System.out.println(matchArr[j]);
            totalScore = totalScore + matchArr[j].distance;

        }
        //good_matches.add(matchArr[j]);

    }
    System.out.println((1 - (totalScore / (good_matches.size() * ((max_distance + min_distance) / 2)))) * 100);
    // System.out.println(matches.toList().size());
    Mat out = new Mat();
    MatOfDMatch mats = new MatOfDMatch();
    mats.fromList(good_matches);
    Features2d.drawMatches(m2, points2, m, points, mats, out);
    Highgui.imwrite("/Users/chintan/Downloads/one2.jpg", out);
}

From source file:info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License:Open Source License

private Mat compute_roi(Mat original) {
    Mat roi = new Mat();
    // Build a single-channel mask the size of the input, initially all black.
    Imgproc.cvtColor(original, roi, Imgproc.COLOR_BGR2GRAY, 0);
    roi.setTo(new Scalar(0, 0, 0));
    int x = original.width();
    int y = original.height();
    int cx = x / 2;
    int cy = y / 2;
    int r = Math.min(cx, cy) * 2 / 3;
    Core.circle(roi, new Point(cx, cy), r, new Scalar(255, 255, 255), -1, 8, 0);
    return roi;
}

From source file:io.appium.java_client.ScreenshotState.java

License:Apache License

private static Mat resizeFirstMatrixToSecondMatrixResolution(Mat first, Mat second) {
    if (first.width() != second.width() || first.height() != second.height()) {
        final Mat result = new Mat();
        final Size sz = new Size(second.width(), second.height());
        Imgproc.resize(first, result, sz);
        return result;
    }
    return first;
}

From source file:io.appium.java_client.ScreenshotState.java

License:Apache License

/**
 * Compares two valid java bitmaps and calculates similarity score between them.
 *
 * @param refImage   reference image
 * @param tplImage   template
 * @param resizeMode one of possible enum values. Set it either to <em>TEMPLATE_TO_REFERENCE_RESOLUTION</em> or
 *                   <em>REFERENCE_TO_TEMPLATE_RESOLUTION</em> if given bitmaps have different dimensions
 * @return similarity score value in range [-1.0, 1.0]. 1.0 is returned if the images are equal
 * @throws ScreenshotComparisonError if provided images are not valid or have
 *                                   different resolution, but resizeMode has been set to <em>NO_RESIZE</em>
 */
public static double getOverlapScore(BufferedImage refImage, BufferedImage tplImage, ResizeMode resizeMode) {
    Mat ref = prepareImageForComparison(refImage);
    if (ref.empty()) {
        throw new ScreenshotComparisonError("Reference image cannot be converted for further comparison");
    }
    Mat tpl = prepareImageForComparison(tplImage);
    if (tpl.empty()) {
        throw new ScreenshotComparisonError("Template image cannot be converted for further comparison");
    }
    switch (resizeMode) {
    case TEMPLATE_TO_REFERENCE_RESOLUTION:
        tpl = resizeFirstMatrixToSecondMatrixResolution(tpl, ref);
        break;
    case REFERENCE_TO_TEMPLATE_RESOLUTION:
        ref = resizeFirstMatrixToSecondMatrixResolution(ref, tpl);
        break;
    default:
        // do nothing
    }

    if (ref.width() != tpl.width() || ref.height() != tpl.height()) {
        throw new ScreenshotComparisonError(
                "Resolutions of template and reference images are expected to be equal. "
                        + "Try different resizeMode value.");
    }

    Mat res = new Mat(ref.rows() - tpl.rows() + 1, ref.cols() - tpl.cols() + 1, CvType.CV_32FC1);
    Imgproc.matchTemplate(ref, tpl, res, Imgproc.TM_CCOEFF_NORMED);
    return Core.minMaxLoc(res).maxVal;
}
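
For reference, a minimal calling sketch for getOverlapScore, assuming two hypothetical screenshot files on disk, that ResizeMode is the enum referenced above, and that the OpenCV native library has already been loaded:

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;

public class OverlapScoreDemo {
    public static void main(String[] args) throws IOException {
        // Hypothetical screenshot files; replace with real paths.
        BufferedImage ref = ImageIO.read(new File("reference.png"));
        BufferedImage tpl = ImageIO.read(new File("template.png"));
        // Scale the template to the reference resolution before comparing.
        double score = ScreenshotState.getOverlapScore(ref, tpl,
                ScreenshotState.ResizeMode.TEMPLATE_TO_REFERENCE_RESOLUTION);
        System.out.println("Overlap score: " + score); // approaches 1.0 for near-identical images
    }
}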

From source file:io.github.jakejmattson.facialrecognition.ImageFrame.java

License:Open Source License

/**
 * Convert an OpenCV Mat to a Java BufferedImage.
 *
 * @param matrix
 *       OpenCV Mat
 *
 * @return BufferedImage
 */
private static BufferedImage convertMatToImage(Mat matrix) {
    int width = matrix.width();
    int height = matrix.height();
    int type = matrix.channels() != 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    if (type == BufferedImage.TYPE_3BYTE_BGR)
        Imgproc.cvtColor(matrix, matrix, Imgproc.COLOR_BGR2RGB);

    byte[] data = new byte[width * height * (int) matrix.elemSize()];
    matrix.get(0, 0, data);

    BufferedImage out = new BufferedImage(width, height, type);
    out.getRaster().setDataElements(0, 0, width, height, data);

    return out;
}
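
A short usage sketch for the converter above, assuming the helper is reachable from the calling code and the frame is read with Imgcodecs (Highgui in OpenCV 2.x); the file path is hypothetical:

Mat frame = Imgcodecs.imread("frame.png");
if (!frame.empty()) {
    BufferedImage img = convertMatToImage(frame); // BGR input is converted to RGB in place
    System.out.println("Converted to " + img.getWidth() + "x" + img.getHeight()
            + " image of type " + img.getType());
}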

From source file:it.baywaylabs.jumpersumo.MainActivity.java

License:Open Source License

public void zxing(Mat mRgba) throws ChecksumException, FormatException {

    Bitmap bMap = Bitmap.createBitmap(mRgba.width(), mRgba.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mRgba, bMap);
    int[] intArray = new int[bMap.getWidth() * bMap.getHeight()];
    //copy pixel data from the Bitmap into the 'intArray' array
    bMap.getPixels(intArray, 0, bMap.getWidth(), 0, 0, bMap.getWidth(), bMap.getHeight());

    LuminanceSource source = new RGBLuminanceSource(bMap.getWidth(), bMap.getHeight(), intArray);

    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    Reader reader = new QRCodeMultiReader();

    String sResult = "";
    final double AREA_RIFERIMENTO = 11500.0; // reference area used to judge distance from the QR code

    try {

        Result result = reader.decode(bitmap);
        sResult = result.getText();
        if (result.getBarcodeFormat().compareTo(BarcodeFormat.QR_CODE) == 0)
            Log.d(TAG, "Yes! It is a QR code");
        ResultPoint[] points = result.getResultPoints();
        Log.d(TAG, "POINTS: " + java.util.Arrays.toString(points));
        //for (ResultPoint point : result.getResultPoints()) {
        Point a = new Point(points[0].getX(), points[0].getY());
        Point b = new Point(points[2].getX(), points[2].getY());
        Rect rect = new Rect(a, b);
        Log.d(TAG, "Area del rettangolo: " + rect.area());
        if (rect.area() < AREA_RIFERIMENTO)
            Log.w(TAG, "Mi devo avvicinare!");
        else
            Log.w(TAG, "Mi devo allontanare!");
        Imgproc.rectangle(this.mRgba, new Point(points[0].getX(), points[0].getY()),
                new Point(points[2].getX(), points[2].getY()), new Scalar(0, 255, 0), 3);
        Log.d(TAG, sResult);
        Point center = new Point(0, 0);

        Imgproc.circle(this.mRgba, center, 10, new Scalar(0, 0, 255), 2);
        //if (!"".equals(sResult))
        //Toast.makeText(MainActivity.this, "QRCode Scanned: " + sResult, Toast.LENGTH_LONG).show();
    } catch (Resources.NotFoundException e) {
        Log.e(TAG, "Code Not Found");
        e.printStackTrace();
    } catch (NotFoundException e) {
        e.printStackTrace();
    }

}

From source file:javacv.JavaCV.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());

    CascadeClassifier faceDetector = new CascadeClassifier("./data/lbpcascade_frontalface.xml");
    //CascadeClassifier faceDetector = new CascadeClassifier();

    JFrame frame = new JFrame("BasicPanel");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    JavaCV panel = new JavaCV();
    frame.setContentPane(panel);
    frame.setVisible(true);
    Mat webcam_image = new Mat();
    BufferedImage temp;
    VideoCapture capture;
    capture = new VideoCapture(0);

    if (capture.isOpened()) {
        while (true) {
            capture.read(webcam_image);
            if (!webcam_image.empty()) {
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 60);

                MatOfRect faceDetections = new MatOfRect();
                faceDetector.detectMultiScale(webcam_image, faceDetections);

                //System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

                // Draw a bounding box around each face.
                for (Rect rect : faceDetections.toArray()) {
                    Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                            new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                }

                temp = matToBufferedImage(webcam_image);
                panel.setimage(temp);
                panel.repaint();
            } else {
                System.out.println(" --(!) No captured frame -- Break!");
                break;
            }
        }
    }
    return;

}

From source file:logic.helpclass.Util.java

/**
 * Track template within the image
 * @param grayFrame
 * @param rect
 * @param temp
 * @return 
 */
static public Rect trackTemplate(Mat grayFrame, Rect rect, Mat temp) {
    Rect searchRect = new Rect(new Point(rect.x - rect.width / 2, rect.y - rect.height / 2),
            new Point(rect.x + rect.width * 3 / 2, rect.y + rect.height * 3 / 2));

    // Mat takes (rows, cols, type): rows correspond to height, cols to width.
    Mat dst = new Mat(searchRect.height - temp.height() + 1, searchRect.width - temp.width() + 1,
            CvType.CV_32FC1);

    if ((searchRect.x < 0 || searchRect.y < 0) || (searchRect.x + searchRect.width > grayFrame.cols()
            || searchRect.y + searchRect.height > grayFrame.rows()))
        return null;

    Imgproc.matchTemplate(grayFrame.submat(searchRect), temp, dst, Imgproc.TM_SQDIFF_NORMED);

    Core.MinMaxLocResult result = Core.minMaxLoc(dst);

    //check new location: if the coordinates change too much, keep the previous location
    //(that check is not implemented here, so the new location is always accepted)
    if (true) {
        rect.x = (int) (searchRect.x + result.minLoc.x);
        rect.y = (int) (searchRect.y + result.minLoc.y);
        return rect;
    } else {
        return null;
    }
}
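
A hedged usage sketch of trackTemplate: the current frame is converted to grayscale, the template is cut from the last known location, and the tracker searches a window around it (frame and initialRect are hypothetical inputs):

Mat gray = new Mat();
Imgproc.cvtColor(frame, gray, Imgproc.COLOR_BGR2GRAY);  // frame: current BGR video frame (hypothetical)
Mat template = gray.submat(initialRect).clone();        // initialRect: last known object location (hypothetical)
Rect tracked = Util.trackTemplate(gray,
        new Rect(initialRect.x, initialRect.y, initialRect.width, initialRect.height), template);
if (tracked != null) {
    System.out.println("Object moved to " + tracked);
} else {
    // The expanded search window left the frame; keep the previous location.
}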

From source file:Main.Camera.CameraController.java

private void TakeShot(Mat I) {

    if (PictureCount <= 6) {
        System.err.println("CURRENT I WIDTH: " + I.width());
        System.err.println("CURRENT I Height: " + I.height());

        System.err.println("CURRENT FRAM WIDTH: " + currentFrame.fitWidthProperty().intValue());
        System.err.println("CURRENT FRAM Height: " + currentFrame.fitHeightProperty().intValue());

        //Mat croppedimage = I; //cropImage(image,rect);
        Mat resizeimage = new Mat();
        Size sz = new Size(150, 150);
        Imgproc.resize(I, resizeimage, sz);

        Mat uncropped = I;
        // Note: Mat.size() returns a copy, so assigning to its fields has no effect on the Mat.
        //uncropped.size().height = 10;
        //uncropped.size().width = 30;
        //Mat cropped = fullImage(Rect(0,0,(I.width()/2),(I.height()/2));
        //            
        //            Print.Say("CURRENT PICTURE");
        //            Rect roi = new Rect(FaceRect.xProperty().intValue(),FaceRect.yProperty().intValue(), FaceRect.widthProperty().intValue(), FaceRect.heightProperty().intValue());
        //            
        //            Mat cropped = new Mat(uncropped, roi);
        BufferedImage BI = null;

        //Image imageToShow02 = mat2Image(cropped);
        Image imageToShow02 = mat2Image(resizeimage);

        currentPicture.setImage(imageToShow02);

        //Pictures[PictureCount] = matToBufferedImage(cropped,BI );
        Pictures[PictureCount] = matToBufferedImage(resizeimage, BI);

        Print.Say("\nPictures:" + Pictures[PictureCount] + "\n");
        PictureCount++;

    }

    Print.Say("\nSHOT TAKEN\n" + PictureCount);
}

From source file:main.PGMReader.java

public BufferedImage matToBufferedImage(Mat original) {
    // init
    BufferedImage image = null;
    int width = original.width();
    int height = original.height();
    int channels = original.channels();

    byte[] sourcePixels = new byte[width * height * channels];
    original.get(0, 0, sourcePixels);

    if (original.channels() > 1) {
        image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    } else {
        image = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
    }
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);

    return image;
}