Example usage for org.opencv.core Mat width

List of usage examples for org.opencv.core Mat width

Introduction

On this page you can find example usages for org.opencv.core Mat width.

Prototype

public int width() 

Source Link

Usage

From source file:it.baywaylabs.jumpersumo.MainActivity.java

License:Open Source License

/**
 * Decodes a QR code from the given RGBA frame and draws feedback on {@code this.mRgba}:
 * a green rectangle around the code, a red circle at the origin, and log messages
 * telling whether the drone should move closer or further away based on the code's
 * apparent area.
 *
 * @param mRgba current camera frame (RGBA)
 * @throws ChecksumException when the QR code fails its checksum
 * @throws FormatException   when the QR code's format is invalid
 */
public void zxing(Mat mRgba) throws ChecksumException, FormatException {

    // Convert the OpenCV frame to an Android bitmap, then extract its pixels for ZXing.
    Bitmap bMap = Bitmap.createBitmap(mRgba.width(), mRgba.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mRgba, bMap);
    int[] intArray = new int[bMap.getWidth() * bMap.getHeight()];
    // copy pixel data from the Bitmap into the 'intArray' array
    bMap.getPixels(intArray, 0, bMap.getWidth(), 0, 0, bMap.getWidth(), bMap.getHeight());

    LuminanceSource source = new RGBLuminanceSource(bMap.getWidth(), bMap.getHeight(), intArray);

    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    Reader reader = new QRCodeMultiReader();

    String sResult = "";
    // Reference area (in px^2) separating "too far" from "too close".
    final double AREA_RIFERIMENTO = 11500.0;

    try {

        Result result = reader.decode(bitmap);
        sResult = result.getText();
        // Enum constants compare correctly (and more readably) with '=='.
        if (result.getBarcodeFormat() == BarcodeFormat.QR_CODE)
            Log.d(TAG, "SI! E' Un QRCode");
        ResultPoint[] points = result.getResultPoints();
        // Arrays.toString prints the point values; Object.toString on an array is just a hashcode.
        Log.d(TAG, "PUNTI: " + java.util.Arrays.toString(points));
        if (points == null || points.length < 3) {
            // Not enough finder points to build a bounding rectangle — skip the overlay
            // instead of crashing on points[2] below.
            Log.w(TAG, "QRCode decoded but fewer than 3 result points; skipping overlay.");
            return;
        }
        Point a = new Point(points[0].getX(), points[0].getY());
        Point b = new Point(points[2].getX(), points[2].getY());
        Rect rect = new Rect(a, b);
        Log.d(TAG, "Area del rettangolo: " + rect.area());
        if (rect.area() < AREA_RIFERIMENTO)
            Log.w(TAG, "Mi devo avvicinare!");
        else
            Log.w(TAG, "Mi devo allontanare!");
        Imgproc.rectangle(this.mRgba, new Point(points[0].getX(), points[0].getY()),
                new Point(points[2].getX(), points[2].getY()), new Scalar(0, 255, 0), 3);
        Log.d(TAG, sResult);
        Point center = new Point(0, 0);

        Imgproc.circle(this.mRgba, center, 10, new Scalar(0, 0, 255), 2);
    } catch (Resources.NotFoundException e) {
        Log.e(TAG, "Code Not Found");
        e.printStackTrace();
    } catch (NotFoundException e) {
        // ZXing throws NotFoundException when no barcode is present in the frame.
        e.printStackTrace();
    }

}

From source file:javacv.JavaCV.java

/**
 * Application entry point: opens the default webcam, runs LBP-cascade face
 * detection on every frame, draws a green bounding box around each face, and
 * displays the annotated frames in a Swing window.
 *
 * @param args the command line arguments (unused)
 */
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    // Quick sanity check that the native OpenCV library loaded correctly.
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());

    CascadeClassifier faceDetector = new CascadeClassifier("./data/lbpcascade_frontalface.xml");
    // Warn early if the cascade failed to load; detectMultiScale would otherwise
    // just silently find no faces.
    if (faceDetector.empty()) {
        System.err.println("Warning: could not load cascade ./data/lbpcascade_frontalface.xml");
    }

    JFrame frame = new JFrame("BasicPanel");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    JavaCV panel = new JavaCV();
    frame.setContentPane(panel);
    frame.setVisible(true);

    Mat webcam_image = new Mat();
    BufferedImage temp;
    VideoCapture capture = new VideoCapture(0);

    if (capture.isOpened()) {
        while (true) {
            capture.read(webcam_image);
            if (!webcam_image.empty()) {
                // Resize the window to fit the captured frame plus window chrome.
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 60);

                MatOfRect faceDetections = new MatOfRect();
                faceDetector.detectMultiScale(webcam_image, faceDetections);

                // Draw a bounding box around each face.
                for (Rect rect : faceDetections.toArray()) {
                    Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                            new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                }

                temp = matToBufferedImage(webcam_image);
                panel.setimage(temp);
                panel.repaint();
            } else {
                System.out.println(" --(!) No captured frame -- Break!");
                break;
            }
        }
        // Release the camera once the capture loop ends (original leaked it).
        capture.release();
    }
}

From source file:logic.helpclass.Util.java

/**
 * Track template within the image: searches a neighbourhood of {@code rect}
 * (the rect expanded by half its size on every side) for the best match of
 * {@code temp} and updates {@code rect} in place with the new location.
 *
 * @param grayFrame single-channel frame to search in
 * @param rect      previous template location; mutated on success
 * @param temp      template patch to match
 * @return the updated {@code rect}, or {@code null} when the expanded search
 *         window falls outside the frame
 */
static public Rect trackTemplate(Mat grayFrame, Rect rect, Mat temp) {
    // Search window: previous rect grown by width/2 and height/2 in each direction.
    Rect searchRect = new Rect(new Point(rect.x - rect.width / 2, rect.y - rect.height / 2),
            new Point(rect.x + rect.width * 3 / 2, rect.y + rect.height * 3 / 2));

    // Bail out before allocating anything if the window leaves the frame.
    if ((searchRect.x < 0 || searchRect.y < 0) || (searchRect.x + searchRect.width > grayFrame.cols()
            || searchRect.y + searchRect.height > grayFrame.rows()))
        return null;

    // matchTemplate's result has (H - h + 1) rows by (W - w + 1) cols, and the Mat
    // constructor takes (rows, cols, type) — so the height term comes first.
    // (The original had the two swapped; matchTemplate silently reallocated dst,
    // which masked the mistake.)
    Mat dst = new Mat(searchRect.height - temp.height() + 1, searchRect.width - temp.width() + 1, CV_32FC1);

    Imgproc.matchTemplate(grayFrame.submat(searchRect), temp, dst, Imgproc.TM_SQDIFF_NORMED);

    // With TM_SQDIFF_NORMED the best match is at the minimum.
    Core.MinMaxLocResult result = Core.minMaxLoc(dst);

    // TODO(review): the original intent (per its comment) was to keep the previous
    // location when coordinates jump too far; that validation was never implemented
    // (the code read "if (true)"). Preserving the always-accept behavior here.
    rect.x = (int) (searchRect.x + result.minLoc.x);
    rect.y = (int) (searchRect.y + result.minLoc.y);
    return rect;
}

From source file:Main.Camera.CameraController.java

/**
 * Captures one training shot from frame {@code I}: resizes it to 150x150,
 * previews it in {@code currentPicture}, and stores the converted image in
 * {@code Pictures}. At most 7 shots are taken (indices 0..6).
 *
 * @param I current camera frame
 */
private void TakeShot(Mat I) {
    if (PictureCount <= 6) {
        System.err.println("CURRENT I WIDTH: " + I.width());
        System.err.println("CURRENT I Height: " + I.height());

        System.err.println("CURRENT FRAM WIDTH: " + currentFrame.fitWidthProperty().intValue());
        System.err.println("CURRENT FRAM Height: " + currentFrame.fitHeightProperty().intValue());

        // Normalise the shot to a fixed 150x150 size for storage/preview.
        Mat resizeimage = new Mat();
        Size sz = new Size(150, 150);
        Imgproc.resize(I, resizeimage, sz);

        // NOTE: the original code assigned to I.size().height/width here.
        // Mat.size() returns a fresh Size copy, so those writes were silent
        // no-ops and have been removed (along with the unused 'uncropped' alias).

        BufferedImage BI = null;

        Image imageToShow02 = mat2Image(resizeimage);
        currentPicture.setImage(imageToShow02);

        Pictures[PictureCount] = matToBufferedImage(resizeimage, BI);

        Print.Say("\nPictures:" + Pictures[PictureCount] + "\n");
        PictureCount++;
    }

    Print.Say("\nSHOT TAKEN\n" + PictureCount);
}

From source file:main.PGMReader.java

/**
 * Converts an OpenCV {@code Mat} into a {@code BufferedImage}.
 * Multi-channel mats map to {@code TYPE_3BYTE_BGR}; single-channel mats map to
 * {@code TYPE_BYTE_GRAY}. Pixel data is copied, so the result is independent
 * of the source mat.
 *
 * @param original the mat to convert
 * @return a BufferedImage holding a copy of the mat's pixel data
 */
public BufferedImage matToBufferedImage(Mat original) {
    final int cols = original.width();
    final int rows = original.height();
    final int channels = original.channels();

    // Pull the raw pixel bytes out of the mat in one bulk read.
    final byte[] data = new byte[cols * rows * channels];
    original.get(0, 0, data);

    // Pick the image type matching the channel count.
    final int imageType = (original.channels() > 1)
            ? BufferedImage.TYPE_3BYTE_BGR
            : BufferedImage.TYPE_BYTE_GRAY;
    final BufferedImage image = new BufferedImage(cols, rows, imageType);

    // Copy straight into the image's backing byte buffer.
    final byte[] raster = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(data, 0, raster, 0, data.length);

    return image;
}

From source file:main.Utils.java

/**
 * Converts an OpenCV {@code Mat} to an AWT {@code BufferedImage} by copying the
 * mat's raw bytes into the image raster (BGR when multi-channel, grayscale
 * otherwise).
 *
 * @param original source mat
 * @return the converted image
 */
public BufferedImage matToBufferedImage(Mat original) {
    int w = original.width();
    int h = original.height();
    int ch = original.channels();

    byte[] pixels = new byte[w * h * ch];
    original.get(0, 0, pixels);

    BufferedImage image;
    if (original.channels() > 1) {
        image = new BufferedImage(w, h, BufferedImage.TYPE_3BYTE_BGR);
    } else {
        image = new BufferedImage(w, h, BufferedImage.TYPE_BYTE_GRAY);
    }

    // Bulk-copy into the raster's backing array — faster than per-pixel writes.
    byte[] target = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(pixels, 0, target, 0, pixels.length);
    return image;
}

From source file:mineshcvit.opendocscanner.CropImage.java

License:Apache License

/**
 * Finds the largest quadrilateral contour in {@code mBitmap} (Otsu threshold ->
 * Canny -> blur -> contours -> polygon approximation) and installs a
 * {@link HighlightView} whose corners follow that quad, ordered top-left,
 * bottom-left, bottom-right, top-right, as the default crop region.
 */
private void makeDefault() {
    // minesh: finding the largest rect in the given image

    Mat imgSource = new Mat();
    Utils.bitmapToMat(mBitmap, imgSource);
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);

    Log.w("myApp", "image path from isnde makedefault() is " + mImagePath);

    int matwidth = imgSource.width();
    int matheight = imgSource.height();

    Log.w("myApp", "mat image width, from makedefault() is " + matwidth);
    Log.w("myApp", "mat image height from, makedefault() is " + matheight);

    Mat imageBin = new Mat();
    double threshold = Imgproc.threshold(imgSource, imageBin, 0, 255, Imgproc.THRESH_OTSU);
    Log.w("myApp", "otsu threshold is " + threshold);

    // For Canny the higher threshold is Otsu's value and the lower is half of it.
    Imgproc.Canny(imgSource.clone(), imgSource, threshold * 0.5, threshold);

    Imgproc.GaussianBlur(imgSource, imgSource, new org.opencv.core.Size(3, 3), 3);

    // find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // Guard: with no contours the original code crashed on contours.get(0).
    if (contours.isEmpty()) {
        Log.w("myApp", "makeDefault(): no contours found, skipping default crop");
        return;
    }

    // Scan all contours for the largest one whose approximation is a quadrilateral.
    double maxArea = -1;
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    for (int idx = 0; idx < contours.size(); idx++) {
        MatOfPoint temp_contour = contours.get(idx);
        double contourarea = Imgproc.contourArea(temp_contour);
        // compare this contour to the previous largest contour found
        if (contourarea > maxArea) {
            // check if this contour approximates to a quadrilateral
            MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
            int contourSize = (int) temp_contour.total();
            MatOfPoint2f approxCurve_temp = new MatOfPoint2f();
            Imgproc.approxPolyDP(new_mat, approxCurve_temp, contourSize * 0.05, true);
            if (approxCurve_temp.total() == 4) {
                maxArea = contourarea;
                approxCurve = approxCurve_temp;
            }
        }
    }

    // Guard: if no 4-point contour was found, approxCurve is empty and the
    // corner reads below would NPE in the original code.
    if (approxCurve.total() < 4) {
        Log.w("myApp", "makeDefault(): no quadrilateral contour found, skipping default crop");
        return;
    }

    // Read the four corners of the best quad (anti-clockwise from top-left,
    // but in no guaranteed starting order — sorted below).
    double[] temp_double;
    temp_double = approxCurve.get(0, 0);
    Point p1 = new Point(temp_double[0], temp_double[1]);
    temp_double = approxCurve.get(1, 0);
    Point p2 = new Point(temp_double[0], temp_double[1]);
    temp_double = approxCurve.get(2, 0);
    Point p3 = new Point(temp_double[0], temp_double[1]);
    temp_double = approxCurve.get(3, 0);
    Point p4 = new Point(temp_double[0], temp_double[1]);

    ArrayList<Point> source = new ArrayList<Point>();
    ArrayList<Point> topPoints = new ArrayList<Point>();
    ArrayList<Point> bottomPoints = new ArrayList<Point>();
    ArrayList<Point> sortedPoints = new ArrayList<Point>();

    source.add(p1);
    source.add(p2);
    source.add(p3);
    source.add(p4);

    // Sort by y: the first two points form the top edge, the last two the bottom.
    Collections.sort(source, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.y, o2.y);
        }
    });

    topPoints.add(source.get(0));
    topPoints.add(source.get(1));

    // Sort each edge pair by x to separate left from right.
    Collections.sort(topPoints, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.x, o2.x);
        }
    });

    bottomPoints.add(source.get(2));
    bottomPoints.add(source.get(3));

    Collections.sort(bottomPoints, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.x, o2.x);
        }
    });

    // Corner order expected by HighlightView: TL, BL, BR, TR.
    sortedPoints.add(topPoints.get(0));//top left
    sortedPoints.add(bottomPoints.get(0));//bottom left
    sortedPoints.add(bottomPoints.get(1));//bottom right
    sortedPoints.add(topPoints.get(1));//top right

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Log.w("myApp", "bitmap width is " + width);
    Log.w("myApp", "bitmap height is " + height);

    Rect imageRect = new Rect(0, 0, width, height);

    Log.w("myApp",
            "from inside makedeafult inside cropimage calss, default crop rect values are set and now highlight view will be initiated ");

    HighlightView hv = new HighlightView(mImageView, imageRect, sortedPoints);

    Log.w("myApp", "higlight view initiated; done");

    mImageView.add(hv);
    Log.w("myApp", "add hv is done; done");

    mImageView.invalidate();
    mCrop = hv;

    Log.w("myApp", "mcrop=hv donee");
    mCrop.setFocus(true);
}

From source file:net.hydex11.opencvinteropexample.MainActivity.java

License:Open Source License

// Demonstrates zero-copy interop between OpenCV mats and RenderScript
// allocations: the RS allocations are created directly over the mats' native
// data pointers, so the convolution writes straight into outputMat.
// NOTE: statement order matters throughout — see the copyTo comments below.
private void example() {
    RenderScript mRS = RenderScript.create(this);

    // Loads input image
    Bitmap inputBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.houseimage);

    // Puts input image inside an OpenCV mat
    Mat inputMat = new Mat();
    Utils.bitmapToMat(inputBitmap, inputMat);

    // Output mat with the same size/type; RS will write into its buffer directly.
    Mat outputMat = new Mat(inputMat.size(), inputMat.type());

    // Testing bitmap, used to test that the OpenCV mat actually has bitmap data inside
    Bitmap initialBitmap = Bitmap.createBitmap(inputMat.width(), inputMat.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(inputMat, initialBitmap);

    // Retrieve OpenCV mat data address (native heap pointer backing each mat)
    long inputMatDataAddress = inputMat.dataAddr();
    long outputMatDataAddress = outputMat.dataAddr();

    // Creates a RS type that matches the input mat one.
    // Assumes the mat is RGBA_8888 (bitmapToMat on an ARGB_8888 bitmap yields 4-channel RGBA).
    Element element = Element.RGBA_8888(mRS);
    Type.Builder tb = new Type.Builder(mRS, element);
    tb.setX(inputMat.width());
    tb.setY(inputMat.height());

    Type inputMatType = tb.create();

    // Creates a RenderScript allocation that uses directly the OpenCV input mat address
    Allocation inputAllocation = createTypedAllocationWithDataPointer(mRS, inputMatType, inputMatDataAddress);
    Allocation outputAllocation = createTypedAllocationWithDataPointer(mRS, inputMatType, outputMatDataAddress);

    // Define a simple convolve script
    // Note: here, ANY kernel can be applied!
    ScriptIntrinsicConvolve3x3 convolve3x3 = ScriptIntrinsicConvolve3x3.create(mRS, element);

    // Plus-shaped edge kernel: corners and center weighted 1, rest 0.
    float convolveCoefficients[] = new float[9];
    convolveCoefficients[0] = 1;
    convolveCoefficients[2] = 1;
    convolveCoefficients[5] = 1;
    convolveCoefficients[6] = 1;
    convolveCoefficients[8] = 1;
    convolve3x3.setCoefficients(convolveCoefficients);

    convolve3x3.setInput(inputAllocation);
    convolve3x3.forEach(outputAllocation);

    // Block until the kernel finishes before reading outputMat's buffer.
    mRS.finish();

    // Converts the result to a bitmap
    Bitmap cvOutputBitmap = Bitmap.createBitmap(outputMat.width(), outputMat.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(outputMat, cvOutputBitmap);

    // Testing bitmap, used to test the RenderScript ouput allocation contents
    // Note: it is placed here because the copyTo function clears the input buffer
    Bitmap rsOutputBitmap = Bitmap.createBitmap(outputMat.width(), outputMat.height(), Bitmap.Config.ARGB_8888);
    outputAllocation.copyTo(rsOutputBitmap);

    // Testing bitmap, used to test that RenderScript input allocation pointed to the OpenCV mat
    // Note: it is placed here because the copyTo function clears the input buffer
    Bitmap rsInitialBitmap = Bitmap.createBitmap(inputMat.width(), inputMat.height(), Bitmap.Config.ARGB_8888);
    inputAllocation.copyTo(rsInitialBitmap);

    // Display input and output
    ImageView originalImageIV = (ImageView) findViewById(R.id.imageView);
    ImageView inputRSImageIV = (ImageView) findViewById(R.id.imageView2);
    ImageView outputRSImageIV = (ImageView) findViewById(R.id.imageView3);
    ImageView outputCVIV = (ImageView) findViewById(R.id.imageView4);

    originalImageIV.setImageBitmap(initialBitmap);
    inputRSImageIV.setImageBitmap(rsInitialBitmap);
    outputRSImageIV.setImageBitmap(rsOutputBitmap);
    outputCVIV.setImageBitmap(cvOutputBitmap);

}

From source file:news_analysis.headlinedetection.HeadLineDetection.java

/**
 * Decides whether {@code image} looks like a headline strip: only regions
 * whose height is strictly between 50 and 100 px are candidates, and those are
 * delegated to the horizontal run check.
 *
 * @param image candidate region
 * @return the result of {@code horizontalChecked} for plausible headline
 *         heights; {@code false} for all other heights
 */
public boolean isHeadLine(Mat image) {
    int width = image.width();
    int height = image.height();

    // Headlines are assumed to be 51-99 px tall; anything else is rejected outright.
    if (height > 50 && height < 100) {
        return horizontalChecked(image, width, height);
    }
    return false;
}

From source file:news_analysis.isimage.IsImage.java

/**
 * Heuristically decides whether {@code image} contains a picture region by
 * looking for a sufficiently large (greater than 100x100) bordered area.
 *
 * @param image page region to inspect
 * @return {@code true} as soon as a border item larger than 100x100 is found,
 *         {@code false} when there is none
 */
public boolean isImage(Mat image) {
    int width = image.width();
    int height = image.height();

    borderDetection = new BorderDetection();
    ArrayList<BorderItem> borderItems = borderDetection.getBorder(image, width, height);
    Mat[] subMat = new Mat[borderItems.size()];
    for (int i = 0; i < borderItems.size(); i++) {
        BorderItem item = borderItems.get(i);
        if (item.getHeight() > 100 && item.getWidth() > 100) {
            item = canMaxiMizeBorder(item, item.getMinX(), item.getMaxX(), item.getMinY(), item.getMaxY(),
                    height, width);
            // NOTE(review): Mat.submat is (rowStart, rowEnd, colStart, colEnd);
            // X values are being passed as rows and Y values as cols — possibly
            // swapped. Confirm BorderItem's coordinate convention before changing.
            subMat[i] = image.submat(item.getMinX(), item.getMaxX(), item.getMinY(), item.getMaxY());

            // NOTE(review): this run-length check is dead code — both branches
            // below return true, so its result never affects the outcome. Kept
            // as-is to preserve behavior; the likely intent was to return false
            // (or continue) when the check fails.
            int horizontal[] = horizontalChecked(subMat[i], item.getHeight() - 1, item.getWidth() - 1);
            int verticle[] = VerticleChecked(subMat[i], item.getHeight() - 1, item.getWidth() - 1);
            if (horizontal[0] + horizontal[1] > 110 && verticle[0] + verticle[1] > 110) {
                return true;
            }
            return true;
        }

    }

    return false;
}