Example usage for org.opencv.core Mat rows

List of usage examples for org.opencv.core Mat rows

Introduction

On this page you can find example usage of the org.opencv.core.Mat rows() method.

Prototype

public int rows() 
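
rows() returns the number of rows in the matrix (its height, for an image Mat). A minimal, self-contained sketch of the typical iteration pattern follows; the class name MatRowsExample and the 4x3 matrix are only illustrative assumptions, and the snippet assumes the OpenCV native library has already been loaded:

import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatRowsExample {
    public static void main(String[] args) {
        // A small 4x3 single-channel matrix, just for illustration.
        Mat mat = Mat.zeros(4, 3, CvType.CV_8UC1);

        // rows() is the height of the matrix, cols() its width.
        for (int i = 0; i < mat.rows(); i++) {
            for (int j = 0; j < mat.cols(); j++) {
                double[] pixel = mat.get(i, j);
                System.out.println("(" + i + "," + j + ") = " + pixel[0]);
            }
        }
    }
}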

Source Link

Usage

From source file: com.carver.paul.truesight.ImageRecognition.ImageTools.java

License: Open Source License

public static int findLongestLine(List<Mat> linesList) {
    int longestLine = 0;

    // Each Mat holds a set of detected lines; each row of the Mat is one line.
    for (Mat lines : linesList) {
        for (int i = 0; i < lines.rows(); i++) {
            int length = lineLength(lines.get(i, 0));
            if (length > longestLine)
                longestLine = length;
        }
    }

    return longestLine;
}

From source file: com.carver.paul.truesight.ImageRecognition.RecognitionModel.java

License: Open Source License

private static void adjustXPosOfLines(Mat lines, int xPosAdjustment) {
    if (xPosAdjustment == 0)
        return;

    // Shift the x coordinate of both end points of every line by the adjustment.
    for (int i = 0; i < lines.rows(); i++) {
        double[] line = lines.get(i, 0); // {x1, y1, x2, y2}
        line[0] += xPosAdjustment;
        line[2] += xPosAdjustment;
        lines.put(i, 0, line);
    }
}

From source file: com.carver.paul.truesight.ImageRecognition.RecognitionModel.java

License: Open Source License

private static int countLinesInMats(List<Mat> lines) {
    int totalLines = 0;
    // Count the Mats that contain at least one detected line.
    for (Mat mat : lines) {
        if (mat.rows() > 0)
            totalLines++;
    }
    return totalLines;
}

From source file: com.carver.paul.truesight.ImageRecognition.RecognitionModel.java

License: Open Source License

private static String debugStringForLines(List<Mat> lines) {
    // Build a string of 1s and 0s showing which Mats contain at least one line.
    StringBuilder debugString = new StringBuilder();
    for (Mat mat : lines) {
        if (mat.rows() > 0)
            debugString.append("1");
        else
            debugString.append("0");
    }
    return debugString.toString();
}

From source file: com.carver.paul.truesight.ImageRecognition.RecognitionModel.java

License: Open Source License

public HeroLine(Mat lines) {
    //rect = new android.graphics.Rect();//0, 0, -1, -1);

    if (lines.rows() == 0) {
        isRealLine = false;
    } else {
        isRealLine = true;
        // Grow the bounding rect so it covers the end points of every line.
        for (int i = 0; i < lines.rows(); i++) {
            double[] val = lines.get(i, 0);
            if (i == 0) {
                initialiseRect(val);
            } else {
                rect.union((int) val[0], (int) val[1]);
                rect.union((int) val[2], (int) val[3]);
            }
        }
        //System.out.println("Created rect with width: " + rect.width());
    }
}

From source file: com.davidmiguel.gobees.monitoring.algorithm.processors.ContoursFinder.java

License: Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Finding outer contours
    contourList.clear();
    Imgproc.findContours(tmp, contourList, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    // Filter bees
    Mat contours = new Mat(tmp.rows(), tmp.cols(), CvType.CV_8UC3);
    tmp.release();
    double area;
    Scalar color;
    numBees = 0;
    for (int i = 0; i < contourList.size(); i++) {
        area = Imgproc.contourArea(contourList.get(i));
        if (area > minArea && area < maxArea) {
            color = GREEN;
            numBees++;
        } else {
            color = RED;
        }
        // Draw contour
        Imgproc.drawContours(contours, contourList, i, color, -1);
    }
    return contours;
}

From source file: com.example.afs.makingmusic.process.ImageGenerator.java

License: Open Source License

private BufferedImage toBufferedImage(Mat matrix) {
    int width = matrix.cols();
    int height = matrix.rows();
    int channels = matrix.channels();
    int bitmapSize = height * width * channels;
    byte[] bitmap = new byte[bitmapSize];
    matrix.get(0, 0, bitmap);
    // Assumes a 3-channel BGR Mat; copy its bytes straight into the image buffer.
    BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    WritableRaster raster = image.getRaster();
    DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
    System.arraycopy(bitmap, 0, dataBuffer.getData(), 0, bitmap.length);
    return image;
}

From source file: com.example.yannic.remotefacedetection.agent.FaceDetectionAgent.java

License: Open Source License

public static BufferedImage matToBufferedImage(Mat matrix, BufferedImage bimg) {
    if (matrix != null) {
        int cols = matrix.cols();
        int rows = matrix.rows();
        int elemSize = (int) matrix.elemSize();
        byte[] data = new byte[cols * rows * elemSize];
        int type;
        matrix.get(0, 0, data);
        // Choose the BufferedImage type based on the number of channels.
        switch (matrix.channels()) {
        case 1:
            type = BufferedImage.TYPE_BYTE_GRAY;
            break;
        case 3:
            type = BufferedImage.TYPE_3BYTE_BGR;
            // bgr to rgb
            byte b;
            for (int i = 0; i < data.length; i = i + 3) {
                b = data[i];
                data[i] = data[i + 2];
                data[i + 2] = b;
            }
            break;
        default:
            return null;
        }

        if (bimg == null || bimg.getWidth() != cols || bimg.getHeight() != rows || bimg.getType() != type) {
            bimg = new BufferedImage(cols, rows, type);
        }
        bimg.getRaster().setDataElements(0, 0, cols, rows, data);
    } else {
        bimg = null;
    }
    return bimg;
}

From source file: com.github.mbillingr.correlationcheck.ImageProcessor.java

License: Open Source License

Bitmap matToBitmap(Mat input) {
    if (input == null) {
        return Bitmap.createBitmap(0, 0, Bitmap.Config.ARGB_8888);
    }
    Mat tmp = new Mat();
    if (input.channels() == 1) {
        Imgproc.cvtColor(input, tmp, Imgproc.COLOR_GRAY2RGB);
    } else {
        Imgproc.cvtColor(input, tmp, Imgproc.COLOR_BGR2RGB);
    }
    // Transpose and flip to rotate the image 90 degrees clockwise.
    Core.transpose(tmp, tmp);
    Core.flip(tmp, tmp, 1);

    Bitmap bm = Bitmap.createBitmap(tmp.cols(), tmp.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(tmp, bm);
    return bm;
}

From source file: com.github.rosjava_catkin_package_a.ARLocROS.ComputePose.java

License: Apache License

public boolean computePose(Mat rvec, Mat tvec, Mat image2) throws NyARException, FileNotFoundException {
    // convert image to NyAR style for processing
    final INyARRgbRaster imageRaster = NyARImageHelper.createFromMat(image2);

    // create new marker system configuration
    i_config = new NyARMarkerSystemConfig(i_param);
    markerSystemState = new NyARMarkerSystem(i_config);
    // Create wrapper that passes cam pictures to marker system
    cameraSensorWrapper = new NyARSensor(i_screen_size);
    ids = new int[markerPatterns.size()];
    patternmap = new HashMap<>();
    for (int i = 0; i < markerPatterns.size(); i++) {
        // create marker description from pattern file and add to marker
        // system
        ids[i] = markerSystemState.addARMarker(arCodes.get(i), 25, markerConfig.getMarkerSize());
        patternmap.put(ids[i], markerPatterns.get(i));
    }

    cameraSensorWrapper.update(imageRaster);
    markerSystemState.update(cameraSensorWrapper);

    // init 3D point list
    final List<Point3> points3dlist = new ArrayList<>();
    final List<Point> points2dlist = new ArrayList<>();

    for (final int id : ids) {
        // process only if this marker has been detected
        if (markerSystemState.isExistMarker(id) && markerSystemState.getConfidence(id) > 0.7) {
            // read and add 2D points
            final NyARIntPoint2d[] vertex2d = markerSystemState.getMarkerVertex2D(id);
            Point p = new Point(vertex2d[0].x, vertex2d[0].y);
            points2dlist.add(p);
            p = new Point(vertex2d[1].x, vertex2d[1].y);
            points2dlist.add(p);
            p = new Point(vertex2d[2].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[3].x, vertex2d[3].y);
            points2dlist.add(p);

            final MatOfPoint mop = new MatOfPoint();
            mop.fromList(points2dlist);
            final List<MatOfPoint> pts = new ArrayList<>();
            pts.add(mop);
            // read and add corresponding 3D points
            points3dlist.addAll(markerConfig.create3dpointlist(patternmap.get(id)));
            // draw red rectangle around detected marker
            Core.rectangle(image2, new Point(vertex2d[0].x, vertex2d[0].y),
                    new Point(vertex2d[2].x, vertex2d[2].y), new Scalar(0, 0, 255));
        }

    }
    // load 2D and 3D points to Mats for solvePNP
    final MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(points3dlist);
    final MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(points2dlist);

    if (visualization) {
        // show image with markers detected
        Imshow.show(image2);
    }

    // do not call solvePnP with empty input data (no markers detected)
    if (points2dlist.size() == 0) {
        return false;
    }

    // RANSAC-based pose estimation (tends to shake more than plain solvePnP)
    Mat inliers = new Mat();

    Calib3d.solvePnPRansac(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec, false, 300, 5, 16,
            inliers, Calib3d.CV_P3P);
    ARLoc.getLog().info("Points detected: " + points2dlist.size() + " inliers: " + inliers.size());
    // avoid publishing a zero pose if localization failed
    if (inliers.rows() == 0) {
        return false;
    }

    return true;
}