Example usage for org.opencv.core Mat cols

List of usage examples for org.opencv.core Mat cols

Introduction

On this page you can find example usages for org.opencv.core Mat cols.

Prototype

public int cols() 

Source Link

Usage

From source file:balldetection.Webcam.java

/**
 * Converts an OpenCV {@link Mat} to an AWT {@link Image}.
 * Multi-channel mats are converted from BGR to RGB first so the
 * resulting image renders with the expected colors.
 */
public static Image toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        Mat converted = new Mat();
        Imgproc.cvtColor(m, converted, Imgproc.COLOR_BGR2RGB);
        type = BufferedImage.TYPE_3BYTE_BGR;
        m = converted;
    }
    byte[] pixels = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, pixels); // copy every pixel out of the Mat
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    image.getRaster().setDataElements(0, 0, m.cols(), m.rows(), pixels);
    return image;
}

From source file:bgslibrary.Utils.java

License:Open Source License

/**
 * Converts an OpenCV {@link Mat} into a {@link BufferedImage} by copying
 * the raw pixel bytes straight into the image's backing data buffer.
 */
static final public BufferedImage toBufferedImage(Mat m) {
    final int type = m.channels() > 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;
    final byte[] pixels = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, pixels); // read all pixel data from the Mat
    final BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] target = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(pixels, 0, target, 0, pixels.length);
    return image;
}

From source file:bikecalibration.fxml.controller.MainWindowController.java

/**
 * Creates a node at the clicked position, registers it in {@code nodeData}
 * and in the per-frame {@code nodes} array, then redraws the current frame.
 *
 * @param event mouse click on the image canvas, in resized-image coordinates
 * @return {@code true} when the node was created and drawn, {@code false} on any failure
 */
private boolean createAndDrawNode(MouseEvent event) {
    try {
        // Map the click from the resized canvas back to original video coordinates.
        Mat originalMat = OpenCvUtils.getImageFromVideo(currentVideoFrameNumberProperty.get(), cap);
        double originalWidth = originalMat.cols();
        double resizedWidth = imageViewCanvas.getWidth();

        double[] origCoords = Utils.calculateOriginalCoordinates(originalWidth, resizedWidth,
                new double[] { event.getX(), event.getY() });
        double[] origWidth = Utils.calculateOriginalCoordinates(originalWidth, resizedWidth,
                new double[] { NODE_WIDTH });
        Rect roi = new Rect((int) origCoords[0], (int) origCoords[1], (int) origWidth[0],
                (int) origWidth[0]);
        Mat roiMat = originalMat.submat(roi);
        // NOTE(review): getName() yields the property's name, not its current
        // value — confirm this is intended rather than the frame number itself.
        Node n = new Node((int) origCoords[0], (int) origCoords[1], cpickerNode.getValue().toString(),
                currentVideoFrameNumberProperty.getName(), null);
        n.setRoi(roiMat);
        n.setId(nodeData.size());
        nodeData.add(n);

        // Append the node to this frame's array (grow-by-one copy via
        // System.arraycopy instead of the original element-by-element loop).
        Node[] currentFrameNodes = nodes[currentVideoFrameNumberProperty.get()];
        if (currentFrameNodes == null) {
            nodes[currentVideoFrameNumberProperty.get()] = new Node[] { n };
        } else {
            Node[] grown = new Node[currentFrameNodes.length + 1];
            System.arraycopy(currentFrameNodes, 0, grown, 0, currentFrameNodes.length);
            grown[currentFrameNodes.length] = n;
            nodes[currentVideoFrameNumberProperty.get()] = grown;
        }
        // Redraw so the new node becomes visible immediately.
        drawImage(Utils.matToImage(originalMat));
        return true;
    } catch (Exception ex) {
        // Best-effort by design: any failure (bad ROI, missing frame, ...)
        // is reported to the caller as false rather than propagated.
        return false;
    }
}

From source file:bollettini.BullettinCompiler.java

/**
 * Resizes the bulletin to a fixed preview size, converts it to a
 * grayscale {@link BufferedImage} and displays it in a new View window.
 */
public void show() {
    // Scale the bulletin down to the preview dimensions.
    Size previewSize = new Size(1100, 335);
    Mat preview = new Mat();
    Imgproc.resize(bullettin, preview, previewSize);

    // Copy the raw pixels into a grayscale image buffer.
    // NOTE(review): assumes the resized mat is single-channel — confirm.
    int type = BufferedImage.TYPE_BYTE_GRAY;
    byte[] pixels = new byte[preview.channels() * preview.cols() * preview.rows()];
    preview.get(0, 0, pixels);
    BufferedImage image = new BufferedImage(preview.cols(), preview.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(pixels, 0, targetPixels, 0, pixels.length);

    // Present the preview in its own window.
    View view = new View();
    view.init(this);
    view.setIcon(new ImageIcon(image));
    view.setVisible(true);
    view.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
}

From source file:by.zuyeu.deyestracker.core.util.CVCoreUtils.java

/**
 * Extracts the sub-matrix of {@code image} covered by {@code rect},
 * scaling the rect's coordinates by the image's cols/width and
 * rows/height ratios.
 */
public static Mat selectSubmatByRect(Rect rect, Mat image) {
    final double xScale = (double) image.cols() / image.width();
    final double yScale = (double) image.rows() / image.height();
    final int colStart = (int) (rect.x * xScale);
    final int colEnd = (int) ((rect.x + rect.width) * xScale);
    final int rowStart = (int) (rect.y * yScale);
    final int rowEnd = (int) ((rect.y + rect.height) * yScale);
    return image.submat(rowStart, rowEnd, colStart, colEnd);
}

From source file:by.zuyeu.deyestracker.core.util.CVCoreUtils.java

/**
 * Writes {@code subImage} pixel-by-pixel into {@code origImage}, with its
 * top-left corner placed at {@code rect}'s position (scaled by the image's
 * cols/width and rows/height ratios).
 */
public static void insertSubmatByRect(Mat subImage, Rect rect, Mat origImage) {
    final double xScale = (double) origImage.cols() / origImage.width();
    final int colStart = (int) (rect.x * xScale);
    final double yScale = (double) origImage.rows() / origImage.height();
    final int rowStart = (int) (rect.y * yScale);
    // Copy each source pixel into the destination at the shifted position.
    for (int srcCol = 0; srcCol < subImage.cols(); srcCol++) {
        for (int srcRow = 0; srcRow < subImage.rows(); srcRow++) {
            origImage.put(rowStart + srcRow, colStart + srcCol, subImage.get(srcRow, srcCol));
        }
    }
}

From source file:by.zuyeu.deyestracker.core.video.sampler.FaceInfoSampler.java

/**
 * Returns the upper half of the face image — rows [0, rows/2), all
 * columns — i.e. the region where the eyes are expected.
 */
private Mat selectEyesRegionFromFace(final Mat faceImage) {
    final int halfHeight = faceImage.rows() / 2;
    return faceImage.submat(0, halfHeight, 0, faceImage.cols());
}

From source file:ch.hslu.pren.t37.camera.BildAuswertungKorb.java

/**
 * Locates the basket template inside the camera image via normalized
 * cross-correlation template matching, draws the best match into the
 * output image and returns the horizontal offset of the basket center
 * from the image center.
 *
 * @return (basket center x - image center x) in pixels; a negative value
 *         means the robot should turn right (see debug log text)
 */
public int bildAuswerten() {

    // Image to search in.
    String inFile = "../camera.jpg";
    // Template that is searched for inside inFile.
    String templateFile = "../Bilder/korb.jpg";
    // The best match is visualized in this output image.
    String outFile = "../LoesungsBild.jpg";
    // Matching method: normalized correlation coefficient.
    int match_method = Imgproc.TM_CCOEFF_NORMED;

    // Load the source image and the template.
    Mat img = Highgui.imread(inFile, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat templ = Highgui.imread(templateFile, Highgui.CV_LOAD_IMAGE_COLOR);

    // Allocate the result matrix: one score per possible template placement.
    int result_cols = img.cols() - templ.cols() + 1;
    int result_rows = img.rows() - templ.rows() + 1;
    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    // Match and normalize the score map.
    Imgproc.matchTemplate(img, templ, result, match_method);
    Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    // Best match: minimum for SQDIFF methods, maximum for all others.
    Core.MinMaxLocResult mmr = Core.minMaxLoc(result);
    Point matchLoc;
    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
        matchLoc = mmr.minLoc;
    } else {
        matchLoc = mmr.maxLoc;
    }

    // Draw the match rectangle.
    Core.rectangle(img, matchLoc, new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows()),
            new Scalar(0, 255, 0), 10);

    // Only the top corners are needed for the center computation
    // (the original also built two unused bottom corners).
    Point topLeft = new Point(matchLoc.x, matchLoc.y);
    Point topRight = new Point(matchLoc.x + templ.cols(), matchLoc.y);

    // Save the annotated image.
    Highgui.imwrite(outFile, img);

    // Horizontal centers. Fixes vs. the original: use floating-point
    // division (integer division truncated odd widths) and read the width
    // from the in-memory image instead of re-reading the file just written.
    double mittePicture = img.width() / 2.0;
    double mitteKorb = topLeft.x + (topRight.x - topLeft.x) / 2;
    double differnez = mitteKorb - mittePicture;

    logger.log(PrenLogger.LogLevel.DEBUG, "Mitte Korb: " + mitteKorb);
    logger.log(PrenLogger.LogLevel.DEBUG, "Mitte Bild: " + mittePicture);
    logger.log(PrenLogger.LogLevel.DEBUG,
            "Differenz: " + differnez + "\nWenn Differnez negativ, nach rechts drehen");

    return (int) differnez;
}

From source file:ch.zhaw.facerecognitionlibrary.Helpers.FaceDetection.java

License:Open Source License

/**
 * Detects faces in the given image. If no face is found the image is
 * rotated by 90 degrees and detection is retried (up to 4 orientations);
 * {@code angle} records the rotation at which faces were found and
 * {@code this.img} keeps the successfully-detected orientation.
 *
 * @param img image to search; may be rotated in place
 * @return detected face rectangles, or {@code null} when no detector is
 *         selected, no face is found, or no rectangle fits in the image
 */
public Rect[] getFaces(Mat img) {
    MatOfRect faces = new MatOfRect();
    List<Rect> facesList = null;
    float mRelativeFaceSize = 0.2f;
    int mAbsoluteFaceSize = 0;
    if (faceDetector != null) {
        // If no face detected --> rotate the picture 90 degrees and try again.
        angle = 0;
        for (int i = 1; i <= 4; i++) {
            int height = img.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            faceDetector.detectMultiScale(img, faces, 1.1, 2, 2, new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
                    new Size());
            if (faces.empty()) {
                // Rotate by 90 degrees and retry.
                angle = 90 * i;
                MatOperation.rotate_90n(img, 90);
            } else {
                facesList = faces.toList();
                // Drop rectangles that do not fit inside the image.
                // Fix: the original removed elements from the list while
                // iterating it with for-each, which throws
                // ConcurrentModificationException; iterate backwards by index.
                for (int j = facesList.size() - 1; j >= 0; j--) {
                    Rect face = facesList.get(j);
                    if (!(0 <= face.x && 0 <= face.width && face.x + face.width <= img.cols() && 0 <= face.y
                            && 0 <= face.height && face.y + face.height <= img.rows())) {
                        facesList.remove(j);
                    }
                }
                if (!(facesList.size() > 0)) {
                    return null;
                }
                // Faces found with the current image rotation.
                this.img = img;
                break;
            }
        }

    } else {
        Log.e(TAG, "Detection method is not selected!");
    }
    if (facesList != null) {
        // Fix: toArray() returns Object[], so the original unchecked cast
        // to Rect[] threw ClassCastException. Use the typed overload.
        return facesList.toArray(new Rect[facesList.size()]);
    } else {
        return null;
    }
}

From source file:ch.zhaw.facerecognitionlibrary.Helpers.FaceDetection.java

License:Open Source License

/**
 * Detects exactly one eye in each vertical half of the face image and
 * returns an Eyes descriptor with both eye center points, the distance
 * between them and the angle of the connecting line.
 *
 * <p>The left half of the image is scanned with {@code rightEyeDetector}
 * and the right half with {@code leftEyeDetector} — presumably the sides
 * are named from the subject's point of view; TODO confirm.
 *
 * @param img face image to search
 * @return the detected eye pair, or {@code null} unless exactly one eye
 *         is found in each half
 */
public Eyes getEyes(Mat img) {
    // NOTE(review): integer division — for odd widths the two halves do
    // not cover the last column; confirm this is acceptable.
    double halfWidth = img.cols() / 2;
    double height = img.rows();
    // Rect(double[]) takes {x, y, width, height}.
    double[] values = new double[4];
    values[0] = 0;
    values[1] = 0;
    values[2] = halfWidth;
    values[3] = height;
    Rect rightHalf = new Rect(values);
    // Same size, shifted to start at the image's horizontal middle.
    values[0] = halfWidth;
    Rect leftHalf = new Rect(values);
    MatOfRect rightEyes = new MatOfRect();
    MatOfRect leftEyes = new MatOfRect();

    // Run a dedicated cascade classifier on each half.
    Mat rightHalfImg = img.submat(rightHalf);
    rightEyeDetector.detectMultiScale(rightHalfImg, rightEyes);
    Mat leftHalfImg = img.submat(leftHalf);
    leftEyeDetector.detectMultiScale(leftHalfImg, leftEyes);

    // Require exactly one detection per half; anything else is ambiguous.
    if (rightEyes.empty() || leftEyes.empty() || rightEyes.toArray().length > 1
            || leftEyes.toArray().length > 1) {
        return null;
    }

    Rect rightEye = rightEyes.toArray()[0];
    Rect leftEye = leftEyes.toArray()[0];

    // Eye centers in full-image coordinates: the left-eye rect is relative
    // to the right half, so its x is offset by img.cols() / 2.
    MatOfFloat rightPoint = new MatOfFloat(rightEye.x + rightEye.width / 2, rightEye.y + rightEye.height / 2);
    MatOfFloat leftPoint = new MatOfFloat(img.cols() / 2 + leftEye.x + leftEye.width / 2,
            leftEye.y + leftEye.height / 2);

    // Angle (degrees, via fastAtan2) and L2 distance between the centers.
    MatOfFloat diff = new MatOfFloat();
    Core.subtract(leftPoint, rightPoint, diff);
    double angle = Core.fastAtan2(diff.toArray()[1], diff.toArray()[0]);
    double dist = Core.norm(leftPoint, rightPoint, Core.NORM_L2);
    Eyes eyes = new Eyes(dist, rightPoint, leftPoint, angle);
    return eyes;
}