Example usage for org.opencv.core Mat get

List of usage examples for org.opencv.core Mat get

Introduction

On this page you can find example usages of org.opencv.core Mat.get.

Prototype

public int get(int row, int col, double[] data) 
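A minimal, self-contained sketch of this buffered overload, shown next to the per-pixel get(row, col) overload. The class name MatGetExample and the sample values are illustrative only; the sketch assumes the OpenCV Java bindings and native library are available on the path.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatGetExample {
    public static void main(String[] args) {
        // Assumes the OpenCV native library is on java.library.path.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A 3x3 single-channel matrix of doubles, filled with 7.5.
        Mat mat = new Mat(3, 3, CvType.CV_64FC1, new Scalar(7.5));

        // Buffered overload from the prototype: copy element data starting at
        // (0, 0) into a double[]; the returned int reports how much was read.
        double[] buffer = new double[(int) mat.total() * mat.channels()];
        int read = mat.get(0, 0, buffer);
        System.out.println("read: " + read + ", first value: " + buffer[0]);

        // Per-pixel overload: returns one double per channel at (row, col).
        double[] pixel = mat.get(1, 2);
        System.out.println("value at (1,2): " + pixel[0]);
    }
}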

Source Link

Usage

From source file:gov.nasa.jpl.memex.pooledtimeseries.PoT.java

License:Apache License

static ArrayList<double[][]> computeGradients(Mat frame, int dim) {
    byte frame_array[] = new byte[(int) frame.total()];
    frame.get(0, 0, frame_array);

    ArrayList<double[][]> gradients = new ArrayList<double[][]>();

    for (int k = 0; k < dim; k++) {
        double angle = Math.PI * (double) k / (double) dim;

        double dx = Math.cos(angle) * 0.9999999;
        double dy = Math.sin(angle) * 0.9999999;

        double[][] grad = new double[frame.width()][frame.height()];

        for (int i = 0; i < frame.cols(); i++) {
            for (int j = 0; j < frame.rows(); j++) {
                if (i <= 1 || j <= 1 || i >= frame.cols() - 2 || j >= frame.rows() - 2) {
                    grad[i][j] = 0;
                } else {
                    double f1 = interpolatePixel(frame_array, frame.cols(), (double) i + dx, (double) j + dy);
                    double f2 = interpolatePixel(frame_array, frame.cols(), (double) i - dx, (double) j - dy);

                    double diff = f1 - f2;
                    if (diff < 0)
                        diff = diff * -1;
                    if (diff >= 256)
                        diff = 255;

                    grad[i][j] = diff;
                }
            }
        }

        gradients.add(grad);
    }

    return gradients;
}

From source file:houghtransform.Capture.java

private static BufferedImage convertMatToBufferedImage(Mat mat) {
    byte[] data = new byte[mat.width() * mat.height() * (int) mat.elemSize()];
    int type;
    mat.get(0, 0, data);

    switch (mat.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        // bgr to rgb    
        byte b;
        for (int i = 0; i < data.length; i = i + 3) {
            b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        throw new IllegalStateException("Unsupported number of channels");
    }

    BufferedImage out = new BufferedImage(mat.width(), mat.height(), type);

    out.getRaster().setDataElements(0, 0, mat.width(), mat.height(), data);

    return out;
}

From source file:houghtransform.transform_process.MyTransform.java

@Override
public void houghTransform(Mat edges) {
    Mat _edges = edges.clone();

    double radian = Math.PI / 180;
    int degrees = (int) Math.floor(theta * 180 / Math.PI + 0.5);

    int w = _edges.cols();
    int h = _edges.rows();

    _edges.convertTo(_edges, CvType.CV_64FC3);
    int size = w * h;
    double[] img_data = new double[size];
    _edges.get(0, 0, img_data); // Gets all pixels

    _img_w = w; //Number of columns
    _img_h = h; //Number of lines

    //Create the accumulator
    double hough_h = ((Math.sqrt(2.0) * (double) (h > w ? h : w)) / 2.0);
    _accu_h = (int) (hough_h * 2.0); // -r -> +r
    _accu_w = 180;

    _accu = new int[_accu_h * _accu_w];
    for (int i = 0; i < _accu_h * _accu_w; i++) {
        _accu[i] = 0;
    }

    double center_x = w / 2;
    double center_y = h / 2;

    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            if (img_data[(y * w) + x] > 250) {
                for (int t = 0; t < 180; t = t + degrees) {
                    // r = x * cos( theta ) + y * sin( theta )
                    double r = (((double) x - center_x) * Math.cos((double) t * radian))
                            + (((double) y - center_y) * Math.sin((double) t * radian));
                    _accu[(int) ((Math.floor(r + hough_h) * 180.0)) + t]++;
                }
            }
        }
    }

    ArrayList<Point> lines = new ArrayList<>();

    if (_accu.length == 0)
        try {
            throw new IOException("MyTransform: _accu == 0");
        } catch (IOException ex) {
            System.out.println(ex);
        }

    for (int r = 0; r < _accu_h; r++) {
        for (int t = 0; t < _accu_w; t++) {
            // Searching in the accumulator a value greater
            //or equal to the set threshold
            if (((int) _accu[(r * _accu_w) + t]) >= threshold) {
                // Is this point a local maxima (9x9)
                int max = _accu[(r * _accu_w) + t];
                ////////////////////////////////
                for (int ly = -4; ly <= 4; ly++) {
                    for (int lx = -4; lx <= 4; lx++) {
                        if (((ly + r) >= 0 && (ly + r) < _accu_h) && ((lx + t) >= 0 && (lx + t) < _accu_w)) {
                            if ((int) _accu[((r + ly) * _accu_w) + (t + lx)] > max) {
                                max = _accu[((r + ly) * _accu_w) + (t + lx)];
                                ly = lx = 5;
                            }
                        }
                    }
                }
                /////////////////////////////////
                if (max > (int) _accu[(r * _accu_w) + t])
                    continue;

                Point point = new Point();
                point.x = r;
                point.y = t * radian;
                lines.add(point);
            }
        }
    }
    _lines = lines;
}

From source file:imagegame.Camera.java

public static BufferedImage mat2BufferedImage(Mat mat) {
    //        MatOfByte buffer = new MatOfByte();
    //        Imgcodecs.imencode(".png", mat, buffer);
    int type = mat.channels() > 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;
    BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
    mat.get(0, 0, ((DataBufferByte) image.getRaster().getDataBuffer()).getData());
    return image;
    //return new Image(new ByteArrayInputStream(buffer.toArray()));
}

From source file:interactivespaces.service.image.vision.opencv.MatUtils.java

License:Apache License

/**
 * Converts a {@link Mat} into a {@link BufferedImage}.
 *
 * @param matrix
 *          Mat of type CV_8UC3 or CV_8UC1
 *
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 *
 * @throws SimpleInteractiveSpacesException
 *           the OpenCV Mat type is not supported
 */
public static BufferedImage matToBufferedImage(Mat matrix) throws SimpleInteractiveSpacesException {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        for (int i = 0; i < data.length; i = i + 3) {
            byte b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        throw new SimpleInteractiveSpacesException("The OpenCV Mat type is not supported");
    }

    BufferedImage image = new BufferedImage(cols, rows, type);
    image.getRaster().setDataElements(0, 0, cols, rows, data);

    return image;
}

From source file:io.github.jakejmattson.facialrecognition.ImageFrame.java

License:Open Source License

/**
 * Convert an OpenCV Mat to a Java BufferedImage.
 *
 * @param matrix
 *       OpenCV Mat
 *
 * @return BufferedImage
 */
private static BufferedImage convertMatToImage(Mat matrix) {
    int width = matrix.width();
    int height = matrix.height();
    int type = matrix.channels() != 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    if (type == BufferedImage.TYPE_3BYTE_BGR)
        Imgproc.cvtColor(matrix, matrix, Imgproc.COLOR_BGR2RGB);

    byte[] data = new byte[width * height * (int) matrix.elemSize()];
    matrix.get(0, 0, data);

    BufferedImage out = new BufferedImage(width, height, type);
    out.getRaster().setDataElements(0, 0, width, height, data);

    return out;
}

From source file:io.smartspaces.service.image.vision.opencv.MatUtils.java

License:Apache License

/**
 * Converts a {@link Mat} into a {@link BufferedImage}.
 *
 * @param matrix
 *          Mat of type CV_8UC3 or CV_8UC1
 *
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 *
 * @throws SimpleSmartSpacesException
 *           the OpenCV Mat type is not supported
 */
public static BufferedImage matToBufferedImage(Mat matrix) throws SimpleSmartSpacesException {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        for (int i = 0; i < data.length; i = i + 3) {
            byte b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        throw new SimpleSmartSpacesException("The OpenCV Mat type is not supported");
    }

    BufferedImage image = new BufferedImage(cols, rows, type);
    image.getRaster().setDataElements(0, 0, cols, rows, data);

    return image;
}

From source file:jarvis.module.colourtracking.Mat2Image.java

BufferedImage getImage(Mat mat) {
    //----------Additional line-----------------------------------------------------
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGR);
    //------------------------------------------------------------------------------
    getSpace(mat);
    mat.get(0, 0, dat);
    img.getRaster().setDataElements(0, 0, mat.cols(), mat.rows(), dat);
    return img;
}

From source file:javaapplication1.Ocv.java

void directManip(String input, String output) {
    // load the image and read it into a matrix
    File f2 = new File(input);
    Mat image = Highgui.imread(this.input);

    // we know we're working in 24-bit color depth, so prepare a 3-byte
    // array for storing cells from the Matrix
    byte[] vec3 = new byte[3];

    // Note: we are traversing every other column and every other row
    for (int i = 0; i < image.cols(); i += 2) {
        for (int j = 0; j < image.rows(); j += 2) {
            // get pixel, just to show how to get pixels...
            image.get(j, i, vec3);
            // create a zero pixel
            for (int z = 0; z < 3; ++z)
                vec3[z] = 0;
            // set the current pixel to zero
            image.put(j, i, vec3);
        }
    }

    // output the file
    Highgui.imwrite(this.output, image);
}

From source file:javacv.JavaCV.java

/**
 * Converts/writes a Mat into a BufferedImage.
 *
 * @param matrix Mat of type CV_8UC3 or CV_8UC1
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 */
public static BufferedImage matToBufferedImage(Mat matrix) {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        // bgr to rgb  
        byte b;
        for (int i = 0; i < data.length; i = i + 3) {
            b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        return null;
    }
    BufferedImage image2 = new BufferedImage(cols, rows, type);
    image2.getRaster().setDataElements(0, 0, cols, rows, data);
    return image2;
}