Example usage for org.opencv.core Mat put

Introduction

This page collects usage examples for org.opencv.core.Mat.put.

Prototype

public int put(int row, int col, byte[] data) 
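
The call copies the contents of data into the matrix starting at (row, col), filling it in row-major order, and returns a count of the data written. A minimal, self-contained sketch of the call (the class name is illustrative; it assumes the OpenCV native library is available on the java.library.path):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatPutExample {
    public static void main(String[] args) {
        // Load the OpenCV native library before creating any Mat.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // 2x3 single-channel 8-bit matrix, filled row by row from a byte[].
        Mat mat = new Mat(2, 3, CvType.CV_8UC1);
        byte[] data = { 1, 2, 3, 4, 5, 6 };
        mat.put(0, 0, data);

        // Prints the 2x3 matrix contents.
        System.out.println(mat.dump());
    }
}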

Usage

From source file:org.firstinspires.ftc.teamcode.vision.VisionLib.java

private Mat getCameraMat() {
    Image img = vuforia.getCurrentImage();
    if (img != null) {
        //construct mat to store image data
        Mat matIn = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC3);

        //convert pixel ByteBuffer to byte[]
        byte[] pixels = new byte[img.getPixels().remaining()];
        img.getPixels().get(pixels);

        //put data
        matIn.put(0, 0, pixels);

        //rotate and convert
        Core.transpose(matIn, matIn);
        Core.flip(matIn, matIn, 1);
        Imgproc.cvtColor(matIn, matIn, Imgproc.COLOR_BGR2RGB);

        return matIn;
    }
    return null;
}
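
The bulk put above assumes the Vuforia frame is tightly packed RGB888, so the byte[] must hold exactly width * height * 3 bytes. A small defensive variant of the same conversion (a sketch; rgbBytesToMat is an illustrative helper, not part of the original class):

static Mat rgbBytesToMat(byte[] pixels, int width, int height) {
    if (pixels.length != width * height * 3) {
        // stride padding or a different pixel format would land here
        throw new IllegalArgumentException("expected " + (width * height * 3)
                + " bytes, got " + pixels.length);
    }
    Mat mat = new Mat(height, width, CvType.CV_8UC3);
    mat.put(0, 0, pixels);
    return mat;
}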

From source file:org.gearvrf.ipbsample.SampleViewManager.java

License:Apache License

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    setFps();
    getTouchPadInput();

    Mat yuvImage = new Mat(SampleProcessing.height * 3 / 2, SampleProcessing.width, CvType.CV_8UC1);
    yuvImage.put(0, 0, data);

    Mat rgbaImage = new Mat(SampleProcessing.height, SampleProcessing.width, CvType.CV_8UC4);
    Imgproc.cvtColor(yuvImage, rgbaImage, Imgproc.COLOR_YUV420sp2RGBA, 4);

    Mat outputImage = SampleProcessing.processImage(rgbaImage, viewNum);

    if (zoomNum > 0) {
        outputImage = SampleProcessing.zoomImage(outputImage, zoomNum);
    }

    if (showData) {
        outputImage = SampleProcessing.dataImage(outputImage, viewNum, zoomNum, imageFps, latitude, longitude,
                altitude);
    }

    Utils.matToBitmap(outputImage, bitmap);
    bitmapTexture.update(bitmap);
}
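
The (height * 3 / 2, width) shape comes from the NV21 preview layout: width * height luma bytes followed by width * height / 2 interleaved chroma bytes. The same conversion, condensed into a reusable sketch (nv21ToRgba is an illustrative name, not part of the original class):

static Mat nv21ToRgba(byte[] nv21, int width, int height) {
    // One plane of Y samples stacked on top of the half-height interleaved VU plane.
    Mat yuv = new Mat(height * 3 / 2, width, CvType.CV_8UC1);
    yuv.put(0, 0, nv21);
    Mat rgba = new Mat();
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGBA, 4);
    yuv.release();
    return rgba;
}

Also note that allocating fresh Mats on every preview frame without calling release() keeps native memory tied up until garbage collection; camera callbacks commonly reuse preallocated Mats instead.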

From source file:org.openpnp.machine.reference.ReferenceCamera.java

License:Open Source License

private Mat rotate(Mat mat, double rotation) {
    if (rotation == 0D) {
        return mat;
    }

    // See:
    // http://stackoverflow.com/questions/22041699/rotate-an-image-without-cropping-in-opencv-in-c
    Point center = new Point(mat.width() / 2D, mat.height() / 2D);
    Mat mapMatrix = Imgproc.getRotationMatrix2D(center, rotation, 1.0);

    // determine bounding rectangle
    Rect bbox = new RotatedRect(center, mat.size(), rotation).boundingRect();
    // adjust transformation matrix
    double[] cx = mapMatrix.get(0, 2);
    double[] cy = mapMatrix.get(1, 2);
    cx[0] += bbox.width / 2D - center.x;
    cy[0] += bbox.height / 2D - center.y;
    mapMatrix.put(0, 2, cx);
    mapMatrix.put(1, 2, cy);

    Mat dst = new Mat(bbox.height, bbox.width, mat.type());
    Imgproc.warpAffine(mat, dst, mapMatrix, bbox.size(), Imgproc.INTER_LINEAR);
    mat.release();

    mapMatrix.release();

    return dst;
}
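
For rotations that are exact multiples of 90 degrees, Core.rotate sidesteps the affine warp and the bounding-box bookkeeping entirely. A sketch (it assumes OpenCV 3.2 or newer, where Core.rotate is available):

static Mat rotate90Clockwise(Mat mat) {
    Mat rotated = new Mat();
    Core.rotate(mat, rotated, Core.ROTATE_90_CLOCKWISE);
    mat.release();
    return rotated;
}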

From source file:org.openpnp.vision.FluentCv.java

License:Open Source License

public FluentCv toMat(BufferedImage img, String... tag) {
    Integer type = null;
    if (img.getType() == BufferedImage.TYPE_BYTE_GRAY) {
        type = CvType.CV_8UC1;
    } else if (img.getType() == BufferedImage.TYPE_3BYTE_BGR) {
        type = CvType.CV_8UC3;
    } else {
        img = convertBufferedImage(img, BufferedImage.TYPE_3BYTE_BGR);
        type = CvType.CV_8UC3;
    }
    Mat mat = new Mat(img.getHeight(), img.getWidth(), type);
    mat.put(0, 0, ((DataBufferByte) img.getRaster().getDataBuffer()).getData());
    return store(mat, tag);
}
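
The reverse direction works the same way with a bulk get: for a continuous CV_8UC3 Mat, the pixel bytes can be copied straight into the backing array of a TYPE_3BYTE_BGR image. A sketch (matToBufferedImage is illustrative, not part of FluentCv):

static BufferedImage matToBufferedImage(Mat mat) {
    // Assumes mat is CV_8UC3; the BGR byte order matches TYPE_3BYTE_BGR.
    BufferedImage img = new BufferedImage(mat.cols(), mat.rows(), BufferedImage.TYPE_3BYTE_BGR);
    byte[] target = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
    mat.get(0, 0, target);
    return img;
}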

From source file:org.openpnp.vision.FluentCv.java

License:Open Source License

public FluentCv filterCirclesByDistance(double originX, double originY, double minDistance, double maxDistance,
        String... tag) {
    List<float[]> results = new ArrayList<>();
    for (int i = 0; i < this.mat.cols(); i++) {
        float[] circle = new float[3];
        this.mat.get(0, i, circle);
        float x = circle[0];
        float y = circle[1];
        float radius = circle[2];
        double distance = Math.sqrt(Math.pow(x - originX, 2) + Math.pow(y - originY, 2));
        if (distance >= minDistance && distance <= maxDistance) {
            results.add(new float[] { x, y, radius });
        }
    }
    // It really seems like there must be a better way to do this, but after hours
    // and hours of trying I can't find one. How the hell do you append an element
    // of 3 channels to a Mat?!
    Mat r = new Mat(1, results.size(), CvType.CV_32FC3);
    for (int i = 0; i < results.size(); i++) {
        r.put(0, i, results.get(i));
    }
    return store(r, tag);
}
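
One way to avoid the per-column put calls is to pack the surviving circles into a single float[] and write them with one bulk put, which Mat also accepts for CV_32F data. The tail of the method could then read (a sketch of an alternative, not taken from the original source):

Mat r = new Mat(1, results.size(), CvType.CV_32FC3);
float[] packed = new float[results.size() * 3];
for (int i = 0; i < results.size(); i++) {
    System.arraycopy(results.get(i), 0, packed, i * 3, 3);
}
// one bulk write instead of results.size() separate puts
r.put(0, 0, packed);
return store(r, tag);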

From source file:org.openpnp.vision.FluentCv.java

License:Open Source License

/**
 * Filter circles, as returned from e.g. houghCircles, to only those that are within
 * maxDistance of the best fitting line.
 * 
 * @param maxDistance maximum allowed distance of a circle's center from the fitted line
 * @param tag optional tag to store the filtered result under
 * @return this FluentCv instance, for further chaining
 */
public FluentCv filterCirclesToLine(double maxDistance, String... tag) {
    if (this.mat.cols() < 2) {
        return store(this.mat, tag);
    }

    List<Point> points = new ArrayList<>();
    // collect the circles into a list of points
    for (int i = 0; i < this.mat.cols(); i++) {
        float[] circle = new float[3];
        this.mat.get(0, i, circle);
        float x = circle[0];
        float y = circle[1];
        points.add(new Point(x, y));
    }

    Point[] line = Ransac.ransac(points, 100, maxDistance);
    Point a = line[0];
    Point b = line[1];

    // filter the points by distance from the resulting line
    List<float[]> results = new ArrayList<>();
    for (int i = 0; i < this.mat.cols(); i++) {
        float[] circle = new float[3];
        this.mat.get(0, i, circle);
        Point p = new Point(circle[0], circle[1]);
        if (pointToLineDistance(a, b, p) <= maxDistance) {
            results.add(circle);
        }
    }

    // It really seems like there must be a better way to do this, but after hours
    // and hours of trying I can't find one. How the hell do you append an element
    // of 3 channels to a Mat?!
    Mat r = new Mat(1, results.size(), CvType.CV_32FC3);
    for (int i = 0; i < results.size(); i++) {
        r.put(0, i, results.get(i));
    }
    return store(r, tag);
}
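
Another option, answering the question in the comment, is MatOfPoint3f, which builds a CV_32FC3 Mat from a list in a single call. It produces an n x 1 column, so a transpose is needed to match the 1 x n layout used above (a sketch of an alternative, not how the original code does it):

List<Point3> circles = new ArrayList<>();
for (float[] c : results) {
    circles.add(new Point3(c[0], c[1], c[2]));
}
MatOfPoint3f column = new MatOfPoint3f();
column.fromList(circles);   // n x 1, CV_32FC3
Mat r = column.t();         // transpose to 1 x n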

From source file:org.pattern.image.loaders.JPEGImporter.java

private static ParticleImage importJPEG(File file) {

    BufferedImage tempImage;
    WritableRaster tempRaster;
    byte[] values;

    try {
        tempImage = ImageIO.read(file);
        tempRaster = tempImage.getRaster();
        values = new byte[tempImage.getWidth() * tempImage.getHeight()];
        int c = 0;
        for (int y = 0; y < tempImage.getHeight(); y++) {
            for (int x = 0; x < tempImage.getWidth(); x++) {
                values[c++] = (byte) tempRaster.getSample(x, y, 0);
            }
        }

        Mat mat = new Mat(tempImage.getHeight(), tempImage.getWidth(), CvType.CV_8UC1);
        mat.put(0, 0, values);

        return new ParticleImage(mat);

    } catch (IOException ex) {
        Logger.getLogger("Loading").log(Level.SEVERE, null, ex);
    }

    return null;
}
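
When ImageIO decodes the file as TYPE_BYTE_GRAY, the per-pixel sampling loop can usually be skipped, because the raster's backing array already holds the 8-bit samples row by row. A sketch (it assumes the raster has no scanline padding; grayImageToMat is an illustrative helper):

static Mat grayImageToMat(BufferedImage img) {
    if (img.getType() != BufferedImage.TYPE_BYTE_GRAY) {
        throw new IllegalArgumentException("expected a TYPE_BYTE_GRAY image");
    }
    Mat mat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
    // bulk copy of the raster's backing byte array
    mat.put(0, 0, ((DataBufferByte) img.getRaster().getDataBuffer()).getData());
    return mat;
}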

From source file:org.pattern.image.loaders.MRCImporter.java

/**
 * Provides import from MRC.
 *
 * @param path the file to import from
 * @return the imported data, or null if the import fails
 */
private MultiImage importMRC(File path) {
    BufferedImageReader r = new BufferedImageReader();

    try {
        r.setId(path.getAbsolutePath());
        int imageWidth = r.getSizeX();
        int imageHeight = r.getSizeY();
        int nImages = r.getImageCount();

        List<ParticleImage> images = new ArrayList<>(nImages);

        if (p != null) {
            p.switchToDeterminate(nImages);
        }

        for (int i = 0; i < nImages; i++) {
            Mat mat = new Mat(imageHeight, imageWidth, CvType.CV_8UC1);
            mat.put(0, 0, r.openBytes(i));
            images.add(new ParticleImage(mat));
            updateProgress(i);
        }

        r.close();

        if (images.size() == 1) {
            return new UniImage(images.get(0));
        }
        return new MultiImage(images);

    } catch (FormatException | IOException ex) {
        //            Logger.getLogger(PDataImporterFactory.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}
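
openBytes(i) returns the raw plane bytes, so the CV_8UC1 Mat only lines up with 8-bit unsigned data; a 16-bit MRC stack would produce a buffer twice that size. A guard one might add before the loop (a sketch against the Bio-Formats reader API, which the FormatException handling suggests is in use here):

static void checkEightBit(loci.formats.IFormatReader reader) throws IOException {
    if (reader.getPixelType() != loci.formats.FormatTools.UINT8) {
        throw new IOException("expected 8-bit unsigned pixels, got pixel type "
                + reader.getPixelType());
    }
}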

From source file:org.pattern.image.loaders.TIFFImporter.java

@Override
public MultiImage importData(File path) {

    //get image reader for tiff
    final BufferedImageReader reader = new BufferedImageReader();
    ParticleImage[] images = null;
    try {
        reader.setId(path.getAbsolutePath());

        int x = reader.getSizeX();
        int y = reader.getSizeY();
        int t = reader.getImageCount();
        images = new ParticleImage[t];

        if (p != null) {
            p.switchToDeterminate(t);
        }

        for (int i = 0; i < t; i++) {
            Mat mat = new Mat(y, x, CvType.CV_8UC1);
            mat.put(0, 0, reader.openBytes(i));
            images[i] = new ParticleImage(mat);
            updateProgress(i);
        }

        reader.close();
    } catch (FormatException | IOException ex) {
        Exceptions.printStackTrace(ex);
    }
    if (images != null) {
        if (images.length == 1) {
            return new UniImage(images[0]);
        } else {
            return new MultiImage(Arrays.asList(images));
        }
    }
    return null;
}

From source file:org.sikuli.android.ADBDevice.java

License:MIT License

public Mat captureDeviceScreenMat(int x, int y, int w, int h) {
    byte[] imagePrefix = new byte[12];
    byte[] image = new byte[0];
    int actW = w;
    if (x + w > devW) {
        actW = devW - x;
    }
    int actH = h;
    if (y + h > devH) {
        actH = devH - y;
    }
    Debug timer = Debug.startTimer();
    try {
        InputStream stdout = device.executeShell("screencap");
        stdout.read(imagePrefix);
        if (imagePrefix[8] != 0x01) {
            log(-1, "captureDeviceScreenMat: image type not RGBA");
            return null;
        }
        if (byte2int(imagePrefix, 0, 4) != devW || byte2int(imagePrefix, 4, 4) != devH) {
            log(-1, "captureDeviceScreenMat: width or height differ from device values");
            return null;
        }
        image = new byte[actW * actH * 4];
        int lenRow = devW * 4;
        byte[] row = new byte[lenRow];
        for (int count = 0; count < y; count++) {
            stdout.read(row);
        }
        boolean shortRow = x + actW < devW;
        for (int count = 0; count < actH; count++) {
            if (shortRow) {
                stdout.read(row);
                System.arraycopy(row, x * 4, image, count * actW * 4, actW * 4);
            } else {
                stdout.read(image, count * actW * 4, actW * 4);
            }
        }
        long duration = timer.end();
        log(lvl, "captureDeviceScreenMat:[%d,%d %dx%d] %d", x, y, actW, actH, duration);
    } catch (IOException | JadbException e) {
        log(-1, "captureDeviceScreenMat: [%d,%d %dx%d] %s", x, y, actW, actH, e);
    }
    Mat matOrg = new Mat(actH, actW, CvType.CV_8UC4);
    matOrg.put(0, 0, image);
    Mat matImage = new Mat();
    Imgproc.cvtColor(matOrg, matImage, Imgproc.COLOR_RGBA2BGR, 3);
    return matImage;
}
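
The checks on imagePrefix imply the 12-byte screencap header layout this code depends on: a 32-bit width, a 32-bit height and a 32-bit pixel format (1 for RGBA_8888), little-endian, followed by width * height * 4 RGBA bytes. A sketch of parsing that header with java.nio instead of the project's byte2int helper (an assumption consistent with the checks above, not a documented contract):

static int[] parseScreencapHeader(byte[] header) {
    ByteBuffer buf = ByteBuffer.wrap(header).order(ByteOrder.LITTLE_ENDIAN);
    int width = buf.getInt();
    int height = buf.getInt();
    int format = buf.getInt(); // 1 == RGBA_8888
    return new int[] { width, height, format };
}

Also note that InputStream.read may return fewer bytes than requested, so a production version would loop until each buffer is full or use DataInputStream.readFully.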