Example usage for org.opencv.core Mat total

List of usage examples for org.opencv.core Mat total

Introduction

On this page you can find example usage for org.opencv.core Mat.total().

Prototype

public long total() 
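
For orientation, here is a minimal, self-contained sketch of the method (my own illustration, not taken from the source files below); it assumes the OpenCV Java native library is on the path. total() counts matrix elements, so pixel buffers are usually sized as total() * channels():

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class TotalExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // A 4x3 three-channel matrix: total() returns rows * cols, ignoring channels.
        Mat mat = new Mat(4, 3, CvType.CV_8UC3, new Scalar(0, 0, 0));
        long elements = mat.total();                                      // 12
        byte[] buffer = new byte[(int) (mat.total() * mat.channels())];   // 36 values
        mat.get(0, 0, buffer);                                            // copy the whole matrix
        System.out.println(elements + " elements, " + buffer.length + " bytes copied");
    }
}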

Usage

From source file:com.trandi.opentld.tld.Util.java

License:Apache License

static float[] getFloatArray(final Mat mat) {
    if (CvType.CV_32FC1 != mat.type())
        throw new IllegalArgumentException(
                "Expected type is CV_32FC1, we found: " + CvType.typeToString(mat.type()));

    final int size = (int) (mat.total() * mat.channels());
    if (_floatBuff.length != size) {
        _floatBuff = new float[size];
    }
    mat.get(0, 0, _floatBuff); // 0 for row and col means the WHOLE Matrix
    return _floatBuff;
}

From source file:com.trandi.opentld.tld.Util.java

License:Apache License

static double[] getDoubleArray(final Mat mat) {
    if (CvType.CV_64F != mat.type())
        throw new IllegalArgumentException(
                "Expected type is CV_64F, we found: " + CvType.typeToString(mat.type()));

    final int size = (int) (mat.total() * mat.channels());
    if (_doubleBuff.length != size) {
        _doubleBuff = new double[size];
    }
    mat.get(0, 0, _doubleBuff); // 0 for row and col means the WHOLE Matrix
    return _doubleBuff;
}

From source file:edu.wpi.cscore.RawCVMatSource.java

License:Open Source License

/**
 * Put an OpenCV image and notify sinks.
 *
 * <p>Only 8-bit single-channel or 3-channel (with BGR channel order) images
 * are supported. If the format, depth or channel order is different, use
 * Mat.convertTo() and/or cvtColor() to convert it first.
 *
 * @param image OpenCV image
 */
public void putFrame(Mat image) {
    int channels = image.channels();
    if (channels != 1 && channels != 3) {
        throw new VideoException("Unsupported Image Type");
    }
    int imgType = channels == 1 ? PixelFormat.kGray.getValue() : PixelFormat.kBGR.getValue();
    CameraServerJNI.putRawSourceFrame(m_handle, image.dataAddr(), image.width(), image.height(), imgType,
            (int) image.total() * channels);
}
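
The Javadoc above asks callers to convert other formats before handing them to putFrame(). The following is a hedged sketch of that preparation step (my own code, not part of RawCVMatSource); the RGBA float input is a made-up example:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class PutFrameConversionSketch {
    // Normalize an arbitrary input Mat to 8-bit BGR so it satisfies the
    // putFrame() contract above. The RGBA float input below is synthetic.
    static Mat toEightBitBgr(Mat input) {
        Mat eightBit = new Mat();
        input.convertTo(eightBit, CvType.CV_8U);                  // force 8-bit depth
        Mat bgr = new Mat();
        Imgproc.cvtColor(eightBit, bgr, Imgproc.COLOR_RGBA2BGR);  // fix channel order
        return bgr;
    }

    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat rgba = new Mat(480, 640, CvType.CV_32FC4, new Scalar(0, 0, 0, 255));
        Mat frame = toEightBitBgr(rgba);
        // total() * channels() is the byte count that putFrame() ultimately passes on.
        System.out.println(frame.total() * frame.channels());
    }
}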

From source file:gov.nasa.jpl.memex.pooledtimeseries.PoT.java

License:Apache License

static ArrayList<double[][]> computeGradients(Mat frame, int dim) {
    byte frame_array[] = new byte[(int) frame.total()];
    frame.get(0, 0, frame_array);

    ArrayList<double[][]> gradients = new ArrayList<double[][]>();

    for (int k = 0; k < dim; k++) {
        double angle = Math.PI * (double) k / (double) dim;

        double dx = Math.cos(angle) * 0.9999999;
        double dy = Math.sin(angle) * 0.9999999;

        double[][] grad = new double[frame.width()][frame.height()];

        for (int i = 0; i < frame.cols(); i++) {
            for (int j = 0; j < frame.rows(); j++) {
                if (i <= 1 || j <= 1 || i >= frame.cols() - 2 || j >= frame.rows() - 2) {
                    grad[i][j] = 0;
                } else {
                    double f1 = interpolatePixel(frame_array, frame.cols(), (double) i + dx, (double) j + dy);
                    double f2 = interpolatePixel(frame_array, frame.cols(), (double) i - dx, (double) j - dy);

                    double diff = f1 - f2;
                    if (diff < 0)
                        diff = diff * -1;
                    if (diff >= 256)
                        diff = 255;

                    grad[i][j] = diff;
                }
            }
        }

        gradients.add(grad);
    }

    return gradients;
}

From source file:info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License:Open Source License

public Scalar process(Bitmap bmp) {
    // convert the image to OpenCV format
    Log.d("bifrostcore", "create original image");
    Mat original_alpha = new Mat();
    Assert.assertNotNull(original_alpha);
    Utils.bitmapToMat(bmp, original_alpha);
    // remove alpha
    Mat original = new Mat();
    Imgproc.cvtColor(original_alpha, original, Imgproc.COLOR_RGBA2RGB, 0);
    Log.d("bifrostcore", "image size: " + String.valueOf(original.total()));

    // compute an ROI
    Mat roi = compute_roi(original);

    Log.d("bifrostcore", "smooth image");
    // smooth the image
    Mat smoothed = smooth_image(original);

    Log.d("bifrostcore", "convert to hsv");
    Mat hsv = toHSV(smoothed);

    Log.d("bifrostcore", "extract main region");
    // extract main region using histogram
    Mat main_region = extract_main_region(hsv, roi);

    // threshold to preserve only the most significant regions
    Mat main_region_threshold = threshold_mask(main_region);
    saveImage(main_region_threshold);

    Log.d("bifrostcore", "return mean value");
    // return the mean value
    return Core.mean(original, main_region_threshold);
}

From source file:model.JointPDF.java

public JointPDF(Mat imgR, Mat imgO) {
    int x, y;
    double count_red, count_green, count_blue, total_red = 0, total_green = 0, total_blue = 0;
    PDF_red = new double[256][256];
    PDF_green = new double[256][256];
    PDF_blue = new double[256][256];

    // Reference Image = x, Other Image = y
    // Make Joint Histogram
    for (int i = 0; i < imgR.rows(); i++) {
        for (int j = 0; j < imgR.cols(); j++) {
            double[] rgbR = imgR.get(i, j);
            double[] rgbO = imgO.get(i, j);

            // Search for Blue PDF
            y = (int) rgbO[0];
            x = (int) rgbR[0];
            PDF_blue[y][x] += 1;

            // Search for Green PDF
            y = (int) rgbO[1];
            x = (int) rgbR[1];
            PDF_green[y][x] += 1;

            // Search for Red PDF
            y = (int) rgbO[2];
            x = (int) rgbR[2];
            PDF_red[y][x] += 1;
        }
    }

    //        System.out.println("ORIGINAL");
    //        for (int i = 0; i < 256; i++) {
    //            for (int j = 0; j < 256; j++) {
    //                if (PDF_blue[i][j] > 0) {
    //                    System.out.println("(" + i + "," + j + "):" + PDF_blue[i][j]);
    //                }
    //            }
    //        }
    // Divide all pixel with Max number of pixel
    for (int i = 0; i < 256; i++) {
        for (int j = 0; j < 256; j++) {
            count_blue = PDF_blue[i][j];
            count_green = PDF_green[i][j];
            count_red = PDF_red[i][j];

            if (count_blue != 0) {
                PDF_blue[i][j] = count_blue / imgR.total();
                total_blue += PDF_blue[i][j];
            }
            if (count_green != 0) {
                PDF_green[i][j] = count_green / imgR.total();
                total_green += PDF_green[i][j];
            }
            if (count_red != 0) {
                PDF_red[i][j] = count_red / imgR.total();
                total_red += PDF_red[i][j];
            }
        }
    }

    // Normalize all pixel so total sum pixel is equal to 1
    for (int i = 0; i < 256; i++) {
        for (int j = 0; j < 256; j++) {
            count_blue = PDF_blue[i][j];
            count_green = PDF_green[i][j];
            count_red = PDF_red[i][j];

            if (count_blue != 0) {
                PDF_blue[i][j] = count_blue / total_blue;
            }
            if (count_green != 0) {
                PDF_green[i][j] = count_green / total_green;
            }
            if (count_red != 0) {
                PDF_red[i][j] = count_red / total_red;
            }
        }
    }
    //        System.out.println("NORMALIZE");
    //        for (int i = 0; i < 256; i++) {
    //            for (int j = 0; j < 256; j++) {
    //                if (PDF_red[i][j] > 0) {
    //                    System.out.println("(" + i + "," + j + "):" + String.format("%.4f",PDF_red[i][j]));
    //                }
    //            }
    //        }
}

From source file:org.firstinspires.ftc.teamcode.AutonomousVuforia.java

public int getBeaconConfig(Image img, VuforiaTrackable beacon, CameraCalibration camCal) {

    OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();
    telemetry.addData("Stuff", pose != null);
    telemetry.addData("Stuff", img != null);
    try {
        telemetry.addData("Stuff", img.getPixels() != null);
    } catch (Exception e) {
        telemetry.addData("Stuff", e);
    }
    telemetry.update();

    if (pose != null && img != null && img.getPixels() != null) {
        Matrix34F rawPose = new Matrix34F();
        float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
        rawPose.setData(poseData);

        float[][] corners = new float[4][2];

        corners[0] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 276, 0)).getData();
        corners[1] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 276, 0)).getData();
        corners[2] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 92, 0)).getData();
        corners[3] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 92, 0)).getData();

        Bitmap bm = Bitmap.createBitmap(img.getWidth(), img.getHeight(), Bitmap.Config.RGB_565);
        bm.copyPixelsFromBuffer(img.getPixels());

        Mat crop = new Mat(bm.getHeight(), bm.getWidth(), CvType.CV_8UC3);
        Utils.bitmapToMat(bm, crop);

        float x = Math.min(Math.min(corners[1][0], corners[3][0]), Math.min(corners[0][0], corners[2][0]));
        float y = Math.min(Math.min(corners[1][1], corners[3][1]), Math.min(corners[0][1], corners[2][1]));
        float width = Math.max(Math.abs(corners[0][0] - corners[2][0]),
                Math.abs(corners[1][0] - corners[3][0]));
        float height = Math.max(Math.abs(corners[0][1] - corners[2][1]),
                Math.abs(corners[1][1] - corners[3][1]));

        x = Math.max(x, 0);
        y = Math.max(y, 0);
        width = (x + width > crop.cols()) ? crop.cols() - x : width;
        height = (y + height > crop.rows()) ? crop.rows() - y : height;

        Mat cropped = new Mat(crop, new Rect((int) x, (int) y, (int) width, (int) height));

        Imgproc.cvtColor(cropped, cropped, Imgproc.COLOR_RGB2HSV_FULL);

        Mat mask = new Mat();
        Core.inRange(cropped, blueLow, blueHigh, mask);
        Moments mmnts = Imgproc.moments(mask, true);

        if (mmnts.get_m00() > mask.total() * 0.8) {
            return BEACON_ALL_BLUE;
        } else if (mmnts.get_m00() < mask.total() * 0.8) {
            return BEACON_NO_BLUE;
        }

        if ((mmnts.get_m01() / mmnts.get_m00()) < cropped.rows() / 2) {

            return BEACON_RED_BLUE;
        } else {

            return BEACON_BLUERED;
        } // else

    }

    return BEACON_NOT_VISIBLE;
}

From source file:org.openpnp.vision.FluentCv.java

License:Open Source License

public static double calculatePsnr(Mat I1, Mat I2) {
    Mat s1 = new Mat();
    Core.absdiff(I1, I2, s1); // |I1 - I2|
    s1.convertTo(s1, CvType.CV_32F); // cannot make a square on 8 bits
    s1 = s1.mul(s1); // |I1 - I2|^2

    Scalar s = Core.sumElems(s1); // sum elements per channel

    double sse = s.val[0] + s.val[1] + s.val[2]; // sum channels

    if (sse <= 1e-10) // for small values return zero
        return 0;
    else {
        double mse = sse / (double) (I1.channels() * I1.total());
        double psnr = 10.0 * Math.log10((255 * 255) / mse);
        return psnr;
    }
}
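
A hedged usage sketch for the helper above, with two synthetic images in place of real data (it assumes the FluentCv class is on the classpath):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.openpnp.vision.FluentCv;

public class PsnrUsageSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Two synthetic 8-bit BGR images that differ slightly in one channel.
        Mat reference = new Mat(100, 100, CvType.CV_8UC3, new Scalar(120, 120, 120));
        Mat degraded = new Mat(100, 100, CvType.CV_8UC3, new Scalar(120, 120, 125));
        double psnr = FluentCv.calculatePsnr(reference, degraded);
        System.out.println("PSNR: " + psnr + " dB"); // higher means more similar
    }
}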

From source file:qupath.opencv.processing.PixelImageCV.java

License:Open Source License

public PixelImageCV(Mat mat) {
    // Extract dimensions and pixels
    this.width = (int) mat.size().width;
    this.height = (int) mat.size().height;

    pixels = new float[(int) mat.total()];
    if (mat.depth() == CvType.CV_32F)
        mat.get(0, 0, pixels);
    else {
        Mat mat2 = new Mat();
        mat.convertTo(mat2, CvType.CV_32F);
        mat2.get(0, 0, pixels);
    }
}

From source file:syncleus.dann.data.video.TLDUtil.java

License:Apache License

/**
 * The corresponding Java primitive array type depends on the Mat type:
 * CV_8U and CV_8S -> byte[]
 * CV_16U and CV_16S -> short[]
 * CV_32S -> int[]
 * CV_32F -> float[]
 * CV_64F-> double[]
 */
public static byte[] getByteArray(final Mat mat) {
    if (CvType.CV_8UC1 != mat.type())
        throw new IllegalArgumentException(
                "Expected type is CV_8UC1, we found: " + CvType.typeToString(mat.type()));

    final int size = (int) (mat.total() * mat.channels());
    if (_byteBuff.length != size) {
        _byteBuff = new byte[size];
    }
    mat.get(0, 0, _byteBuff); // 0 for row and col means the WHOLE Matrix
    return _byteBuff;
}
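
The Javadoc's depth-to-array mapping can be summarized in one hypothetical helper (a sketch of my own, not part of TLDUtil), again sized with total() * channels():

import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatToArraySketch {
    // Copies a whole Mat into the Java primitive array matching its depth.
    static Object toPrimitiveArray(Mat mat) {
        int size = (int) (mat.total() * mat.channels());
        switch (mat.depth()) {
        case CvType.CV_8U:
        case CvType.CV_8S: {
            byte[] buf = new byte[size];
            mat.get(0, 0, buf);
            return buf;
        }
        case CvType.CV_16U:
        case CvType.CV_16S: {
            short[] buf = new short[size];
            mat.get(0, 0, buf);
            return buf;
        }
        case CvType.CV_32S: {
            int[] buf = new int[size];
            mat.get(0, 0, buf);
            return buf;
        }
        case CvType.CV_32F: {
            float[] buf = new float[size];
            mat.get(0, 0, buf);
            return buf;
        }
        case CvType.CV_64F: {
            double[] buf = new double[size];
            mat.get(0, 0, buf);
            return buf;
        }
        default:
            throw new IllegalArgumentException("Unsupported depth: " + mat.depth());
        }
    }
}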