Example usage for org.opencv.core Mat clone

List of usage examples for org.opencv.core Mat clone

Introduction

On this page you can find example usages of org.opencv.core Mat.clone().

Prototype

public Mat clone() 
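
Before the project examples below, a minimal self-contained sketch (class and variable names are ours, not taken from any of the projects) illustrating what the prototype promises: clone() allocates new memory and deep-copies the pixel data, so writes to the clone never show through to the original.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatCloneDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Original 3x3 single-channel matrix filled with zeros
        Mat original = Mat.zeros(3, 3, CvType.CV_8UC1);

        // clone() copies both the header and the underlying data (a deep copy)
        Mat copy = original.clone();

        // Writing into the copy leaves the original untouched
        copy.setTo(new Scalar(255));

        System.out.println("original(0,0) = " + original.get(0, 0)[0]); // 0.0
        System.out.println("copy(0,0)     = " + copy.get(0, 0)[0]);     // 255.0
    }
}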

Usage

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

private Optional<MatOfPoint2f> findIdealCorners(final Mat frame, final MatOfPoint2f estimatedPatternRect) {
    Mat traceMat = null;
    if (logger.isTraceEnabled()) {
        Mat traceMatTemp = frame.clone();
        traceMat = new Mat();

        Imgproc.cvtColor(traceMatTemp, traceMat, Imgproc.COLOR_GRAY2BGR);
    }

    // pixel distance, dynamic because we want to allow any resolution or
    // distance from pattern
    final int toleranceThreshold = (int) (minimumDimension / (double) (PATTERN_HEIGHT - 1) / 1.5);

    // Grey scale conversion.
    //final Mat grey = new Mat();
    //Imgproc.cvtColor(frame, grey, Imgproc.COLOR_BGR2GRAY);
    final Mat grey = frame;

    // Find edges
    Imgproc.Canny(grey, grey, CANNY_THRESHOLD_1, CANNY_THRESHOLD_2);

    // Blur the lines, otherwise the lines algorithm does not consider them
    Imgproc.GaussianBlur(grey, grey, gaussianBlurSize, GAUSSIANBLUR_SIGMA);

    if (logger.isTraceEnabled()) {
        logger.trace("tolerance threshold {} minimumDimension {}", toleranceThreshold, minimumDimension);

        String filename = String.format("calibrate-undist-grey-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, grey);
    }

    if (logger.isDebugEnabled())
        logger.debug("estimation {} {} {} {}", estimatedPatternRect.get(0, 0), estimatedPatternRect.get(1, 0),
                estimatedPatternRect.get(2, 0), estimatedPatternRect.get(3, 0));

    // Easier to work off of Points
    final Point[] estimatedPoints = matOfPoint2fToPoints(estimatedPatternRect);

    if (logger.isTraceEnabled()) {
        Core.circle(traceMat, estimatedPoints[0], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[1], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[2], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[3], 1, new Scalar(0, 0, 255), -1);
    }

    // Find lines
    // These parameters are just guesswork right now
    final Mat mLines = new Mat();
    final int minLineSize = (int) (minimumDimension * .90);
    final int lineGap = toleranceThreshold;

    // Do it
    Imgproc.HoughLinesP(grey, mLines, HOUGHLINES_RHO, HOUGHLINES_THETA, HOUGHLINES_THRESHOLD, minLineSize,
            lineGap);

    // Find the lines that match our estimates
    final Set<double[]> verifiedLines = new HashSet<double[]>();

    for (int x = 0; x < mLines.cols(); x++) {
        final double[] vec = mLines.get(0, x);
        final double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        final Point start = new Point(x1, y1);
        final Point end = new Point(x2, y2);

        if (nearPoints(estimatedPoints, start, toleranceThreshold)
                && nearPoints(estimatedPoints, end, toleranceThreshold)) {
            verifiedLines.add(vec);

            if (logger.isTraceEnabled()) {
                Core.line(traceMat, start, end, new Scalar(255, 0, 0), 1);
            }
        }
    }

    if (logger.isTraceEnabled())
        logger.trace("verifiedLines: {}", verifiedLines.size());

    // Reduce the lines to possible corners
    final Set<Point> possibleCorners = new HashSet<Point>();

    for (double[] line1 : verifiedLines) {
        for (double[] line2 : verifiedLines) {
            if (line1 == line2)
                continue;

            Optional<Point> intersection = computeIntersect(line1, line2);

            if (intersection.isPresent())
                possibleCorners.add(intersection.get());
        }
    }

    // Reduce the possible corners to ideal corners
    Point[] idealCorners = new Point[4];
    final double[] idealDistances = { toleranceThreshold, toleranceThreshold, toleranceThreshold,
            toleranceThreshold };

    for (Point pt : possibleCorners) {
        for (int i = 0; i < 4; i++) {
            final double distance = euclideanDistance(pt, estimatedPoints[i]);

            if (distance < idealDistances[i]) {
                idealDistances[i] = distance;
                idealCorners[i] = pt;
            }
        }
    }

    if (logger.isTraceEnabled()) {
        logger.trace("idealDistances {} {} {} {}", idealDistances[0], idealDistances[1], idealDistances[2],
                idealDistances[3]);

        String filename = String.format("calibrate-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Verify that we have the corners we need
    for (Point pt : idealCorners) {
        if (pt == null)
            return Optional.empty();

        if (logger.isTraceEnabled()) {
            logger.trace("idealCorners {}", pt);
            Core.circle(traceMat, pt, 1, new Scalar(0, 255, 255), -1);
        }
    }

    if (logger.isTraceEnabled()) {
        String filename = String.format("calibrate-lines-with-corners.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Sort them into the correct order
    // 1st-------2nd
    //  |         |
    //  |         |
    //  |         |
    // 3rd-------4th
    idealCorners = sortCorners(idealCorners);

    // build the MatofPoint2f
    final MatOfPoint2f sourceCorners = new MatOfPoint2f();
    sourceCorners.alloc(4);

    for (int i = 0; i < 4; i++) {
        sourceCorners.put(i, 0, new double[] { idealCorners[i].x, idealCorners[i].y });
    }

    return Optional.of(sourceCorners);
}
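
Note that findIdealCorners deliberately works in place: grey is just a reference to frame, so Canny and the Gaussian blur overwrite the caller's frame, and clone() is only used for the trace overlay. If that side effect is not wanted, the same steps can be run on a clone instead; a minimal sketch (EdgeMapDemo, its thresholds and kernel size are illustrative assumptions, not values from the project):

import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class EdgeMapDemo {
    /** Returns an edge map without touching the input: Canny and the blur run on a clone. */
    public static Mat edgeMap(Mat greyFrame) {
        Mat edges = greyFrame.clone();          // deep copy, the caller's 8-bit grey Mat stays intact
        Imgproc.Canny(edges, edges, 50, 150);   // placeholder thresholds
        Imgproc.GaussianBlur(edges, edges, new Size(3, 3), 0);
        return edges;
    }
}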

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

private void initializeWarpPerspective(final Mat frame, final MatOfPoint2f sourceCorners) {
    final MatOfPoint2f destCorners = new MatOfPoint2f();
    destCorners.alloc(4);

    // 1st-------2nd
    //  |         |
    //  |         |
    //  |         |
    // 3rd-------4th
    destCorners.put(0, 0, new double[] { boundsRect.boundingRect().x, boundsRect.boundingRect().y });
    destCorners.put(1, 0, new double[] { boundsRect.boundingRect().x + boundsRect.boundingRect().width,
            boundsRect.boundingRect().y });
    destCorners.put(2, 0, new double[] { boundsRect.boundingRect().x,
            boundsRect.boundingRect().y + boundsRect.boundingRect().height });
    destCorners.put(3, 0, new double[] { boundsRect.boundingRect().x + boundsRect.boundingRect().width,
            boundsRect.boundingRect().y + boundsRect.boundingRect().height });

    if (logger.isDebugEnabled()) {
        logger.debug("initializeWarpPerspective {} {} {} {}", sourceCorners.get(0, 0), sourceCorners.get(1, 0),
                sourceCorners.get(2, 0), sourceCorners.get(3, 0));
        logger.debug("initializeWarpPerspective {} {} {} {}", destCorners.get(0, 0), destCorners.get(1, 0),
                destCorners.get(2, 0), destCorners.get(3, 0));
    }

    perspMat = Imgproc.getPerspectiveTransform(sourceCorners, destCorners);

    int width = boundsRect.boundingRect().width;
    int height = boundsRect.boundingRect().height;

    // Make them divisible by two for video recording purposes
    if ((width & 1) == 1)
        width++;
    if ((height & 1) == 1)
        height++;

    boundingBox = new BoundingBox(boundsRect.boundingRect().x, boundsRect.boundingRect().y, width, height);

    warpInitialized = true;

    if (logger.isTraceEnabled()) {
        Mat debugFrame = frame.clone();

        Core.circle(debugFrame, new Point(sourceCorners.get(0, 0)[0], sourceCorners.get(0, 0)[1]), 1,
                new Scalar(255, 0, 255), -1);
        Core.circle(debugFrame, new Point(sourceCorners.get(1, 0)[0], sourceCorners.get(1, 0)[1]), 1,
                new Scalar(255, 0, 255), -1);
        Core.circle(debugFrame, new Point(sourceCorners.get(2, 0)[0], sourceCorners.get(2, 0)[1]), 1,
                new Scalar(255, 0, 255), -1);
        Core.circle(debugFrame, new Point(sourceCorners.get(3, 0)[0], sourceCorners.get(3, 0)[1]), 1,
                new Scalar(255, 0, 255), -1);

        Core.circle(debugFrame, new Point(destCorners.get(0, 0)[0], destCorners.get(0, 0)[1]), 1,
                new Scalar(255, 0, 0), -1);
        Core.circle(debugFrame, new Point(destCorners.get(1, 0)[0], destCorners.get(1, 0)[1]), 1,
                new Scalar(255, 0, 0), -1);
        Core.circle(debugFrame, new Point(destCorners.get(2, 0)[0], destCorners.get(2, 0)[1]), 1,
                new Scalar(255, 0, 0), -1);
        Core.circle(debugFrame, new Point(destCorners.get(3, 0)[0], destCorners.get(3, 0)[1]), 1,
                new Scalar(255, 0, 0), -1);

        Core.line(debugFrame, new Point(boundingBox.getMinX(), boundingBox.getMinY()),
                new Point(boundingBox.getMaxX(), boundingBox.getMinY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMinX(), boundingBox.getMinY()),
                new Point(boundingBox.getMinX(), boundingBox.getMaxY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMaxX(), boundingBox.getMaxY()),
                new Point(boundingBox.getMaxX(), boundingBox.getMinY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMaxX(), boundingBox.getMaxY()),
                new Point(boundingBox.getMinX(), boundingBox.getMaxY()), new Scalar(0, 255, 0));

        String filename = String.format("calibrate-transformation.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, debugFrame);
    }
}
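
Once perspMat has been computed with getPerspectiveTransform as above, each incoming frame can be rectified with warpPerspective; a minimal sketch of that follow-up step (WarpDemo and its parameter names are our assumptions, not code from the project):

import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class WarpDemo {
    /** Applies a previously computed perspective transform to a frame. */
    public static Mat warp(Mat frame, Mat perspMat, int width, int height) {
        Mat undistorted = new Mat();
        Imgproc.warpPerspective(frame, undistorted, perspMat, new Size(width, height));
        return undistorted;
    }
}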

From source file:com.sikulix.api.Picture.java

License:Open Source License

public Picture(Mat mat) {
    this();
    if (SX.isNull(mat)) {
        setContent(new Mat());
    } else {
        long start = new Date().getTime();
        setContent(mat.clone());
        timeToLoad = new Date().getTime() - start;
    }
    init(0, 0, getContent().width(), getContent().height());
    setAttributes();
}

From source file:cx.uni.jk.mms.iaip.filter.LogOfOnePlusAbs.java

License:Open Source License

@Override
public Mat convert(Mat mat) {

    /** take absolute values and apply log(1 + x) */
    Mat tempMat = mat.clone();
    Core.absdiff(tempMat, new Scalar(0.0d), tempMat);
    Core.add(tempMat, new Scalar(1.0d), tempMat);
    Core.log(tempMat, tempMat);

    /** find contrast and brightness to fit into 8 bit */
    MinMaxLocResult mmlr = Core.minMaxLoc(tempMat);
    double min = Math.min(mmlr.minVal, 0);
    double max = mmlr.maxVal;
    double alpha = 256.0d / (max - min);
    double beta = -min * alpha;

    /** conversion to 8 bit Mat applying contrast alpha and brightness beta */
    Mat byteMat = new MatOfByte();
    tempMat.convertTo(byteMat, CvType.CV_8U, alpha, beta);

    return byteMat;
}
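
The tail of convert() is a reusable pattern: clone the input, find its value range, and map that range linearly onto 8 bits with convertTo (out = alpha * in + beta, where alpha = 256 / (max - min) and beta = -min * alpha). A minimal sketch isolating just that step (FitTo8BitDemo is a hypothetical helper, not part of the project):

import org.opencv.core.Core;
import org.opencv.core.Core.MinMaxLocResult;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class FitTo8BitDemo {
    /** Scales src linearly into an 8-bit Mat; mirrors the filter above and assumes max > min. */
    public static Mat fitTo8Bit(Mat src) {
        Mat tmp = src.clone();                  // work on a copy, src is untouched
        MinMaxLocResult mmlr = Core.minMaxLoc(tmp);
        double min = Math.min(mmlr.minVal, 0);
        double max = mmlr.maxVal;
        double alpha = 256.0d / (max - min);
        double beta = -min * alpha;
        Mat out = new Mat();
        tmp.convertTo(out, CvType.CV_8U, alpha, beta);
        return out;
    }
}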

From source file:cx.uni.jk.mms.iaip.filter.LogRedBlue.java

License:Open Source License

@Override
public Mat convert(Mat mat) {

    MinMaxLocResult negativeMmlr, positiveMmlr;
    double min, max, alpha, beta;

    /** negative values to positive and log */
    Mat negativeMat = mat.clone();
    Core.min(negativeMat, new Scalar(0.0d), negativeMat);
    Core.multiply(negativeMat, new Scalar(-1.0d), negativeMat);
    Core.add(negativeMat, new Scalar(1.0d), negativeMat);
    Core.log(negativeMat, negativeMat);

    /** log of the positive values */
    Mat positiveMat = mat.clone();
    Core.max(positiveMat, new Scalar(0.0d), positiveMat);
    Core.add(positiveMat, new Scalar(1.0d), positiveMat);
    Core.log(positiveMat, positiveMat);

    /** find common contrast and brightness to fit into 8 bit */
    negativeMmlr = Core.minMaxLoc(negativeMat);
    positiveMmlr = Core.minMaxLoc(positiveMat);
    min = 0;
    max = Math.max(negativeMmlr.maxVal, positiveMmlr.maxVal);
    alpha = 256.0d / (max - min);
    beta = -min * alpha;

    /** conversion of both matrices to 8 bit */
    negativeMat.convertTo(negativeMat, CvType.CV_8UC1, alpha, beta);
    positiveMat.convertTo(positiveMat, CvType.CV_8UC1, alpha, beta);

    /** combine both matrices into one 8 bit 3 channel rgb picture */
    Mat tempMat = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC3);
    List<Mat> mixSrcMats = new ArrayList<>();
    mixSrcMats.add(negativeMat); // 1 channel: 0
    mixSrcMats.add(positiveMat); // 1 channel: 1
    List<Mat> mixDstMats = new ArrayList<>();
    mixDstMats.add(tempMat); // 3 channels: 0-2
    MatOfInt fromToMat = new MatOfInt(0, 0 /* neg -> red */, -1, 1 /* null -> green */, 1, 2 /* pos -> blue */);
    Core.mixChannels(mixSrcMats, mixDstMats, fromToMat);

    return tempMat;
}

From source file:cx.uni.jk.mms.iaip.filter.LogYellowCyan.java

License:Open Source License

@Override
public Mat convert(Mat mat) {

    MinMaxLocResult negativeMmlr, positiveMmlr;
    double min, max, alpha, beta;

    /** negative values to positive and log */
    Mat negativeMat = mat.clone();
    Core.min(negativeMat, new Scalar(0.0d), negativeMat);
    Core.multiply(negativeMat, new Scalar(-1.0d), negativeMat);
    Core.add(negativeMat, new Scalar(1.0d), negativeMat);
    Core.log(negativeMat, negativeMat);

    /** log of the positive values */
    Mat positiveMat = mat.clone();
    Core.max(positiveMat, new Scalar(0.0d), positiveMat);
    Core.add(positiveMat, new Scalar(1.0d), positiveMat);
    Core.log(positiveMat, positiveMat);

    /** find common contrast and brightness to fit into 8 bit */
    negativeMmlr = Core.minMaxLoc(negativeMat);
    positiveMmlr = Core.minMaxLoc(positiveMat);
    min = 0;
    max = Math.max(negativeMmlr.maxVal, positiveMmlr.maxVal);
    alpha = 256.0d / (max - min);
    beta = -min * alpha;

    /** conversion of both matrices to 8 bit */
    negativeMat.convertTo(negativeMat, CvType.CV_8UC1, alpha, beta);
    positiveMat.convertTo(positiveMat, CvType.CV_8UC1, alpha, beta);

    /** create additional mat for saturated green */
    Mat brightMat = negativeMat.clone();
    Core.max(negativeMat, positiveMat, brightMat);
    // Core.absdiff(brightMat, new Scalar(255.0d), brightMat);
    // Core.multiply(brightMat, new Scalar(1.0d/3.0d), brightMat);

    /** combine all matrices into one 8 bit 3 channel rgb picture */
    Mat tempMat = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC3);
    List<Mat> mixSrcMats = new ArrayList<>();
    mixSrcMats.add(negativeMat); // 1 channel: 0
    mixSrcMats.add(positiveMat); // 1 channel: 1
    mixSrcMats.add(brightMat); // 1 channel: 2
    List<Mat> mixDstMats = new ArrayList<>();
    mixDstMats.add(tempMat); // 3 channels: 0-2
    MatOfInt fromToMat = new MatOfInt(0, 0 /* neg -> red */, 2, 1 /* avg -> green */, 1, 2 /* pos -> blue */);
    Core.mixChannels(mixSrcMats, mixDstMats, fromToMat);

    return tempMat;
}

From source file:depthDataFromStereoCamsOpenCV.ProcessImages.java

/**
 * Applies the Canny edge detection algorithm.
 * @param image the input image
 * @return a Mat containing the detected edges
 */
public static Mat applyCannyEdgeDetectorOpenCV_Mat(Mat image) {
    //TODO
    //all these parameters have to be investigated
    Mat result = image.clone();
    int lowThreshold = 40;
    int ratio = 120;
    int kernel_size = 3;

    Imgproc.blur(result, result, new Size(2, 2));
    Imgproc.Canny(result, result, lowThreshold, ratio, kernel_size, true);
    //Imgproc.threshold(result,result,64, 254,Imgproc.THRESH_BINARY_INV);
    return result;
}
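
A possible call site for this helper; since it clones its input, the grey Mat read here is left unchanged. The file names are placeholders, and Imgcodecs assumes OpenCV 3 or later:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

import depthDataFromStereoCamsOpenCV.ProcessImages;

public class CannyDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // The helper works on a clone, so "grey" keeps its original pixel values
        Mat grey = Imgcodecs.imread("left.png", Imgcodecs.IMREAD_GRAYSCALE);
        Mat edges = ProcessImages.applyCannyEdgeDetectorOpenCV_Mat(grey);
        Imgcodecs.imwrite("left-edges.png", edges);
    }
}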

From source file:edu.fiu.cate.breader.BaseSegmentation.java

/**
 * Finds the bounding box for the book on the stand using
 * the high resolution image.
 * @param src high resolution image of the book
 * @return Rectangle delineating the book
 */
public Rect highRes(Mat src) {
    Mat dst = src.clone();
    Imgproc.blur(src, dst, new Size(100.0, 100.0), new Point(-1, -1), Core.BORDER_REPLICATE);
    Imgproc.threshold(dst, dst, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    Imgproc.Canny(dst, dst, 50, 200, 3, false);

    List<MatOfPoint> contours = new LinkedList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(dst, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

    Mat color = new Mat();
    Imgproc.cvtColor(src, color, Imgproc.COLOR_GRAY2BGR);
    for (int k = 0; k < contours.size(); k++) {
        byte[] vals = ITools.getHeatMapColor((float) k / (float) contours.size());
        Imgproc.drawContours(color, contours, k, new Scalar(vals[0], vals[1], vals[2]), 8);
    }
    new IViewer("HighRes Contours ", BReaderTools.bufferedImageFromMat(color));

    Point center = new Point(src.cols() / 2, src.rows() / 2);
    //Check hierarchy tree
    int[] res = polySearch(center, hierarchy, contours, 0);
    while (res[0] != 1 && res[2] != -1) {
        res = polySearch(center, hierarchy, contours, res[2]);
        if (res[0] == 1)
            break;
    }

    MatOfInt tHull = new MatOfInt();
    int index = 0;
    if (res[1] != -1) {
        index = res[1];
    }
    Imgproc.convexHull(contours.get(index), tHull);

    //get bounding box
    MatOfPoint cont = contours.get(index);
    Point[] points = new Point[tHull.rows()];
    for (int i = 0; i < tHull.rows(); i++) {
        int pIndex = (int) tHull.get(i, 0)[0];
        points[i] = new Point(cont.get(pIndex, 0));
    }
    Rect out = Imgproc.boundingRect(new MatOfPoint(points));
    return out;
}

From source file:edu.fiu.cate.breader.BaseSegmentation.java

/**
 * Finds the bounding box for the book on the stand using
 * the depth average image.
 * @param src the depth average image
 * @return Rectangle delineating the book
 */
public Rect lowResDist(Mat src) {
    Mat dst = src.clone();

    Imgproc.blur(src, dst, new Size(5, 5), new Point(-1, -1), Core.BORDER_REPLICATE);
    //      Imgproc.threshold(dst, dst, 0,255,Imgproc.THRESH_BINARY_INV+Imgproc.THRESH_OTSU);
    Imgproc.Canny(dst, dst, 50, 200, 3, false);
    //      Canny(src, dst, 20, 60, 3);

    List<MatOfPoint> contours = new LinkedList<>();
    Mat hierarchy = new Mat();
    /// Find contours
    Imgproc.findContours(dst, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

    Mat color = new Mat();
    Imgproc.cvtColor(src, color, Imgproc.COLOR_GRAY2BGR);
    for (int k = 0; k < contours.size(); k++) {
        byte[] vals = ITools.getHeatMapColor((float) k / (float) contours.size());
        Imgproc.drawContours(color, contours, k, new Scalar(vals[0], vals[1], vals[2]), 1);
    }
    new IViewer("LowRes Contours ", BReaderTools.bufferedImageFromMat(color));

    for (int k = 0; k < contours.size(); k++) {
        MatOfPoint2f tMat = new MatOfPoint2f();
        Imgproc.approxPolyDP(new MatOfPoint2f(contours.get(k).toArray()), tMat, 5, true);
        contours.set(k, new MatOfPoint(tMat.toArray()));
    }

    List<Point> points = new LinkedList<Point>();
    for (int i = 0; i < contours.size(); i++) {
        points.addAll(contours.get(i).toList());
    }

    MatOfInt tHull = new MatOfInt();
    Imgproc.convexHull(new MatOfPoint(points.toArray(new Point[points.size()])), tHull);

    //get bounding box
    Point[] tHullPoints = new Point[tHull.rows()];
    for (int i = 0; i < tHull.rows(); i++) {
        int pIndex = (int) tHull.get(i, 0)[0];
        tHullPoints[i] = points.get(pIndex);
    }
    Rect out = Imgproc.boundingRect(new MatOfPoint(tHullPoints));
    return out;
}

From source file:edu.soict.hust.k57.mmdb.components.HistogramImageBulder.java

private ImageIcon createImageIcon(Mat hist, int bin, Channel c) {
    int hist_w = 150; // width of the histogram image
    int hist_h = 100; // height of the histogram image
    int bin_w = (int) Math.round(hist_w * 1.0 / bin);

    Mat histImage = new Mat(hist_h, hist_w, CvType.CV_8UC3, new Scalar(80, 60, 60));
    Mat normalizeHist = hist.clone();
    Core.normalize(normalizeHist, normalizeHist, 0, histImage.rows(), Core.NORM_MINMAX, -1, new Mat());

    Scalar scalar = null;
    switch (c) {
    case B:
        scalar = new Scalar(255, 0, 0);
        break;
    case G:
        scalar = new Scalar(0, 255, 0);
        break;
    case R:
        scalar = new Scalar(0, 0, 255);
    }

    for (int i = 1; i < bin; i++) {
        Imgproc.line(histImage, new Point(bin_w * (i - 1), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])),
                new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])), scalar, 1, 8, 0);
        Imgproc.line(histImage, new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])),
                new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i, 0)[0])), scalar, 1, 8, 0);
    }
    MatOfByte buffer = new MatOfByte();
    Imgcodecs.imencode(".png", histImage, buffer);
    return new ImageIcon(buffer.toArray());
}
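
For context, a hist Mat like the one cloned above is typically produced with Imgproc.calcHist; a minimal sketch (the file name and bin count are placeholders, and the project's Channel enum is not reproduced here):

import java.util.Arrays;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class HistDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Compute a 32-bin histogram of the blue channel (index 0 in BGR)
        Mat img = Imgcodecs.imread("image.png");
        Mat hist = new Mat();
        Imgproc.calcHist(Arrays.asList(img), new MatOfInt(0), new Mat(), hist,
                new MatOfInt(32), new MatOfFloat(0f, 256f));
        // hist is now a 32x1 CV_32F Mat of bin counts, the shape createImageIcon expects
    }
}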