Example usage for org.opencv.core Mat clone

List of usage examples for org.opencv.core Mat clone

Introduction

On this page you can find usage examples for org.opencv.core.Mat.clone().

Prototype

public Mat clone() 
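
Mat.clone() returns a deep copy of the matrix: a new Mat with its own data buffer, so later changes to either matrix do not affect the other. Before the project examples below, here is a minimal, self-contained sketch contrasting a plain reference assignment with clone(); the class name CloneDemo is illustrative, and it assumes the standard OpenCV Java bindings and native library are available.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class CloneDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat original = Mat.zeros(2, 2, CvType.CV_8UC1);

        Mat alias = original;          // same underlying pixel data
        Mat copy = original.clone();   // deep copy with its own data

        original.setTo(new Scalar(255));

        System.out.println(alias.dump()); // all 255: the alias shares data
        System.out.println(copy.dump());  // all 0: the clone is unaffected
    }
}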

Usage

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

@Deprecated
private Mat drawNeuronsCenters(Mat src) {
    if (src == null) {
        src = sourceImage.clone();
    }
    if (neurons == null) {
        return src;
    }

    Mat result = src.clone();
    Scalar color = new Scalar(250, 10, 19);

    for (Neuron neuron : neurons) {
        Imgproc.circle(result, neuron.getCenter(), 4, color, 2);
    }

    return result;
}

From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.Blur.java

License:Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Apply gaussian blur
    for (int i = 0; i < REPETITIONS; i++) {
        Imgproc.GaussianBlur(tmp, tmp, new Size(KERNEL_SIZE, KERNEL_SIZE), 0);
    }
    return tmp;
}

From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.ContoursFinder.java

License:Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Finding outer contours
    contourList.clear();
    Imgproc.findContours(tmp, contourList, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    // Filter bees
    Mat contours = new Mat(tmp.rows(), tmp.cols(), CvType.CV_8UC3);
    tmp.release();
    double area;
    Scalar color;
    numBees = 0;
    for (int i = 0; i < contourList.size(); i++) {
        area = Imgproc.contourArea(contourList.get(i));
        if (area > minArea && area < maxArea) {
            color = GREEN;
            numBees++;
        } else {
            color = RED;
        }
        // Draw contour
        Imgproc.drawContours(contours, contourList, i, color, -1);
    }
    return contours;
}

From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.Morphology.java

License:Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Step 1: erode to remove legs
    Imgproc.erode(tmp, tmp, KERNEL3);
    // Step 2: dilate to join bodies and heads
    Imgproc.dilate(tmp, tmp, KERNEL2);
    for (int i = 0; i < REPETITIONS_DILATE; i++) {
        Imgproc.dilate(tmp, tmp, kernelDilate);
    }
    // Step 3: erode to recover original size
    Imgproc.erode(tmp, tmp, KERNEL1);
    for (int i = 0; i < REPETITIONS_ERODE; i++) {
        Imgproc.erode(tmp, tmp, kernelErode);
    }
    return tmp;
}

From source file:com.example.sarthuak.opencv.MainActivity.java

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    // TODO Auto-generated method stub
    final int viewMode = mViewMode;
    switch (viewMode) {

    case VIEW_MODE_RGBA:
        // input frame has RGBA format
        mRgba = inputFrame.rgba();
        break;
    case VIEW_MODE_CANNY:
        // input frame has gray scale format
        mRgba = inputFrame.rgba();
        Imgproc.Canny(inputFrame.gray(), mRgbaF, 80, 100);
        Imgproc.cvtColor(mRgbaF, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
        break;

    case VIEW_MODE_ocr:
        startActivity(new Intent(this, ScanLicensePlateActivity.class));
        break;

    case VIEW_MODE_new:
        Mat mRgba;

        mRgba = inputFrame.rgba();
        drawing = mRgba.clone();

        mRgbaT = drawing;

        Imgproc.cvtColor(drawing, mRgbaT, Imgproc.COLOR_BGR2GRAY);

        org.opencv.core.Size s = new Size(1, 1);
        Imgproc.GaussianBlur(mRgbaT, mRgbaT, s, 0, 0);

        Imgproc.Canny(mRgbaT, mRgbaT, 100, 255);
        Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(5, 5));
        Imgproc.dilate(mRgbaT, mRgbaT, element);
        List<MatOfPoint> contours = new ArrayList<>();

        Imgproc.findContours(drawing, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE,
                new Point(0, 0));
        double maxArea = -1;
        int maxAreaIdx = -1;

        for (int idx = 0; idx < contours.size(); idx++) {
            Mat contour = contours.get(idx);

            double contourarea = Imgproc.contourArea(contour);
            if (contourarea > maxArea) {

                maxArea = contourarea;
                maxAreaIdx = idx;
            }
        }

        Imgproc.drawContours(mRgba, contours, maxAreaIdx, new Scalar(255, 0, 0), 5);

    }
    return mRgba; // This function must return

}

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

public List<Point> extractPoints() {
    Mat gray = new Mat();//work_width, work_height, CvType.CV_8UC1);
    Mat binary = new Mat();

    Mat kernel = Mat.ones(3, 3, CvType.CV_8UC1);

    debugreset();

    Mat image = load_transformed();
    working_image = image.clone();
    debugsave(image, "source");

    Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);
    debugsave(gray, "grayscale");

    Imgproc.GaussianBlur(gray, gray, new Size(15, 15), 0);
    debugsave(gray, "blurred");

    //Imgproc.equalizeHist(gray, gray);
    //debugsave(gray, "equalized");

    Imgproc.adaptiveThreshold(gray, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY_INV,
            129, 5);
    //Imgproc.threshold(gray, binary, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    //Imgproc.threshold(gray, binary, 128, 255, Imgproc.THRESH_BINARY_INV);
    debugsave(binary, "binary");

    Imgproc.morphologyEx(binary, binary, Imgproc.MORPH_CLOSE, kernel);
    debugsave(binary, "closed");

    Imgproc.morphologyEx(binary, binary, Imgproc.MORPH_OPEN, kernel);
    debugsave(binary, "opened");

    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(binary, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE); // note: findContours modifies binary
    Imgproc.drawContours(image, contours, -1, new Scalar(0, 0, 255), 3);
    debugsave(image, "contours");

    List<PointAndArea> points = new ArrayList<>();

    for (MatOfPoint cnt : contours) {
        MatOfPoint2f c2f = new MatOfPoint2f();
        c2f.fromArray(cnt.toArray());
        RotatedRect rr = Imgproc.minAreaRect(c2f);

        double area = Imgproc.contourArea(cnt);

        if (rr.size.width / rr.size.height < 3 && rr.size.height / rr.size.width < 3 && rr.size.width < 64
                && rr.size.height < 64 && area > 9 && area < 10000) {
            points.add(new PointAndArea((int) area, rr.center));
        }
    }

    List<Point> final_points = new ArrayList<>();

    Collections.sort(points);
    Collections.reverse(points);
    int prev = -1;
    for (PointAndArea p : points) {
        Log.i("area", Integer.toString(p.area));
        if (prev == -1 || p.area >= prev / 2) {
            prev = p.area;
            Imgproc.circle(image, p.point, 10, new Scalar(0, 255, 0), 5);
            final_points.add(new Point(1 - p.point.y / work_height, 1 - p.point.x / work_width));
        }
    }
    debugsave(image, "circles");

    return final_points;
}

From source file:com.ibm.streamsx.edgevideo.device.FaceDetector.java

License:Open Source License

public List<Mat> extractFaces(Mat rgbFrame, MatOfRect faceRects) {
    List<Mat> faces = new ArrayList<>();
    for (Rect faceRect : faceRects.toArray()) {
        Mat rgbFaceFrame = new Mat(rgbFrame, faceRect);
        rgbFaceFrame = rgbFaceFrame.clone(); // don't inherit subsequent changes to rgbFrame
        faces.add(rgbFaceFrame);
    }
    return faces;
}
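
The clone() in extractFaces is what makes each extracted face independent: new Mat(rgbFrame, faceRect) only creates a header over the same pixel data, so without the clone every face would keep reflecting later changes to rgbFrame. Below is a minimal sketch of that aliasing behavior; the method and variable names are illustrative, and it assumes the OpenCV native library is already loaded.

static void roiVersusClone() {
    Mat frame = Mat.zeros(100, 100, CvType.CV_8UC3);
    Rect faceRect = new Rect(10, 10, 20, 20);

    Mat roi = new Mat(frame, faceRect); // header over frame's data
    Mat face = roi.clone();             // independent deep copy

    frame.setTo(new Scalar(255, 255, 255));

    System.out.println(roi.get(0, 0)[0]);  // 255.0: the ROI sees the change
    System.out.println(face.get(0, 0)[0]); // 0.0: the clone does not
}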

From source file:com.jeremydyer.nifi.ObjectDetectionProcessor.java

License:Apache License

final public Mat detectObjects(final ProcessSession session, FlowFile original, final JSONObject dd,
        final Mat image) {

    CascadeClassifier objectDetector = new CascadeClassifier(dd.getString("opencv_xml_cascade_path"));
    MatOfRect objectDetections = new MatOfRect();
    objectDetector.detectMultiScale(image, objectDetections);
    //getLogger().error("Detected " + objectDetections.toArray().length + " " + dd.getString("name") + " objects in the input flowfile");

    final AtomicReference<Mat> croppedImageReference = new AtomicReference<>();

    int counter = 0;
    for (int i = 0; i < objectDetections.toArray().length; i++) {
        final Rect rect = objectDetections.toArray()[i];
        FlowFile detection = session.write(session.create(original), new OutputStreamCallback() {
            @Override
            public void process(OutputStream outputStream) throws IOException {

                Mat croppedImage = null;

                //Should the image be cropped? If so there is no need to draw bounds because that would be the same as the cropping
                if (dd.getBoolean("crop")) {
                    Rect rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
                    croppedImage = new Mat(image, rectCrop);
                    MatOfByte updatedImage = new MatOfByte();
                    Imgcodecs.imencode(".jpg", croppedImage, updatedImage);
                    croppedImageReference.set(croppedImage);
                    outputStream.write(updatedImage.toArray());
                } else {
                    //Should the image have a border drawn around it?
                    if (dd.getBoolean("drawBounds")) {
                        Mat imageWithBorder = image.clone();
                        Imgproc.rectangle(imageWithBorder, new Point(rect.x, rect.y),
                                new Point(rect.x + rect.width, rect.y + rect.height),
                                new Scalar(255, 255, 255));
                        MatOfByte updatedImage = new MatOfByte();
                        Imgcodecs.imencode(".jpg", imageWithBorder, updatedImage);
                        outputStream.write(updatedImage.toArray());
                    } else {
                        MatOfByte updatedImage = new MatOfByte();
                        Imgcodecs.imencode(".jpg", image, updatedImage);
                        outputStream.write(updatedImage.toArray());
                    }
                }

            }
        });

        Map<String, String> atts = new HashMap<>();
        atts.put("object.detection.name", dd.getString("name"));
        atts.put("object.detection.id", new Long(System.currentTimeMillis() + counter).toString());

        counter++;

        detection = session.putAllAttributes(detection, atts);
        session.transfer(detection, REL_OBJECT_DETECTED);
    }

    Mat childResponse = null;

    if (croppedImageReference.get() != null) {
        childResponse = croppedImageReference.get();
    } else {
        childResponse = image;
    }

    if (dd.has("children")) {
        JSONArray children = dd.getJSONArray("children");
        if (children != null) {

            for (int i = 0; i < children.length(); i++) {
                JSONObject ddd = children.getJSONObject(i);
                childResponse = detectObjects(session, original, ddd, childResponse);
            }
        }
    }

    return childResponse;
}

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

public Optional<Bounds> calibrateFrame(MatOfPoint2f boardCorners, Mat mat) {

    // For debugging
    Mat traceMat = null;
    if (logger.isTraceEnabled()) {
        traceMat = mat.clone();
    }

    initializeSize(mat.cols(), mat.rows());

    // Step 2: Estimate the pattern corners
    MatOfPoint2f estimatedPatternRect = estimatePatternRect(traceMat, boardCorners);

    // Step 3: Use Hough Lines to find the actual corners
    final Optional<MatOfPoint2f> idealCorners = findIdealCorners(mat, estimatedPatternRect);

    if (!idealCorners.isPresent())
        return Optional.empty();

    if (logger.isTraceEnabled()) {
        String filename = String.format("calibrate-dist.png");
        final File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Step 4: Initialize the warp matrix and bounding box
    initializeWarpPerspective(mat, idealCorners.get());

    if (boundingBox.getMinX() < 0 || boundingBox.getMinY() < 0
            || boundingBox.getWidth() > cameraManager.getFeedWidth()
            || boundingBox.getHeight() > cameraManager.getFeedHeight()) {
        return Optional.empty();
    }

    if (logger.isDebugEnabled())
        logger.debug("bounds {} {} {} {}", boundingBox.getMinX(), boundingBox.getMinY(), boundingBox.getWidth(),
                boundingBox.getHeight());

    final Mat undistorted = warpPerspective(mat);

    if (logger.isTraceEnabled()) {

        String filename = String.format("calibrate-undist.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, undistorted);

        Mat undistortedCropped = undistorted.submat((int) boundingBox.getMinY(), (int) boundingBox.getMaxY(),
                (int) boundingBox.getMinX(), (int) boundingBox.getMaxX());

        filename = String.format("calibrate-undist-cropped.png");
        file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, undistortedCropped);
    }

    Mat warpedBoardCorners = warpCorners(boardCorners);

    isCalibrated = true;

    if (calculateFrameDelay) {
        findColors(undistorted, warpedBoardCorners);

        final double squareHeight = boundingBox.getHeight() / (double) (PATTERN_HEIGHT + 1);
        final double squareWidth = boundingBox.getWidth() / (double) (PATTERN_WIDTH + 1);

        int secondSquareCenterX = (int) (boundingBox.getMinX() + (squareWidth * 1.5));
        int secondSquareCenterY = (int) (boundingBox.getMinY() + (squareHeight * .5));

        if (logger.isDebugEnabled())
            logger.debug("pF getFrameDelayPixel x {} y {} p {}", secondSquareCenterX, secondSquareCenterY,
                    undistorted.get(secondSquareCenterY, secondSquareCenterX));

    }

    return Optional.of(boundingBox);
}

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

/**
 * Perspective pattern discovery.
 * 
 * Works similarly to arena calibration, but does not try to identify the
 * outline of the projection area; we are only concerned with size, not
 * alignment or angle.
 * 
 * This function blanks out the pattern that it discovers in the Mat it is
 * provided, so that the pattern is not rediscovered by future pattern
 * discovery, e.g. auto-calibration.
 * 
 * workingMat should be null for all external callers unless there is some
 * need to work off a different Mat than the one having patterns blanked out
 * by this function.
 */
public Optional<Dimension2D> findPaperPattern(MatOfPoint2f boardCorners, Mat mat, Mat workingMat) {

    if (workingMat == null)
        workingMat = mat.clone();

    initializeSize(workingMat.cols(), workingMat.rows());

    // Step 2: Estimate the pattern corners
    final BoundingBox box = getPaperPatternDimensions(workingMat, boardCorners);

    // OpenCV gives us the checkerboard corners, not the outside dimension
    // So this estimates where the outside corner would be, plus a fudge
    // factor for the edge of the paper
    // Printer margins are usually a quarter inch on each edge
    double width = ((double) box.getWidth() * ((double) (PATTERN_WIDTH + 1) / (double) (PATTERN_WIDTH - 1))
            * 1.048);
    double height = ((double) box.getHeight() * ((double) (PATTERN_HEIGHT + 1) / (double) (PATTERN_HEIGHT - 1))
            * 1.063);

    final double PAPER_PATTERN_SIZE_THRESHOLD = .25;
    if (width > PAPER_PATTERN_SIZE_THRESHOLD * workingMat.cols()
            || height > PAPER_PATTERN_SIZE_THRESHOLD * workingMat.rows()) {
        logger.trace("Pattern too big to be paper, must be projection, setting blank {} x {}", box.getWidth(),
                box.getHeight());

        workingMat.submat((int) box.getMinY(), (int) box.getMaxY(), (int) box.getMinX(), (int) box.getMaxX())
                .setTo(new Scalar(0, 0, 0));

        if (logger.isTraceEnabled()) {
            String filename = String.format("blanked-box.png");
            File file = new File(filename);
            filename = file.toString();
            Highgui.imwrite(filename, workingMat);

        }

        final Optional<MatOfPoint2f> boardCornersNew = findChessboard(workingMat);

        if (!boardCornersNew.isPresent())
            return Optional.empty();

        logger.trace("Found new pattern, attempting findPaperPattern {}", boardCornersNew.get());

        return findPaperPattern(boardCornersNew.get(), mat, workingMat);

    }

    if (logger.isTraceEnabled()) {
        logger.trace("pattern width {} height {}", box.getWidth(), box.getHeight());

        logger.trace("paper width {} height {}", width, height);

        int widthOffset = ((int) width - (int) box.getWidth()) / 2;
        int heightOffset = ((int) height - (int) box.getHeight()) / 2;

        logger.trace("offset width {} height {}", widthOffset, heightOffset);

        Mat fullpattern = workingMat.clone();

        // TODO: This doesn't work if the pattern is upside down, but this is for debugging anyway right now
        // Should fix in case it causes an out of bounds or something
        Point topLeft = new Point(boardCorners.get(0, 0)[0], boardCorners.get(0, 0)[1]);
        Point topRight = new Point(boardCorners.get(PATTERN_WIDTH - 1, 0)[0],
                boardCorners.get(PATTERN_WIDTH - 1, 0)[1]);
        Point bottomRight = new Point(boardCorners.get(PATTERN_WIDTH * PATTERN_HEIGHT - 1, 0)[0],
                boardCorners.get(PATTERN_WIDTH * PATTERN_HEIGHT - 1, 0)[1]);
        Point bottomLeft = new Point(boardCorners.get(PATTERN_WIDTH * (PATTERN_HEIGHT - 1), 0)[0],
                boardCorners.get(PATTERN_WIDTH * (PATTERN_HEIGHT - 1), 0)[1]);

        Core.circle(fullpattern, topLeft, 1, new Scalar(255, 0, 0), -1);
        Core.circle(fullpattern, topRight, 1, new Scalar(255, 0, 0), -1);
        Core.circle(fullpattern, bottomRight, 1, new Scalar(255, 0, 0), -1);
        Core.circle(fullpattern, bottomLeft, 1, new Scalar(255, 0, 0), -1);

        String filename = String.format("marked-box.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, fullpattern);

        fullpattern = fullpattern.submat((int) box.getMinY() - heightOffset,
                (int) box.getMinY() - heightOffset + (int) height, (int) box.getMinX() - widthOffset,
                (int) box.getMinX() - widthOffset + (int) width);

        filename = String.format("full-box.png");
        file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, fullpattern);

        Mat cropped = workingMat.submat((int) box.getMinY(), (int) box.getMaxY(), (int) box.getMinX(),
                (int) box.getMaxX());

        filename = String.format("pattern-box.png");
        file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, cropped);
    }

    mat.submat((int) box.getMinY(), (int) box.getMaxY(), (int) box.getMinX(), (int) box.getMaxX())
            .setTo(new Scalar(0, 0, 0));

    return Optional.of(new Dimension2D(width, height));
}
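
Per the javadoc, external callers should pass null for workingMat so that findPaperPattern clones mat itself, and they should expect the detected pattern to be blanked out of the mat argument as a side effect. The following hedged sketch shows such a call site; the enclosing method, the frame and boardCorners parameters, and the println are assumptions for illustration, not code from the project.

// Hedged sketch of an external call site for findPaperPattern (not from the
// project): how boardCorners was detected and what is done with the result
// are assumptions.
void checkForPaperPattern(AutoCalibrationManager calibrationManager, Mat frame,
        MatOfPoint2f boardCorners) {
    // Pass null for workingMat, as the javadoc above recommends for external
    // callers; the detected pattern is blanked out of frame as a side effect.
    Optional<Dimension2D> paperSize = calibrationManager.findPaperPattern(boardCorners, frame, null);
    paperSize.ifPresent(size -> System.out.println(
            "paper pattern " + size.getWidth() + " x " + size.getHeight()));
}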