Example usage for org.opencv.core Mat cols

List of usage examples for org.opencv.core Mat cols

Introduction

On this page you can find example usages for org.opencv.core.Mat.cols().

Prototype

public int cols() 

Source Link

Usage

From source file:com.raulh82vlc.face_detection_sample.opencv.domain.EyesDetectionInteractorImpl.java

License:Apache License

/**
 * Matches a concrete point of the eye by template matching with TM_SQDIFF_NORMED.
 *
 * @param area          sub-region of the gray frame to search within
 * @param builtTemplate previously built eye template; skipped when empty
 * @param matrixGray    full gray frame the search area is taken from
 * @param matrixRGBA    color frame the matched eye rectangle is drawn onto
 */
private static void matchEye(Rect area, Mat builtTemplate, Mat matrixGray, Mat matrixRGBA) {
    Point matchLoc;
    try {
        // when there is no builtTemplate we skip it
        if (builtTemplate.cols() == 0 || builtTemplate.rows() == 0) {
            return;
        }
        Mat submatGray = matrixGray.submat(area);
        int cols = submatGray.cols() - builtTemplate.cols() + 1;
        int rows = submatGray.rows() - builtTemplate.rows() + 1;
        // Mat takes (rows, cols, type) and matchTemplate requires a CV_32FC1
        // result matrix; the original passed (cols, rows) and CV_8U.
        Mat outputTemplateMat = new Mat(rows, cols, CvType.CV_32FC1);

        Imgproc.matchTemplate(submatGray, builtTemplate, outputTemplateMat, Imgproc.TM_SQDIFF_NORMED);
        Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(outputTemplateMat);
        // with TM_SQDIFF_NORMED the best match is the minimum value
        matchLoc = minMaxLocResult.minLoc;
        // translate the match location back into full-frame coordinates
        Point matchLocTx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
        Point matchLocTy = new Point(matchLoc.x + builtTemplate.cols() + area.x,
                matchLoc.y + builtTemplate.rows() + area.y);

        FaceDrawerOpenCV.drawMatchedEye(matchLocTx, matchLocTy, matrixRGBA);
    } catch (Exception e) {
        // best-effort: a failed match on one frame must not break the detection loop
        e.printStackTrace();
    }
}

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Computes the rectangle where the searched picture is and the rotation angle between both images.
 * Throws {@link ImageSearchException} if the picture is not found.
 *
 * @deprecated Kept here for information, but OpenCV 3 does not include SURF anymore for java build
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    // debugging helper: dump the detected object keypoints to a temp file
    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
        // debug output only; do not fail detection because of it
        logger.warn("Could not write keypoints debug picture", e);
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    // FLANN-based matching requires CV_32F descriptors
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;

    // convert once: matches.toList() rebuilds the list on every call, which made
    // the original loops accidentally quadratic
    List<DMatch> matchesList = matches.toList();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matchesList.get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    // keep only matches whose distance is below the configured threshold
    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matchesList.get(i).distance < detectionThreshold) {
            goodMatches.addLast(matchesList.get(i));
        }
    }
    gm.fromList(goodMatches);

    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    // iterate instead of indexing: LinkedList.get(i) is O(n)
    for (DMatch goodMatch : goodMatches) {
        objList.addLast(objectKeyPointsList.get(goodMatch.queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatch.trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    // project the object corners into the scene through the homography
    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners so they are drawn on the scene half of the
            // side-by-side match image
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
            // debug display only; do not fail detection because of it
            logger.warn("Could not show debug picture", e);
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Detect the object inside the scene.
 * We also search the scale of the scene from 20% to 120% scale by steps
 * steps are 10%, with 0.6 accuracy
 * then when a good match is found, we search around by 5% scale steps with 0.7 accuracy
 * then when a good match is found, we search around by 2.5% scale steps with 0.8 accuracy
 * 
 * example:
 * first pass: scales are: 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200
 *             good matches are found around 600 and 700
 * second pass: scales are 550, 600, 650, 700, 750
 *             good matches are found at 650
 * third pass: scales are 625, 650, 675
 * 
 * The best match is at 675
 */
public void detectExactZoneWithScale() {

    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);

    // filled from worker threads below, hence the synchronized wrapper
    List<TemplateMatchProperties> matches = Collections.synchronizedList(new ArrayList<>());

    // scale step (per-mille) -> minimum match value accepted for that pass
    Map<Integer, Double> scaleSteps = new LinkedHashMap<>();
    scaleSteps.put(100, 0.6);
    scaleSteps.put(50, 0.7);
    scaleSteps.put(25, 0.8);

    int currentStep = 100;

    // scales already tried, so refinement passes never recompute one
    Set<Integer> computedScales = new HashSet<>();

    while (currentStep >= 25) {
        final double currentThreshold = scaleSteps.get(currentStep);

        // first loop
        Set<Integer> localScales = Collections.synchronizedSet(new HashSet<>());
        if (currentStep == 100) {
            // first pass: coarse sweep from 20% to 120% scale
            for (int scale = 200; scale < 1200; scale += currentStep) {
                localScales.add(scale);
            }
        } else {
            if (matches.isEmpty()) {
                throw new ImageSearchException("no matches");
            }
            // refinement pass: probe one step on each side of every active match
            for (TemplateMatchProperties tmpM : matches) {
                if (tmpM.isActive()) {
                    localScales.add(tmpM.getMatchScale() - currentStep);
                    localScales.add(tmpM.getMatchScale() + currentStep);
                }
            }
        }

        ExecutorService executorService = Executors
                .newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        for (int scale : localScales) {
            if (computedScales.contains(scale)) {
                continue;
            }
            computedScales.add(scale);

            // resize to scale factor
            // NOTE(review): 'scale' and 'localScale' hold the same value here;
            // mixing both in one expression is confusing but harmless
            final int localScale = scale;
            Size sz = new Size(sceneImageMat.cols() * scale / 1000.0,
                    sceneImageMat.rows() * localScale / 1000.0);

            // skip if resized image is smaller than object
            if (sz.width < objectImageMat.cols() || sz.height < objectImageMat.rows()) {
                continue;
            }

            executorService.submit(() -> {

                Mat resizeSceneImageMat = new Mat();
                Imgproc.resize(sceneImageMat, resizeSceneImageMat, sz);

                try {
                    TemplateMatchProperties match = detectExactZone2(resizeSceneImageMat, objectImageMat,
                            localScale, currentThreshold);
                    matches.add(match);
                } catch (ImageSearchException e) {
                    // no match at this particular scale: expected, simply skip it
                }

            });
        }

        executorService.shutdown();
        try {
            executorService.awaitTermination(10, TimeUnit.SECONDS);
        } catch (Exception e) {
            logger.info("Could not compute scale within 10 seconds", e);
        }

        // shortcut if we find a very good match
        double cleanThreshold = currentThreshold;
        // sort by descending match value
        matches.sort((TemplateMatchProperties t1,
                TemplateMatchProperties t2) -> -(t1.getMatchValue().compareTo(t2.getMatchValue())));
        if (!matches.isEmpty() && matches.get(0).getMatchValue() > 0.9) {
            cleanThreshold = 0.9;
            currentStep = Math.min(currentStep, 50);
        }
        currentStep = currentStep / 2;

        // clean matches from too low matching values
        for (TemplateMatchProperties t : matches) {
            if (t.getMatchValue() < cleanThreshold) {
                t.setActive(false);
            }
        }
    }

    // get the best match
    matches.sort((TemplateMatchProperties t1,
            TemplateMatchProperties t2) -> -(t1.getMatchValue().compareTo(t2.getMatchValue())));

    if (!matches.isEmpty()) {
        TemplateMatchProperties bestMatch = matches.get(0);
        if (bestMatch.getMatchValue() < 1 - detectionThreshold) {
            throw new ImageSearchException(
                    String.format("No match found for threshold %.2f, match found with value %.2f",
                            1 - detectionThreshold, bestMatch.getMatchValue()));
        }

        // scale the match location back to the original scene coordinates
        // NOTE(review): rows() is passed as the 3rd argument and cols() as the 4th —
        // verify against this project's Rectangle constructor parameter order
        detectedRectangle = new Rectangle((int) (bestMatch.getMatchLoc().x / bestMatch.getDoubleScale()),
                (int) (bestMatch.getMatchLoc().y / bestMatch.getDoubleScale()),
                (int) (objectImageMat.rows() / bestMatch.getDoubleScale()),
                (int) (objectImageMat.cols() / bestMatch.getDoubleScale()));

        if (debug) {
            try {
                Imgproc.rectangle(sceneImageMat, new Point(detectedRectangle.x, detectedRectangle.y),
                        new Point(detectedRectangle.x + detectedRectangle.width,
                                detectedRectangle.y + detectedRectangle.height),
                        new Scalar(0, 255, 0));

                showResultingPicture(sceneImageMat);
            } catch (IOException e) {
                // debug display only; ignore failures
            }
        }
        rotationAngle = 0;
        sizeRatio = detectedRectangle.width / (double) objectImageMat.cols();

    } else {
        throw new ImageSearchException("no matching has been found");
    }

}

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Runs OpenCV template matching of the object image over the scene image and
 * returns the min/max localisation of the match result.
 *
 * @param matchMethod    one of the Imgproc.TM_* matching methods
 * @param sceneImageMat  image to search in
 * @param objectImageMat template image to search for
 * @return min/max values and locations of the match result matrix
 */
private MinMaxLocResult getBestTemplateMatching(int matchMethod, Mat sceneImageMat, Mat objectImageMat) {

    // The result matrix has one cell per possible template position
    final int width = sceneImageMat.cols() - objectImageMat.cols() + 1;
    final int height = sceneImageMat.rows() - objectImageMat.rows() + 1;
    final Mat matchResult = new Mat(height, width, CvType.CV_32FC1);

    // Slide the template over the scene and score every position
    Imgproc.matchTemplate(sceneImageMat, objectImageMat, matchResult, matchMethod);

    // Localize the best match
    return Core.minMaxLoc(matchResult);
}

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

/**
 * Calibrates from the detected pattern corners in the given frame and returns the
 * bounding box of the calibrated area, or empty when corner refinement fails or
 * the computed bounds fall outside the camera feed.
 *
 * Side effects: initializes the warp matrix/bounding box and sets isCalibrated.
 *
 * @param boardCorners pattern corners found in the frame
 * @param mat          current camera frame
 * @return bounds of the calibrated area, or empty on failure
 */
public Optional<Bounds> calibrateFrame(MatOfPoint2f boardCorners, Mat mat) {

    // For debugging
    Mat traceMat = null;
    if (logger.isTraceEnabled()) {
        traceMat = mat.clone();
    }

    initializeSize(mat.cols(), mat.rows());

    // Step 2: Estimate the pattern corners
    MatOfPoint2f estimatedPatternRect = estimatePatternRect(traceMat, boardCorners);

    // Step 3: Use Hough Lines to find the actual corners
    final Optional<MatOfPoint2f> idealCorners = findIdealCorners(mat, estimatedPatternRect);

    if (!idealCorners.isPresent())
        return Optional.empty();

    if (logger.isTraceEnabled()) {
        String filename = String.format("calibrate-dist.png");
        final File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Step 4: Initialize the warp matrix and bounding box
    initializeWarpPerspective(mat, idealCorners.get());

    // reject a bounding box that extends beyond the camera feed
    if (boundingBox.getMinX() < 0 || boundingBox.getMinY() < 0
            || boundingBox.getWidth() > cameraManager.getFeedWidth()
            || boundingBox.getHeight() > cameraManager.getFeedHeight()) {
        return Optional.empty();
    }

    if (logger.isDebugEnabled())
        logger.debug("bounds {} {} {} {}", boundingBox.getMinX(), boundingBox.getMinY(), boundingBox.getWidth(),
                boundingBox.getHeight());

    final Mat undistorted = warpPerspective(mat);

    if (logger.isTraceEnabled()) {

        String filename = String.format("calibrate-undist.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, undistorted);

        Mat undistortedCropped = undistorted.submat((int) boundingBox.getMinY(), (int) boundingBox.getMaxY(),
                (int) boundingBox.getMinX(), (int) boundingBox.getMaxX());

        filename = String.format("calibrate-undist-cropped.png");
        file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, undistortedCropped);
    }

    Mat warpedBoardCorners = warpCorners(boardCorners);

    isCalibrated = true;

    if (calculateFrameDelay) {
        findColors(undistorted, warpedBoardCorners);

        // locate the centre of the second pattern square; its pixel is logged to
        // help measure the camera frame delay
        final double squareHeight = boundingBox.getHeight() / (double) (PATTERN_HEIGHT + 1);
        final double squareWidth = boundingBox.getWidth() / (double) (PATTERN_WIDTH + 1);

        int secondSquareCenterX = (int) (boundingBox.getMinX() + (squareWidth * 1.5));
        int secondSquareCenterY = (int) (boundingBox.getMinY() + (squareHeight * .5));

        if (logger.isDebugEnabled())
            logger.debug("pF getFrameDelayPixel x {} y {} p {}", secondSquareCenterX, secondSquareCenterY,
                    undistorted.get(secondSquareCenterY, secondSquareCenterX));

    }

    return Optional.of(boundingBox);
}

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

/**
 * Perspective pattern discovery.
 * 
 * Works similar to arena calibration but does not try to identify the
 * outline of the projection area. We are only concerned with size, not
 * alignment or angle.
 * 
 * This function blanks out the pattern that it discovers in the Mat it is
 * provided. This is so that the pattern is not discovered by future pattern
 * discovery, e.g. auto-calibration.
 * 
 * workingMat should be null for all external callers unless there is some
 * need to work off a different Mat than is having patterns blanked out by
 * this function.
 */
public Optional<Dimension2D> findPaperPattern(MatOfPoint2f boardCorners, Mat mat, Mat workingMat) {

    if (workingMat == null)
        workingMat = mat.clone();

    initializeSize(workingMat.cols(), workingMat.rows());

    // Step 2: Estimate the pattern corners
    final BoundingBox box = getPaperPatternDimensions(workingMat, boardCorners);

    // OpenCV gives us the checkerboard corners, not the outside dimension
    // So this estimates where the outside corner would be, plus a fudge
    // factor for the edge of the paper
    // Printer margins are usually a quarter inch on each edge
    double width = ((double) box.getWidth() * ((double) (PATTERN_WIDTH + 1) / (double) (PATTERN_WIDTH - 1))
            * 1.048);
    double height = ((double) box.getHeight() * ((double) (PATTERN_HEIGHT + 1) / (double) (PATTERN_HEIGHT - 1))
            * 1.063);

    // a pattern bigger than a quarter of the frame cannot be paper: treat it as
    // the projection, blank it out, and retry discovery on the blanked frame
    final double PAPER_PATTERN_SIZE_THRESHOLD = .25;
    if (width > PAPER_PATTERN_SIZE_THRESHOLD * workingMat.cols()
            || height > PAPER_PATTERN_SIZE_THRESHOLD * workingMat.rows()) {
        logger.trace("Pattern too big to be paper, must be projection, setting blank {} x {}", box.getWidth(),
                box.getHeight());

        workingMat.submat((int) box.getMinY(), (int) box.getMaxY(), (int) box.getMinX(), (int) box.getMaxX())
                .setTo(new Scalar(0, 0, 0));

        if (logger.isTraceEnabled()) {
            String filename = String.format("blanked-box.png");
            File file = new File(filename);
            filename = file.toString();
            Highgui.imwrite(filename, workingMat);

        }

        final Optional<MatOfPoint2f> boardCornersNew = findChessboard(workingMat);

        if (!boardCornersNew.isPresent())
            return Optional.empty();

        logger.trace("Found new pattern, attempting findPaperPattern {}", boardCornersNew.get());

        // recurse with the blanked working Mat so the projection is skipped
        return findPaperPattern(boardCornersNew.get(), mat, workingMat);

    }

    if (logger.isTraceEnabled()) {
        logger.trace("pattern width {} height {}", box.getWidth(), box.getHeight());

        logger.trace("paper width {} height {}", width, height);

        int widthOffset = ((int) width - (int) box.getWidth()) / 2;
        int heightOffset = ((int) height - (int) box.getHeight()) / 2;

        logger.trace("offset width {} height {}", widthOffset, heightOffset);

        Mat fullpattern = workingMat.clone();

        // TODO: This doesn't work if the pattern is upside down, but this is for debugging anyway right now
        // Should fix in case it causes an out of bounds or something
        Point topLeft = new Point(boardCorners.get(0, 0)[0], boardCorners.get(0, 0)[1]);
        Point topRight = new Point(boardCorners.get(PATTERN_WIDTH - 1, 0)[0],
                boardCorners.get(PATTERN_WIDTH - 1, 0)[1]);
        Point bottomRight = new Point(boardCorners.get(PATTERN_WIDTH * PATTERN_HEIGHT - 1, 0)[0],
                boardCorners.get(PATTERN_WIDTH * PATTERN_HEIGHT - 1, 0)[1]);
        Point bottomLeft = new Point(boardCorners.get(PATTERN_WIDTH * (PATTERN_HEIGHT - 1), 0)[0],
                boardCorners.get(PATTERN_WIDTH * (PATTERN_HEIGHT - 1), 0)[1]);

        Core.circle(fullpattern, topLeft, 1, new Scalar(255, 0, 0), -1);
        Core.circle(fullpattern, topRight, 1, new Scalar(255, 0, 0), -1);
        Core.circle(fullpattern, bottomRight, 1, new Scalar(255, 0, 0), -1);
        Core.circle(fullpattern, bottomLeft, 1, new Scalar(255, 0, 0), -1);

        String filename = String.format("marked-box.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, fullpattern);

        fullpattern = fullpattern.submat((int) box.getMinY() - heightOffset,
                (int) box.getMinY() - heightOffset + (int) height, (int) box.getMinX() - widthOffset,
                (int) box.getMinX() - widthOffset + (int) width);

        filename = String.format("full-box.png");
        file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, fullpattern);

        Mat cropped = workingMat.submat((int) box.getMinY(), (int) box.getMaxY(), (int) box.getMinX(),
                (int) box.getMaxX());

        filename = String.format("pattern-box.png");
        file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, cropped);
    }

    // blank the discovered pattern in the caller's Mat so it is not found again
    mat.submat((int) box.getMinY(), (int) box.getMaxY(), (int) box.getMinX(), (int) box.getMaxX())
            .setTo(new Scalar(0, 0, 0));

    return Optional.of(new Dimension2D(width, height));
}

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

/**
 * Refines the estimated pattern corners by detecting edges and Hough lines in
 * the frame and intersecting the lines whose endpoints lie near the estimate.
 *
 * NOTE(review): Canny and GaussianBlur are applied to {@code frame} in place,
 * so the caller's Mat is modified — the commented-out grey-scale copy suggests
 * a copy may once have been intended; confirm callers expect this.
 *
 * @param frame                frame to analyse
 * @param estimatedPatternRect the four estimated corners of the pattern
 * @return the four refined corners in sorted order, or empty when any corner
 *         could not be matched within the tolerance
 */
private Optional<MatOfPoint2f> findIdealCorners(final Mat frame, final MatOfPoint2f estimatedPatternRect) {
    Mat traceMat = null;
    if (logger.isTraceEnabled()) {
        Mat traceMatTemp = frame.clone();
        traceMat = new Mat();

        Imgproc.cvtColor(traceMatTemp, traceMat, Imgproc.COLOR_GRAY2BGR);
    }

    // pixel distance, dynamic because we want to allow any resolution or
    // distance from pattern
    final int toleranceThreshold = (int) (minimumDimension / (double) (PATTERN_HEIGHT - 1) / 1.5);

    // Grey scale conversion.
    //final Mat grey = new Mat();
    //Imgproc.cvtColor(frame, grey, Imgproc.COLOR_BGR2GRAY);
    final Mat grey = frame;

    // Find edges
    Imgproc.Canny(grey, grey, CANNY_THRESHOLD_1, CANNY_THRESHOLD_2);

    // Blur the lines, otherwise the lines algorithm does not consider them
    Imgproc.GaussianBlur(grey, grey, gaussianBlurSize, GAUSSIANBLUR_SIGMA);

    if (logger.isTraceEnabled()) {
        logger.trace("tolerance threshold {} minimumDimension {}", toleranceThreshold, minimumDimension);

        String filename = String.format("calibrate-undist-grey-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, grey);
    }

    if (logger.isDebugEnabled())
        logger.debug("estimation {} {} {} {}", estimatedPatternRect.get(0, 0), estimatedPatternRect.get(1, 0),
                estimatedPatternRect.get(2, 0), estimatedPatternRect.get(3, 0));

    // Easier to work off of Points
    final Point[] estimatedPoints = matOfPoint2fToPoints(estimatedPatternRect);

    if (logger.isTraceEnabled()) {
        Core.circle(traceMat, estimatedPoints[0], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[1], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[2], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[3], 1, new Scalar(0, 0, 255), -1);
    }

    // Find lines
    // These parameters are just guesswork right now
    final Mat mLines = new Mat();
    final int minLineSize = (int) (minimumDimension * .90);
    final int lineGap = toleranceThreshold;

    // Do it
    Imgproc.HoughLinesP(grey, mLines, HOUGHLINES_RHO, HOUGHLINES_THETA, HOUGHLINES_THRESHOLD, minLineSize,
            lineGap);

    // Find the lines that match our estimates
    final Set<double[]> verifiedLines = new HashSet<double[]>();

    // keep only lines whose BOTH endpoints lie near one of the estimated corners
    for (int x = 0; x < mLines.cols(); x++) {
        final double[] vec = mLines.get(0, x);
        final double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        final Point start = new Point(x1, y1);
        final Point end = new Point(x2, y2);

        if (nearPoints(estimatedPoints, start, toleranceThreshold)
                && nearPoints(estimatedPoints, end, toleranceThreshold)) {
            verifiedLines.add(vec);

            if (logger.isTraceEnabled()) {
                Core.line(traceMat, start, end, new Scalar(255, 0, 0), 1);
            }
        }
    }

    if (logger.isTraceEnabled())
        logger.trace("verifiedLines: {}", verifiedLines.size());

    // Reduce the lines to possible corners
    // every pairwise intersection of verified lines is a corner candidate
    final Set<Point> possibleCorners = new HashSet<Point>();

    for (double[] line1 : verifiedLines) {
        for (double[] line2 : verifiedLines) {
            if (line1 == line2)
                continue;

            Optional<Point> intersection = computeIntersect(line1, line2);

            if (intersection.isPresent())
                possibleCorners.add(intersection.get());
        }
    }

    // Reduce the possible corners to ideal corners
    // pick, for each estimated corner, the closest candidate within the tolerance
    Point[] idealCorners = new Point[4];
    final double[] idealDistances = { toleranceThreshold, toleranceThreshold, toleranceThreshold,
            toleranceThreshold };

    for (Point pt : possibleCorners) {
        for (int i = 0; i < 4; i++) {
            final double distance = euclideanDistance(pt, estimatedPoints[i]);

            if (distance < idealDistances[i]) {
                idealDistances[i] = distance;
                idealCorners[i] = pt;
            }
        }
    }

    if (logger.isTraceEnabled()) {
        logger.trace("idealDistances {} {} {} {}", idealDistances[0], idealDistances[1], idealDistances[2],
                idealDistances[3]);

        String filename = String.format("calibrate-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Verify that we have the corners we need
    for (Point pt : idealCorners) {
        if (pt == null)
            return Optional.empty();

        if (logger.isTraceEnabled()) {
            logger.trace("idealCorners {}", pt);
            Core.circle(traceMat, pt, 1, new Scalar(0, 255, 255), -1);
        }
    }

    if (logger.isTraceEnabled()) {
        String filename = String.format("calibrate-lines-with-corners.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Sort them into the correct order
    // 1st-------2nd
    // | |
    // | |
    // | |
    // 3rd-------4th
    idealCorners = sortCorners(idealCorners);

    // build the MatofPoint2f
    final MatOfPoint2f sourceCorners = new MatOfPoint2f();
    sourceCorners.alloc(4);

    for (int i = 0; i < 4; i++) {
        sourceCorners.put(i, 0, new double[] { idealCorners[i].x, idealCorners[i].y });
    }

    return Optional.of(sourceCorners);
}

From source file:com.shootoff.camera.shotdetection.JavaShotDetector.java

License:Open Source License

/**
 * Scans the working frame sector by sector in parallel, updating the moving-average
 * filter for every pixel and collecting pixels that cross the detection threshold.
 *
 * @param workingFrame 3-channel frame to scan (channels read as H, S, V)
 * @param detectShots  whether threshold pixels should be reported as shot candidates
 * @return the set of pixels that crossed the threshold (empty when not detecting)
 */
private Set<Pixel> findThresholdPixelsAndUpdateFilter(final Mat workingFrame, final boolean detectShots) {
    dynamicallyThresholded = 0;

    final Set<Pixel> thresholdPixels = Collections.synchronizedSet(new HashSet<Pixel>());

    if (!cameraManager.isDetecting())
        return thresholdPixels;

    final int subWidth = workingFrame.cols() / SECTOR_COLUMNS;
    final int subHeight = workingFrame.rows() / SECTOR_ROWS;

    final int cols = workingFrame.cols();
    final int channels = workingFrame.channels();

    // copy the whole frame into a primitive array once: far faster than
    // per-pixel Mat.get() calls inside the loop
    final int size = (int) (workingFrame.total() * channels);
    final byte[] workingFramePrimitive = new byte[size];
    workingFrame.get(0, 0, workingFramePrimitive);

    // In this loop we accomplish both MovingAverage updates AND threshold
    // pixel detection
    Parallel.forIndex(0, (SECTOR_ROWS * SECTOR_COLUMNS), 1, new Operation<Integer>() {
        public void perform(Integer sector) {
            // row-major sector index: x cycles through the columns and y advances
            // every SECTOR_COLUMNS sectors. (The original divided by SECTOR_ROWS,
            // which only covers all sectors when rows == columns.)
            final int sectorX = sector.intValue() % SECTOR_COLUMNS;
            final int sectorY = sector.intValue() / SECTOR_COLUMNS;

            if (!cameraManager.isSectorOn(sectorX, sectorY))
                return;

            final int startX = subWidth * sectorX;
            final int startY = subHeight * sectorY;

            for (int y = startY; y < startY + subHeight; y++) {
                final int yOffset = y * cols;
                for (int x = startX; x < startX + subWidth; x++) {
                    // unpack the unsigned HSV bytes for this pixel
                    final int currentH = workingFramePrimitive[(yOffset + x) * channels] & 0xFF;
                    final int currentS = workingFramePrimitive[(yOffset + x) * channels + 1] & 0xFF;
                    final int currentV = workingFramePrimitive[(yOffset + x) * channels + 2] & 0xFF;

                    final Pixel pixel = updateFilter(currentH, currentS, currentV, x, y, detectShots);

                    if (pixel != null)
                        thresholdPixels.add(pixel);
                }
            }
        }
    });

    return thresholdPixels;
}

From source file:com.shootoff.camera.shotdetection.PixelCluster.java

License:Open Source License

/**
 * Computes a colour-difference score for the border of this pixel cluster,
 * used to distinguish red from green.
 *
 * Walks the 8-neighbourhood of every cluster pixel (while that pixel's
 * connectedness is below MAXIMUM_CONNECTEDNESS), collecting each surrounding
 * non-cluster pixel once. Border pixels more saturated than the border average
 * contribute (distance-from-red minus distance-from-green), biased by the
 * per-pixel history in {@code colorDistanceFromRed}.
 *
 * @param workingFrame         HSV frame the cluster was detected in
 * @param colorDistanceFromRed per-pixel running colour-distance bias
 * @return the accumulated score, 0 when the cluster has no eligible border
 *         pixels; NOTE(review): confirm the sign convention (red vs green)
 *         against the caller before relying on it
 */
public double getColorDifference(final Mat workingFrame, final int[][] colorDistanceFromRed) {
    final Map<Pixel, byte[]> visited = new HashMap<Pixel, byte[]>();
    int avgSaturation = 0;

    // collect the non-cluster neighbours of every cluster pixel, summing their
    // saturation to compute the border average afterwards
    for (final Pixel pixel : this) {
        if (pixel.getConnectedness() < MAXIMUM_CONNECTEDNESS) {
            for (int h = -1; h <= 1; h++) {
                for (int w = -1; w <= 1; w++) {
                    if (h == 0 && w == 0)
                        continue;

                    final int rx = pixel.x + w;
                    final int ry = pixel.y + h;

                    // skip neighbours outside the frame
                    if (rx < 0 || ry < 0 || rx >= workingFrame.cols() || ry >= workingFrame.rows())
                        continue;

                    final Pixel nearPoint = new Pixel(rx, ry);

                    if (!visited.containsKey(nearPoint) && !this.contains(nearPoint)) {
                        byte[] np = { 0, 0, 0 };
                        workingFrame.get(ry, rx, np);
                        final int npSaturation = np[1] & 0xFF;

                        avgSaturation += npSaturation;

                        visited.put(nearPoint, np);
                    }
                }
            }
        }
    }

    final int pixelCount = visited.size();
    if (pixelCount == 0)
        return 0;

    avgSaturation /= pixelCount;

    int colorDistance = 0;
    int avgColorDistance = 0;
    int tempColorDistance = 0;

    // only border pixels more saturated than the average contribute to the score
    for (final Entry<Pixel, byte[]> pixelEntry : visited.entrySet()) {
        byte[] np = pixelEntry.getValue();
        final int npSaturation = np[1] & 0xFF;

        if (npSaturation > avgSaturation) {
            final int npColor = np[0] & 0xFF;
            final int npLum = np[2] & 0xFF;

            // hue distance from red (hue wraps at 180) and from green (hue 60),
            // both weighted by luminosity and saturation
            final int thisDFromRed = Math.min(npColor, Math.abs(180 - npColor)) * npLum * npSaturation;
            final int thisDFromGreen = Math.abs(60 - npColor) * npLum * npSaturation;

            final int currentCol = thisDFromRed - thisDFromGreen;

            final Pixel pixel = pixelEntry.getKey();
            colorDistance += currentCol
                    - (int) (CURRENT_COLOR_BIAS_MULTIPLIER * colorDistanceFromRed[pixel.x][pixel.y]);

            // extra accumulators for trace logging only
            if (logger.isTraceEnabled()) {
                tempColorDistance += currentCol;
                avgColorDistance += colorDistanceFromRed[pixel.x][pixel.y];
            }
        }
    }

    if (logger.isTraceEnabled())
        logger.trace("Pixels {} Color {} avg {} sum {}", pixelCount, colorDistance / pixelCount,
                avgColorDistance / pixelCount, tempColorDistance / pixelCount);

    return colorDistance;
}

From source file:com.sikulix.core.Finder.java

License:Open Source License

/**
 * Dumps every cell of an integer Mat to the trace log, one line per (row, col)
 * position, with all channel values for that cell.
 */
private static void printMatI(Mat mat) {
    final int[] cell = new int[mat.channels()];
    for (int row = 0; row < mat.rows(); row++) {
        for (int col = 0; col < mat.cols(); col++) {
            // read all channels of this cell into the reusable buffer
            mat.get(row, col, cell);
            log.trace("(%d, %d) %s", row, col, Arrays.toString(cell));
        }
    }
}