List of usage examples for the org.opencv.core.Mat no-argument constructor
public Mat()
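Most of the snippets below follow the same idiom: construct an empty Mat with the no-argument constructor and pass it to an OpenCV routine as the output destination, letting the library allocate the correct size and type. A minimal sketch of that pattern, assuming the OpenCV 3.x Java bindings (the image path is an illustrative placeholder):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class EmptyMatAsOutput {
    public static void main(String[] args) {
        // Load the native OpenCV library before using any Mat-based API
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Read an image; the path is a placeholder for this sketch
        Mat bgr = Imgcodecs.imread("scene.png");

        // An empty Mat: 0x0 with no data; OpenCV allocates it when used as an output
        Mat gray = new Mat();
        Imgproc.cvtColor(bgr, gray, Imgproc.COLOR_BGR2GRAY);

        System.out.println("gray: " + gray.rows() + "x" + gray.cols());
    }
}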
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
/**
 * Detect the object inside the scene.
 * We also search the scale of the scene from 20% to 120% scale by steps:
 * steps are 10%, with 0.6 accuracy,
 * then when a good match is found, we search around by 5% scale steps with 0.7 accuracy,
 * then when a good match is found, we search around by 2.5% scale steps with 0.8 accuracy.
 *
 * example:
 * first pass: scales are: 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200
 * good matches are found around 600 and 700
 * second pass: scales are 550, 600, 650, 700, 750
 * good matches are found at 650
 * third pass: scales are 625, 650, 675
 *
 * The best match is at 675
 */
public void detectExactZoneWithScale() {

    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);

    List<TemplateMatchProperties> matches = Collections.synchronizedList(new ArrayList<>());

    Map<Integer, Double> scaleSteps = new LinkedHashMap<>();
    scaleSteps.put(100, 0.6);
    scaleSteps.put(50, 0.7);
    scaleSteps.put(25, 0.8);

    int currentStep = 100;

    Set<Integer> computedScales = new HashSet<>();

    while (currentStep >= 25) {
        final double currentThreshold = scaleSteps.get(currentStep);

        // first loop
        Set<Integer> localScales = Collections.synchronizedSet(new HashSet<>());
        if (currentStep == 100) {
            for (int scale = 200; scale < 1200; scale += currentStep) {
                localScales.add(scale);
            }
        } else {
            if (matches.isEmpty()) {
                throw new ImageSearchException("no matches");
            }
            for (TemplateMatchProperties tmpM : matches) {
                if (tmpM.isActive()) {
                    localScales.add(tmpM.getMatchScale() - currentStep);
                    localScales.add(tmpM.getMatchScale() + currentStep);
                }
            }
        }

        ExecutorService executorService = Executors
                .newFixedThreadPool(Runtime.getRuntime().availableProcessors());

        for (int scale : localScales) {
            if (computedScales.contains(scale)) {
                continue;
            }
            computedScales.add(scale);

            // resize to scale factor
            final int localScale = scale;
            Size sz = new Size(sceneImageMat.cols() * scale / 1000.0, sceneImageMat.rows() * localScale / 1000.0);

            // skip if resized image is smaller than object
            if (sz.width < objectImageMat.cols() || sz.height < objectImageMat.rows()) {
                continue;
            }

            executorService.submit(() -> {
                Mat resizeSceneImageMat = new Mat();
                Imgproc.resize(sceneImageMat, resizeSceneImageMat, sz);

                try {
                    TemplateMatchProperties match = detectExactZone2(resizeSceneImageMat, objectImageMat,
                            localScale, currentThreshold);
                    matches.add(match);
                } catch (ImageSearchException e) {
                }
            });
        }

        executorService.shutdown();
        try {
            executorService.awaitTermination(10, TimeUnit.SECONDS);
        } catch (Exception e) {
            logger.info("Could not compute scale within 10 seconds", e);
        }

        // shortcut if we find a very good match
        double cleanThreshold = currentThreshold;
        matches.sort((TemplateMatchProperties t1,
                TemplateMatchProperties t2) -> -(t1.getMatchValue().compareTo(t2.getMatchValue())));
        if (!matches.isEmpty() && matches.get(0).getMatchValue() > 0.9) {
            cleanThreshold = 0.9;
            currentStep = Math.min(currentStep, 50);
        }
        currentStep = currentStep / 2;

        // clean matches from too low matching values
        for (TemplateMatchProperties t : matches) {
            if (t.getMatchValue() < cleanThreshold) {
                t.setActive(false);
            }
        }
    }

    // get the best match
    matches.sort((TemplateMatchProperties t1,
            TemplateMatchProperties t2) -> -(t1.getMatchValue().compareTo(t2.getMatchValue())));

    if (!matches.isEmpty()) {
        TemplateMatchProperties bestMatch = matches.get(0);
        if (bestMatch.getMatchValue() < 1 - detectionThreshold) {
            throw new ImageSearchException(
                    String.format("No match found for threshold %.2f, match found with value %.2f",
                            1 - detectionThreshold, bestMatch.getMatchValue()));
        }

        detectedRectangle = new Rectangle((int) (bestMatch.getMatchLoc().x / bestMatch.getDoubleScale()),
                (int) (bestMatch.getMatchLoc().y / bestMatch.getDoubleScale()),
                (int) (objectImageMat.rows() / bestMatch.getDoubleScale()),
                (int) (objectImageMat.cols() / bestMatch.getDoubleScale()));

        if (debug) {
            try {
                Imgproc.rectangle(sceneImageMat, new Point(detectedRectangle.x, detectedRectangle.y),
                        new Point(detectedRectangle.x + detectedRectangle.width,
                                detectedRectangle.y + detectedRectangle.height),
                        new Scalar(0, 255, 0));
                showResultingPicture(sceneImageMat);
            } catch (IOException e) {
            }
        }
        rotationAngle = 0;
        sizeRatio = detectedRectangle.width / (double) objectImageMat.cols();

    } else {
        throw new ImageSearchException("no matching has been found");
    }
}
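The helper detectExactZone2 is not reproduced on this page; presumably it wraps OpenCV template matching at a single scale. A hedged sketch of what such a single-scale match could look like, assuming TM_CCOEFF_NORMED so that the best score is the global maximum (method and variable names are illustrative, not from the project):

// Hypothetical single-scale matcher, not the project's detectExactZone2
private static Core.MinMaxLocResult matchAtScale(Mat scene, Mat object) {
    // The result matrix has size (W - w + 1) x (H - h + 1); new Mat() lets OpenCV allocate it
    Mat result = new Mat();
    Imgproc.matchTemplate(scene, object, result, Imgproc.TM_CCOEFF_NORMED);

    // With TM_CCOEFF_NORMED the best match is the global maximum
    Core.MinMaxLocResult mmr = Core.minMaxLoc(result);
    // mmr.maxVal is the match score; mmr.maxLoc is the top-left corner of the match
    return mmr;
}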
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
public void processFrame(final BufferedImage frame) {
    if (boundsResult == null) {
        final Mat matTemp;

        synchronized (frame) {
            matTemp = Camera.bufferedImageToMat(frame);
        }

        final Mat mat = new Mat();
        Imgproc.cvtColor(matTemp, mat, Imgproc.COLOR_BGR2GRAY);

        // This is dynamic per OTSU algorithm
        Imgproc.threshold(mat, mat, 128, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);

        String filename = String.format("bw.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, mat);

        // TODO: Make a master function that finds all chessboard corners, then just returns them as a list
        // Instead of all this garbage intertwined with paper and projector calibration

        // Step 1: Find the chessboard corners
        Optional<MatOfPoint2f> boardCorners = findChessboard(mat);

        if (!boardCorners.isPresent())
            return;

        // THIS FUNCTION ALSO BLANKS THE PAPER PATTERN IN mat
        // Which the function description tells you, so this is a second warning
        Optional<Dimension2D> newPaperDimensions = findPaperPattern(boardCorners.get(), mat, null);

        if (!paperDimensions.isPresent() && newPaperDimensions.isPresent()) {
            paperDimensions = newPaperDimensions;

            logger.debug("Found paper dimensions {}", paperDimensions.get());
        }

        if (newPaperDimensions.isPresent())
            boardCorners = findChessboard(mat);

        if (!boardCorners.isPresent())
            return;

        Optional<Bounds> bounds = calibrateFrame(boardCorners.get(), mat);

        if (bounds.isPresent()) {
            boundsResult = bounds.get();

            if (calculateFrameDelay) {
                logger.debug("Checking frame delay");

                checkForFrameChange(frame);

                frameTimestampBeforeFrameChange = cameraManager.getCurrentFrameTimestamp();

                cameraManager.setArenaBackground(null);
            } else {
                if (callback != null) {
                    callback.call(null);
                }
            }
        }
    } else {
        final Optional<Long> frameDelay = checkForFrameChange(frame);

        if (frameDelay.isPresent()) {
            frameDelayResult = frameDelay.get();

            logger.debug("frameDelayResult {}", frameDelayResult);

            if (callback != null) {
                callback.call(null);
            }
        }
    }
}
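findChessboard is a private helper of AutoCalibrationManager that is not shown on this page; it most likely wraps OpenCV's Calib3d.findChessboardCorners. A hedged sketch of that call under that assumption (the pattern-size constants and flags here are illustrative, not the project's values):

// Hypothetical wrapper around OpenCV chessboard detection; constants are illustrative
private Optional<MatOfPoint2f> findChessboardSketch(Mat grayFrame) {
    // Inner-corner counts of the calibration pattern (placeholder constants)
    final Size patternSize = new Size(PATTERN_WIDTH, PATTERN_HEIGHT);
    final MatOfPoint2f corners = new MatOfPoint2f();

    // Returns true only if the full grid of inner corners was located
    boolean found = Calib3d.findChessboardCorners(grayFrame, patternSize, corners,
            Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_NORMALIZE_IMAGE);

    return found ? Optional.of(corners) : Optional.empty();
}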
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
private Optional<MatOfPoint2f> findIdealCorners(final Mat frame, final MatOfPoint2f estimatedPatternRect) {
    Mat traceMat = null;
    if (logger.isTraceEnabled()) {
        Mat traceMatTemp = frame.clone();
        traceMat = new Mat();
        Imgproc.cvtColor(traceMatTemp, traceMat, Imgproc.COLOR_GRAY2BGR);
    }

    // pixel distance, dynamic because we want to allow any resolution or
    // distance from pattern
    final int toleranceThreshold = (int) (minimumDimension / (double) (PATTERN_HEIGHT - 1) / 1.5);

    // Grey scale conversion.
    //final Mat grey = new Mat();
    //Imgproc.cvtColor(frame, grey, Imgproc.COLOR_BGR2GRAY);
    final Mat grey = frame;

    // Find edges
    Imgproc.Canny(grey, grey, CANNY_THRESHOLD_1, CANNY_THRESHOLD_2);

    // Blur the lines, otherwise the lines algorithm does not consider them
    Imgproc.GaussianBlur(grey, grey, gaussianBlurSize, GAUSSIANBLUR_SIGMA);

    if (logger.isTraceEnabled()) {
        logger.trace("tolerance threshold {} minimumDimension {}", toleranceThreshold, minimumDimension);

        String filename = String.format("calibrate-undist-grey-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, grey);
    }

    if (logger.isDebugEnabled())
        logger.debug("estimation {} {} {} {}", estimatedPatternRect.get(0, 0), estimatedPatternRect.get(1, 0),
                estimatedPatternRect.get(2, 0), estimatedPatternRect.get(3, 0));

    // Easier to work off of Points
    final Point[] estimatedPoints = matOfPoint2fToPoints(estimatedPatternRect);

    if (logger.isTraceEnabled()) {
        Core.circle(traceMat, estimatedPoints[0], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[1], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[2], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[3], 1, new Scalar(0, 0, 255), -1);
    }

    // Find lines
    // These parameters are just guesswork right now
    final Mat mLines = new Mat();
    final int minLineSize = (int) (minimumDimension * .90);
    final int lineGap = toleranceThreshold;

    // Do it
    Imgproc.HoughLinesP(grey, mLines, HOUGHLINES_RHO, HOUGHLINES_THETA, HOUGHLINES_THRESHOLD, minLineSize,
            lineGap);

    // Find the lines that match our estimates
    final Set<double[]> verifiedLines = new HashSet<double[]>();

    for (int x = 0; x < mLines.cols(); x++) {
        final double[] vec = mLines.get(0, x);
        final double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        final Point start = new Point(x1, y1);
        final Point end = new Point(x2, y2);

        if (nearPoints(estimatedPoints, start, toleranceThreshold)
                && nearPoints(estimatedPoints, end, toleranceThreshold)) {
            verifiedLines.add(vec);

            if (logger.isTraceEnabled()) {
                Core.line(traceMat, start, end, new Scalar(255, 0, 0), 1);
            }
        }
    }

    if (logger.isTraceEnabled())
        logger.trace("verifiedLines: {}", verifiedLines.size());

    // Reduce the lines to possible corners
    final Set<Point> possibleCorners = new HashSet<Point>();

    for (double[] line1 : verifiedLines) {
        for (double[] line2 : verifiedLines) {
            if (line1 == line2)
                continue;

            Optional<Point> intersection = computeIntersect(line1, line2);

            if (intersection.isPresent())
                possibleCorners.add(intersection.get());
        }
    }

    // Reduce the possible corners to ideal corners
    Point[] idealCorners = new Point[4];
    final double[] idealDistances = { toleranceThreshold, toleranceThreshold, toleranceThreshold,
            toleranceThreshold };

    for (Point pt : possibleCorners) {
        for (int i = 0; i < 4; i++) {
            final double distance = euclideanDistance(pt, estimatedPoints[i]);

            if (distance < idealDistances[i]) {
                idealDistances[i] = distance;
                idealCorners[i] = pt;
            }
        }
    }

    if (logger.isTraceEnabled()) {
        logger.trace("idealDistances {} {} {} {}", idealDistances[0], idealDistances[1], idealDistances[2],
                idealDistances[3]);

        String filename = String.format("calibrate-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Verify that we have the corners we need
    for (Point pt : idealCorners) {
        if (pt == null)
            return Optional.empty();

        if (logger.isTraceEnabled()) {
            logger.trace("idealCorners {}", pt);
            Core.circle(traceMat, pt, 1, new Scalar(0, 255, 255), -1);
        }
    }

    if (logger.isTraceEnabled()) {
        String filename = String.format("calibrate-lines-with-corners.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Sort them into the correct order
    // 1st-------2nd
    // |           |
    // |           |
    // |           |
    // 3rd-------4th
    idealCorners = sortCorners(idealCorners);

    // build the MatOfPoint2f
    final MatOfPoint2f sourceCorners = new MatOfPoint2f();
    sourceCorners.alloc(4);

    for (int i = 0; i < 4; i++) {
        sourceCorners.put(i, 0, new double[] { idealCorners[i].x, idealCorners[i].y });
    }

    return Optional.of(sourceCorners);
}
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
private Mat warpPerspective(final Mat frame) {
    if (warpInitialized) {
        final Mat mat = new Mat();

        Imgproc.warpPerspective(frame, mat, perspMat, frame.size(), Imgproc.INTER_LINEAR);

        return mat;
    } else {
        logger.warn("warpPerspective called when warpInitialized is false - {} {} - {}", perspMat, boundingBox,
                isCalibrated);
        return frame;
    }
}
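perspMat itself is computed elsewhere in the class once calibration succeeds. As a hedged illustration of how such a 3x3 matrix is typically derived with OpenCV (the corner coordinates below are placeholders, not values from this project):

// Illustrative only: derive a perspective matrix from four matched corner pairs
MatOfPoint2f sourceCorners = new MatOfPoint2f(
        new Point(102, 57), new Point(538, 64),   // placeholder detected corners (TL, TR)
        new Point(95, 412), new Point(545, 420)); // placeholder detected corners (BL, BR)
MatOfPoint2f destCorners = new MatOfPoint2f(
        new Point(0, 0), new Point(640, 0),       // target rectangle (TL, TR)
        new Point(0, 480), new Point(640, 480));  // target rectangle (BL, BR)

// getPerspectiveTransform accepts MatOfPoint2f (a subclass of Mat) directly
Mat perspMat = Imgproc.getPerspectiveTransform(sourceCorners, destCorners);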
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
private Mat warpCorners(MatOfPoint2f imageCorners) {
    Mat mat = null;

    if (warpInitialized) {
        mat = new Mat();

        Core.transform(imageCorners, mat, perspMat);
    } else {
        logger.warn("warpCorners called when warpInitialized is false - {} {} - {}", perspMat, boundingBox,
                isCalibrated);
    }

    return mat;
}
From source file:com.shootoff.camera.Camera.java
License:Open Source License
public static Mat colorTransfer(Mat source, Mat target) {
    Mat src = new Mat();
    Mat dst = new Mat();

    Imgproc.cvtColor(source, src, Imgproc.COLOR_BGR2Lab);
    Imgproc.cvtColor(target, dst, Imgproc.COLOR_BGR2Lab);

    ArrayList<Mat> src_channels = new ArrayList<Mat>();
    ArrayList<Mat> dst_channels = new ArrayList<Mat>();
    Core.split(src, src_channels);
    Core.split(dst, dst_channels);

    for (int i = 0; i < 3; i++) {
        MatOfDouble src_mean = new MatOfDouble(), src_std = new MatOfDouble();
        MatOfDouble dst_mean = new MatOfDouble(), dst_std = new MatOfDouble();
        Core.meanStdDev(src_channels.get(i), src_mean, src_std);
        Core.meanStdDev(dst_channels.get(i), dst_mean, dst_std);

        dst_channels.get(i).convertTo(dst_channels.get(i), CvType.CV_64FC1);
        Core.subtract(dst_channels.get(i), dst_mean, dst_channels.get(i));
        Core.divide(dst_std, src_std, dst_std);
        Core.multiply(dst_channels.get(i), dst_std, dst_channels.get(i));
        Core.add(dst_channels.get(i), src_mean, dst_channels.get(i));
        dst_channels.get(i).convertTo(dst_channels.get(i), CvType.CV_8UC1);
    }

    Core.merge(dst_channels, dst);
    Imgproc.cvtColor(dst, dst, Imgproc.COLOR_Lab2BGR);

    return dst;
}
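A possible call site for colorTransfer, sketched with the OpenCV 2.4 Highgui I/O used elsewhere in this project; the file names are placeholders:

// Hypothetical usage; file names are placeholders
Mat source = Highgui.imread("reference-lighting.png"); // frame whose color statistics we want
Mat target = Highgui.imread("current-frame.png");      // frame to be adjusted

// Transfers the per-channel Lab mean and standard deviation from source to target
Mat adjusted = Camera.colorTransfer(source, target);
Highgui.imwrite("adjusted-frame.png", adjusted);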
From source file:com.shootoff.camera.shotdetection.JavaShotDetector.java
License:Open Source License
/**
 * Use an HSV copy of the current camera frame to detect shots and use a
 * BGR copy to draw bright pixels as red and high motion pixels as blue. The
 * BGR copy is what ShootOFF shows.
 *
 * @param frameBGR
 *            a blue, green, red copy of the current frame for drawing
 *            bright/high motion pixels
 * @param detectShots
 *            whether or not to detect a shot
 */
@Override
public void processFrame(final Mat frameBGR, final boolean detectShots) {
    updateMovingAveragePeriod();

    // Must reset before every updateFilter loop
    brightPixels.clear();

    // Create a hue, saturation, value copy of the current frame used to detect
    // the shots. The BGR version is just used by this implementation to show
    // the user where bright/high motion pixels are
    final Mat frameHSV = new Mat();
    Imgproc.cvtColor(frameBGR, frameHSV, Imgproc.COLOR_BGR2HSV);

    final Set<Pixel> thresholdPixels = findThresholdPixelsAndUpdateFilter(frameHSV,
            (detectShots && filtersInitialized));

    int thresholdPixelsSize = thresholdPixels.size();

    if (logger.isTraceEnabled()) {
        if (thresholdPixelsSize >= 1)
            logger.trace("thresholdPixels {} getMinimumShotDimension {}", thresholdPixelsSize,
                    getMinimumShotDimension());

        for (final Pixel pixel : thresholdPixels) {
            logger.trace("thresholdPixel {} {} - from array {} from pixel cur {} avg {}", pixel.x, pixel.y,
                    lumsMovingAverage[pixel.x][pixel.y], pixel.getCurrentLum(), pixel.getLumAverage());
        }
    }

    if (!filtersInitialized)
        filtersInitialized = checkIfInitialized();

    if (detectShots && filtersInitialized) {
        updateAvgThresholdPixels(thresholdPixelsSize);
        updateAvgBrightPixels(brightPixels.size());

        if (shouldShowBrightnessWarning()) {
            cameraManager.showBrightnessWarning();
        }

        if (thresholdPixelsSize >= getMinimumShotDimension() && !isExcessiveMotion(thresholdPixelsSize)) {
            final Set<PixelCluster> clusters = pixelClusterManager.clusterPixels(thresholdPixels,
                    getMinimumShotDimension());

            if (logger.isTraceEnabled()) {
                logger.trace("thresholdPixels {}", thresholdPixelsSize);
                logger.trace("clusters {}", clusters.size());
            }

            detectShots(frameHSV, clusters);
        }
        // Moved to after detectShots because otherwise we'll have changed
        // pixels in the frame that's being checked for shots
        else if (isExcessiveMotion(thresholdPixelsSize)) {
            if (shouldShowMotionWarning(thresholdPixelsSize))
                cameraManager.showMotionWarning();

            for (final Pixel pixel : thresholdPixels) {
                frameBGR.put(pixel.y, pixel.x, BLUE_MAT_PIXEL);
            }
        }

        if (shouldShowBrightnessWarningBool && !brightPixels.isEmpty()) {
            // Make the feed pixels red so the user can easily see what the
            // problem pixels are
            for (final Pixel pixel : brightPixels) {
                frameBGR.put(pixel.y, pixel.x, RED_MAT_PIXEL);
            }
        }
    }
}
From source file:com.shootoff.camera.shotdetection.JavaShotDetector.java
License:Open Source License
private void addShot(Mat workingFrame, PixelCluster pc) {
    final Optional<Color> color = pc.getColor(workingFrame, colorDistanceFromRed);

    if (!color.isPresent()) {
        if (logger.isDebugEnabled())
            logger.debug("Processing Shot: Shot Rejected By Lack Of Color Density");
        return;
    }

    final double x = pc.centerPixelX;
    final double y = pc.centerPixelY;

    if (super.addShot(color.get(), x, y, true) && config.isDebugShotsRecordToFiles()) {
        final Mat debugFrame = new Mat();
        Imgproc.cvtColor(workingFrame, debugFrame, Imgproc.COLOR_HSV2BGR);

        String filename = String.format("shot-%d-%d-%d_orig.png", cameraManager.getFrameCount(),
                (int) pc.centerPixelX, (int) pc.centerPixelY);
        final File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, debugFrame);

        for (final Pixel p : pc) {
            if (javafx.scene.paint.Color.GREEN.equals(color.get())) {
                final double[] greenColor = { 0, 255, 0 };
                debugFrame.put(p.y, p.x, greenColor);
            } else {
                final double[] redColor = { 0, 0, 255 };
                debugFrame.put(p.y, p.x, redColor);
            }
        }

        File outputfile = new File(String.format("shot-%d-%d-%d.png", cameraManager.getFrameCount(),
                (int) pc.centerPixelX, (int) pc.centerPixelY));
        filename = outputfile.toString();
        Highgui.imwrite(filename, debugFrame);
    }
}
From source file:com.sikulix.api.Element.java
License:Open Source License
public Picture capture(Element elem) {
    content = new Mat();
    Picture img = new Picture();

    if (isSpecial()) {
        SX.terminate(1, "capture: special not implemented");
    } else {
        Robot robot = SX.getSXROBOT();
        img = new Picture(robot.createScreenCapture(elem.getRectangle()));
    }

    if (img.hasContent()) {
        content = img.getContent();
    }

    return img;
}
From source file:com.sikulix.api.Image.java
License:Open Source License
private Mat get(URL url) {
    Mat mContent = new Mat();

    if (SX.isSet(url)) {
        urlImg = url;

        if (isCaching()) {
            mContent = imageFiles.get(urlImg);
            if (SX.isNull(mContent)) {
                mContent = new Mat();
            }
        }

        if (mContent.empty()) {
            mContent = get();
        }

        if (isCaching() && !mContent.empty()) {
            changeCache(true, urlImg, content);
        }
    }

    return mContent;
}