List of usage examples for org.opencv.core.Mat.height()

public int height()

Returns the number of rows in the Mat, i.e. the image height in pixels (equivalent to rows()).
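A minimal standalone sketch of the call (not taken from the examples below), assuming the OpenCV native library has already been loaded, e.g. via OpenCVLoader on Android or System.loadLibrary(Core.NATIVE_LIBRARY_NAME) on the desktop:

import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatHeightExample {
    public static void main(String[] args) {
        // Assumes the native library is already loaded.
        Mat mat = new Mat(480, 640, CvType.CV_8UC3); // 480 rows x 640 cols
        int height = mat.height(); // 480, same value as mat.rows()
        int width = mat.width();   // 640, same value as mat.cols()
        System.out.println("height=" + height + ", width=" + width);
    }
}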
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java
License:Open Source License
@NonNull
public static Mat createStripMat(@NonNull Mat mat, @NonNull Point centerPatch, boolean grouped, int maxWidth) {
    // done with lab schema, make rgb to show in image view
    // mat holds the strip image
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_Lab2RGB);

    int rightBorder = 0;
    double ratio;
    if (mat.width() < MIN_STRIP_WIDTH) {
        ratio = MAX_STRIP_HEIGHT / mat.height();
        rightBorder = BORDER_SIZE;
    } else {
        ratio = (double) (maxWidth - 10) / (double) mat.width();
    }

    Imgproc.resize(mat, mat, new Size(mat.width() * ratio, mat.height() * ratio));

    Core.copyMakeBorder(mat, mat, BORDER_SIZE + ARROW_TRIANGLE_HEIGHT, BORDER_SIZE * 2, 0, rightBorder,
            Core.BORDER_CONSTANT,
            new Scalar(MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE));

    // Draw an arrow to the patch
    // only draw if this is not a 'grouped' strip
    if (!grouped) {
        double x = centerPatch.x * ratio;
        double y = 0;
        MatOfPoint matOfPoint = new MatOfPoint(
                new Point((x - ARROW_TRIANGLE_LENGTH), y),
                new Point((x + ARROW_TRIANGLE_LENGTH), y),
                new Point(x, y + ARROW_TRIANGLE_HEIGHT),
                new Point((x - ARROW_TRIANGLE_LENGTH), y));

        Imgproc.fillConvexPoly(mat, matOfPoint, DARK_GRAY, Core.LINE_AA, 0);
    }
    return mat;
}
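The example above derives a scale factor from mat.height() (or mat.width()) before resizing; a minimal sketch of the same aspect-ratio-preserving idiom, using a hypothetical scaleToHeight helper and assuming the usual org.opencv.core and org.opencv.imgproc imports:

// Hypothetical helper: scale a Mat so its height equals targetHeight,
// deriving the ratio from mat.height() to preserve the aspect ratio.
static Mat scaleToHeight(Mat mat, int targetHeight) {
    double ratio = (double) targetHeight / (double) mat.height();
    Mat scaled = new Mat();
    Imgproc.resize(mat, scaled, new Size(mat.width() * ratio, mat.height() * ratio));
    return scaled;
}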
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java
License:Open Source License
@NonNull
public static Mat createDescriptionMat(String desc, int width) {
    int[] baseline = new int[1];
    Size textSizeDesc = Imgproc.getTextSize(desc, Core.FONT_HERSHEY_SIMPLEX, TITLE_FONT_SIZE, 1, baseline);
    Mat descMat = new Mat((int) Math.ceil(textSizeDesc.height) * 3, width, CvType.CV_8UC3, LAB_WHITE);
    Imgproc.putText(descMat, desc, new Point(2, descMat.height() - textSizeDesc.height),
            Core.FONT_HERSHEY_SIMPLEX, TITLE_FONT_SIZE, LAB_GREY, 2, Core.LINE_AA, false);
    return descMat;
}
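Here descMat.height() anchors the text baseline near the bottom of the newly created canvas; a small sketch of the same pattern with hypothetical FONT_SCALE and MARGIN constants:

// Draw a label just above the bottom edge of a Mat.
// FONT_SCALE and MARGIN are hypothetical values for illustration.
static void drawBottomLabel(Mat canvas, String label) {
    final double FONT_SCALE = 0.6;
    final int MARGIN = 5;
    Point origin = new Point(MARGIN, canvas.height() - MARGIN); // baseline near the last row
    Imgproc.putText(canvas, label, origin, Core.FONT_HERSHEY_SIMPLEX, FONT_SCALE,
            new Scalar(0, 0, 0), 1, Core.LINE_AA, false);
}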
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java
License:Open Source License
/**
 * Create Mat to hold a rectangle for each color with the corresponding value.
 *
 * @param patches the patches on the strip
 * @param width   the width of the Mat to be returned
 * @return the Mat with the color range
 */
@NonNull
public static Mat createColorRangeMatGroup(@NonNull List<StripTest.Brand.Patch> patches, int width) {
    // vertical size of mat: size of color block - X_MARGIN + top distance
    double xTranslate = (double) width / (double) patches.get(0).getColors().length();
    int numPatches = patches.size();
    Mat colorRangeMat = new Mat((int) Math.ceil(numPatches * (xTranslate + X_MARGIN) - X_MARGIN), width,
            CvType.CV_8UC3, LAB_WHITE);

    JSONArray colors;
    int offset = 0;
    for (int p = 0; p < numPatches; p++) {
        colors = patches.get(p).getColors();
        for (int d = 0; d < colors.length(); d++) {
            try {
                JSONObject colorObj = colors.getJSONObject(d);
                double value = colorObj.getDouble(SensorConstants.VALUE);
                JSONArray lab = colorObj.getJSONArray(SensorConstants.LAB);
                Scalar scalarLab = new Scalar((lab.getDouble(0) / 100) * MAX_RGB_INT_VALUE,
                        lab.getDouble(1) + 128, lab.getDouble(2) + 128);

                // draw a rectangle filled with color for result value
                Point topLeft = new Point(xTranslate * d, offset);
                Point bottomRight = new Point(topLeft.x + xTranslate - X_MARGIN,
                        xTranslate + offset - X_MARGIN);
                Imgproc.rectangle(colorRangeMat, topLeft, bottomRight, scalarLab, -1);

                // draw color value below rectangle
                if (p == 0) {
                    Size textSizeValue = Imgproc.getTextSize(DECIMAL_FORMAT.format(value),
                            Core.FONT_HERSHEY_SIMPLEX, NORMAL_FONT_SCALE, 1, null);
                    Point centerText = new Point(
                            topLeft.x + (bottomRight.x - topLeft.x) / 2 - textSizeValue.width / 2,
                            colorRangeMat.height() - textSizeValue.height);
                    Imgproc.putText(colorRangeMat, DECIMAL_FORMAT.format(value), centerText,
                            Core.FONT_HERSHEY_SIMPLEX, NORMAL_FONT_SCALE, LAB_GREY, 2, Core.LINE_AA, false);
                }
            } catch (JSONException e) {
                Timber.e(e);
            }
        }
        offset += xTranslate;
    }
    return colorRangeMat;
}
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java
License:Open Source License
public static Mat getPatch(@NonNull Mat mat, @NonNull Point patchCenter, int subMatSize) {
    // make a subMat around center of the patch
    int minRow = (int) Math.round(Math.max(patchCenter.y - subMatSize, 0));
    int maxRow = (int) Math.round(Math.min(patchCenter.y + subMatSize, mat.height()));
    int minCol = (int) Math.round(Math.max(patchCenter.x - subMatSize, 0));
    int maxCol = (int) Math.round(Math.min(patchCenter.x + subMatSize, mat.width()));

    // create subMat
    return mat.submat(minRow, maxRow, minCol, maxCol).clone();
}
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java
License:Open Source License
public static ColorDetected getPatchColor(@NonNull Mat mat, @NonNull Point patchCenter, int subMatSize) {
    // make a subMat around center of the patch
    int minRow = (int) Math.round(Math.max(patchCenter.y - subMatSize, 0));
    int maxRow = (int) Math.round(Math.min(patchCenter.y + subMatSize, mat.height()));
    int minCol = (int) Math.round(Math.max(patchCenter.x - subMatSize, 0));
    int maxCol = (int) Math.round(Math.min(patchCenter.x + subMatSize, mat.width()));

    // create subMat
    Mat patch = mat.submat(minRow, maxRow, minCol, maxCol);

    // compute the mean color and return it
    return OpenCVUtil.detectStripPatchColor(patch);
}
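Both helpers above use mat.height() and mat.width() to clamp the sub-rectangle inside the image; a minimal standalone sketch of that bounds check, with a hypothetical halfSize parameter:

// Extract a square region around (cx, cy), clipped to the Mat borders.
// halfSize is a hypothetical half-width of the region, in pixels.
static Mat centeredRegion(Mat mat, double cx, double cy, int halfSize) {
    int minRow = (int) Math.max(Math.round(cy - halfSize), 0);
    int maxRow = (int) Math.min(Math.round(cy + halfSize), mat.height());
    int minCol = (int) Math.max(Math.round(cx - halfSize), 0);
    int maxCol = (int) Math.min(Math.round(cx + halfSize), mat.width());
    return mat.submat(minRow, maxRow, minCol, maxCol);
}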
From source file:org.designosaurs.opmode.DesignosaursAuto.java
@Override public void runOpMode() { appContext = hardwareMap.appContext; long startTime; if (TEST_MODE) { DesignosaursHardware.hardwareEnabled = false; Log.i(TAG, "*** TEST MODE ENABLED ***"); Log.i(TAG, "Hardware is disabled, skipping to beacon search state."); Log.i(TAG, "Web debugging interface can be found at http://" + DesignosaursUtils.getIpAddress(appContext) + ":9001/"); autonomousState = STATE_SEARCHING; } setInitState("Configuring hardware..."); robot.init(hardwareMap); setInitState("Initializing vuforia..."); VuforiaLocalizer.Parameters params = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId); params.cameraDirection = VuforiaLocalizer.CameraDirection.BACK; params.vuforiaLicenseKey = VUFORIA_LICENCE_KEY; vuforia = new VuforiaLocalizerImplSubclass(params); // Vuforia tracks the images, we'll call them beacons VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17"); beacons.get(0).setName("wheels"); beacons.get(1).setName("tools"); beacons.get(2).setName("legos"); beacons.get(3).setName("gears"); setInitState("Initializing OpenCV..."); OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, appContext, mLoaderCallback); setInitState("Select team color using the gamepad."); while (!isStarted() && !isStopRequested()) { updateTeamColor(); robot.waitForTick(25); } if (DesignosaursHardware.hardwareEnabled) { buttonPusherManager.start(); buttonPusherManager.setStatus(ButtonPusherManager.STATE_HOMING); shooterManager.start(); shooterManager.setStatus(ShooterManager.STATE_AT_BASE); } if (HOTEL_MODE) setState(STATE_SEARCHING); beacons.activate(); startTime = System.currentTimeMillis(); robot.startTimer(); long ticksSeeingImage = 0; while (opModeIsActive()) { String beaconName = ""; // Detect and process images for (VuforiaTrackable beac : beacons) { OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose(); if (pose != null) { beaconName = beac.getName(); if (beac.getName().equals(lastScoredBeaconName)) // fixes seeing the first beacon and wanting to go back to it continue; Matrix34F rawPose = new Matrix34F(); float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12); rawPose.setData(poseData); lastPose = rawPose; recalculateCriticalPoints(); boundPoints(); centeredPos = center.y; // drive routines align based on this } } if (vuforia.rgb != null && ENABLE_CAMERA_STREAMING && System.currentTimeMillis() > (lastFrameSentAt + 50)) { lastFrameSentAt = System.currentTimeMillis(); Bitmap bm = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(), Bitmap.Config.RGB_565); bm.copyPixelsFromBuffer(vuforia.rgb.getPixels()); Bitmap resizedbitmap = DesignosaursUtils.resize(bm, bm.getWidth() / 2, bm.getHeight() / 2); FtcRobotControllerActivity.webServer.streamCameraFrame(resizedbitmap); if (center != null) { ArrayList<String> coords = new ArrayList<>(4); coords.add(start.toString()); coords.add(end.toString()); coords.add(center.toString()); FtcRobotControllerActivity.webServer.streamPoints(coords); } } switch (autonomousState) { case STATE_INITIAL_POSITIONING: robot.startOrientationTracking(); if (teamColor == TEAM_BLUE) { robot.rightMotor.setDirection(DcMotor.Direction.REVERSE); robot.leftMotor.setDirection(DcMotor.Direction.FORWARD); } if (teamColor == TEAM_RED) { robot.accel(0.3, 0.5); shooterManager.setStatus(ShooterManager.STATE_SCORING); robot.setDrivePower(0); robot.waitForTick(1400); robot.accel(0.3, 0.5); robot.waitForTick(1400); robot.goStraight(-0.2, 0.8); 
updateRunningState("Initial turn..."); robot.turn(-34, 0.3); updateRunningState("Secondary move..."); robot.accel(0.5, FAST_DRIVE_POWER); robot.goStraight(2.8, FAST_DRIVE_POWER); robot.decel(0.5, 0); updateRunningState("Secondary turn..."); robot.turn(38, 0.2); } else { robot.goStraight(-0.3, 0.5); shooterManager.setStatus(ShooterManager.STATE_SCORING); robot.setDrivePower(0); robot.accel(-0.3, 0.5); robot.waitForTick(1400); robot.goStraight(-0.8, 0.4); robot.setDrivePower(0); robot.turn(179, 0.45); updateRunningState("Initial turn..."); robot.turn(22, 0.3); updateRunningState("Secondary move..."); robot.accel(0.5, FAST_DRIVE_POWER); robot.goStraight(4.1, FAST_DRIVE_POWER); robot.decel(0.5, 0); updateRunningState("Secondary turn..."); robot.turn(-35, 0.3); } robot.setDrivePower(0); // Allow the camera time to focus: robot.waitForTick(1000); setState(STATE_SEARCHING); break; case STATE_SEARCHING: if (Math.abs(getRelativePosition()) < BEACON_ALIGNMENT_TOLERANCE) { stateMessage = "Analysing beacon data..."; robot.setDrivePower(0); ticksSeeingImage++; vuforia.disableFlashlight(); if (ticksSeeingImage < 5) continue; byte pass = 0; boolean successful = false; BeaconPositionResult lastBeaconPosition = null; int[] range = { 0, 500 }; Mat image = null; while (!successful) { image = getRegionAboveBeacon(); if (image == null || vuforia.rgb == null) { Log.w(TAG, "No frame! _"); robot.setDrivePower(MIN_DRIVE_POWER); continue; } lastBeaconPosition = beaconFinder.process(System.currentTimeMillis(), image, SAVE_IMAGES) .getResult(); range = lastBeaconPosition.getRangePixels(); if (range[0] < 0) range[0] = 0; if (range[1] > image.width()) range[1] = image.width(); Log.i(TAG, "Beacon finder results: " + lastBeaconPosition.toString()); if (lastBeaconPosition.isConclusive()) successful = true; else { pass++; Log.i(TAG, "Searching for beacon presence, pass #" + beaconsFound + ":" + pass + "."); // We can't see both buttons, so move back and forth and run detection algorithm again if (pass <= 3) robot.goCounts(teamColor == TEAM_RED ? -400 : 400, 0.2); if (pass <= 4 && pass >= 3) robot.goCounts(teamColor == TEAM_RED ? 1000 : -1000, 0.2); if (pass > 4) { robot.goCounts(teamColor == TEAM_RED ? -1600 : 1600, 0.4); successful = true; } // Allow camera time to autofocus: robot.setDrivePower(0); robot.waitForTick(100); } } if (autonomousState != STATE_SEARCHING) continue; // Change the values in the following line for how much off the larger image we crop (y-wise, // the x axis is controlled by where the robot thinks the beacon is, see BeaconFinder). // TODO: Tune this based on actual field Log.i(TAG, "Source image is " + image.height() + "px by " + image.width() + "px"); int width = range[1] - range[0]; Log.i(TAG, "X: " + range[0] + " Y: 0, WIDTH: " + width + ", HEIGHT: " + (image.height() > 50 ? image.height() - 50 : image.height())); if (range[0] < 0) range[0] = 0; if (width < 0) width = image.width() - range[0]; Mat croppedImageRaw = new Mat(image, new Rect(range[0], 0, width, image.height() > 50 ? 
image.height() - 50 : image.height())), croppedImage = new Mat(); Imgproc.resize(croppedImageRaw, croppedImage, new Size(), 0.5, 0.5, Imgproc.INTER_LINEAR); if (OBFUSCATE_MIDDLE) Imgproc.rectangle(croppedImage, new Point((croppedImage.width() / 2) - 35, 0), new Point((croppedImage.width() / 2) + 55, croppedImage.height()), new Scalar(255, 255, 255, 255), -1); BeaconColorResult lastBeaconColor = beaconProcessor .process(System.currentTimeMillis(), croppedImage, SAVE_IMAGES).getResult(); BeaconColorResult.BeaconColor targetColor = (teamColor == TEAM_RED ? BeaconColorResult.BeaconColor.RED : BeaconColorResult.BeaconColor.BLUE); Log.i(TAG, "*** BEACON FOUND ***"); Log.i(TAG, "Target color: " + (targetColor == BeaconColorResult.BeaconColor.BLUE ? "Blue" : "Red")); Log.i(TAG, "Beacon colors: " + lastBeaconColor.toString()); // Beacon is already the right color: if (lastBeaconColor.getLeftColor() == targetColor && lastBeaconColor.getRightColor() == targetColor) { robot.accel(0.5, 1); robot.goStraight(targetSide == SIDE_LEFT ? 1.2 : 0.7, 1); robot.decel(0.5, DRIVE_POWER); advanceToSecondBeacon(beaconName); continue; } // Both sides of the beacon are the wrong color, so just score the left side: if (lastBeaconColor.getLeftColor() != targetColor && lastBeaconColor.getRightColor() != targetColor) { targetSide = SIDE_LEFT; robot.setDrivePower(-DRIVE_POWER * 0.75); setState(STATE_ALIGNING_WITH_BEACON); continue; } // TODO: Replace goStraight call with proper combined distance from beacon offset + target side offset robot.goStraight(lastBeaconPosition.getOffsetFeet(), DRIVE_POWER); robot.setDrivePower(0); robot.resetDriveEncoders(); targetSide = lastBeaconColor.getLeftColor() == targetColor ? SIDE_LEFT : SIDE_RIGHT; robot.setDrivePower(-DRIVE_POWER * 0.75); setState(STATE_ALIGNING_WITH_BEACON); } else if (Math.abs(getRelativePosition()) < SLOW_DOWN_AT) { stateMessage = "Beacon seen, centering (" + String.valueOf(getRelativePosition()) + ")..."; robot.resetDriveEncoders(); if (getRelativePosition() > 0 && getRelativePosition() != Integer.MAX_VALUE) robot.setDrivePower(-MIN_DRIVE_POWER); else robot.setDrivePower(MIN_DRIVE_POWER); } break; case STATE_ALIGNING_WITH_BEACON: stateMessage = "Positioning to deploy placer..."; if (ticksInState > 450) robot.emergencyStop(); double targetCounts = (targetSide == SIDE_LEFT) ? 1150 : 250; if (Math.max(Math.abs(robot.getAdjustedEncoderPosition(robot.leftMotor)), Math.abs(robot.getAdjustedEncoderPosition(robot.rightMotor))) >= targetCounts) { Log.i(TAG, "//// DEPLOYING ////"); robot.setDrivePower(0); robot.startOrientationTracking(true); buttonPusherManager.setStatus(ButtonPusherManager.STATE_SCORING); setState(STATE_WAITING_FOR_PLACER); } break; case STATE_WAITING_FOR_PLACER: stateMessage = "Waiting for placer to deploy."; if ((buttonPusherManager.getStatus() != ButtonPusherManager.STATE_SCORING && buttonPusherManager.getTicksInState() >= 10) || buttonPusherManager.getStatus() == ButtonPusherManager.STATE_AT_BASE) { advanceToSecondBeacon(beaconName); if (beaconsFound == 2 || HOTEL_MODE) setState(STATE_FINISHED); else { robot.turn(teamColor == TEAM_RED ? -3 : 3, 0.2); robot.accel(0.5, 1); robot.turn(teamColor == TEAM_RED ? 2 : -2, 0.2); robot.goStraight(targetSide == SIDE_LEFT ? 1.5 : 1, 1); robot.decel(0.5, DRIVE_POWER); setState(STATE_SEARCHING); } } } ticksInState++; updateRunningState(); robot.waitForTick(20); } }
From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VuforiaNavigationTest1.java
License:Open Source License
@Override
public void loop() {
    VuforiaLocalizer.CloseableFrame frame;
    mVLib.loop(true); // update location info and do debug telemetry

    if (!mVLib.getFrames().isEmpty()) {
        frame = mVLib.getFrames().poll();
        Image img = frame.getImage(0);
        ByteBuffer buf = img.getPixels();
        Mat m = new Mat();
        m.put(0, 0, buf.array());
        telemetry.addData("Mat Width:", m.width());
        telemetry.addData("Mat Height:", m.height());
    }
}
From source file:org.firstinspires.ftc.teamcode.opmodes.old2017.OpenCVVuforia.java
License:Open Source License
@Override
public void loop() {
    VuforiaLocalizer.CloseableFrame frame;
    mVLib.loop(true); // update location info and do debug telemetry

    if (!mVLib.getFrames().isEmpty()) {
        frame = mVLib.getFrames().poll();
        Image img = frame.getImage(0);
        ByteBuffer buf = img.getPixels();
        // this currently throws an unsupportedOperation exception
        byte[] ray = buf.array();
        Mat m = new Mat();
        m.put(0, 0, ray);
        telemetry.addData("Mat Width:", m.width());
        telemetry.addData("Mat Height:", m.height());
    }
}
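In both Vuforia examples the Mat comes from the no-argument constructor, so m.width() and m.height() report 0 (Mat.put on an empty Mat copies nothing). A minimal sketch, assuming an RGB565 frame (2 bytes per pixel), of sizing the Mat from the Vuforia Image first so that m.height() matches the frame height:

// Allocate the Mat from the frame dimensions, then copy the pixel data.
// Assumes img is an RGB565 frame; CV_8UC2 holds its 2 bytes per pixel.
Image img = frame.getImage(0);
ByteBuffer buf = img.getPixels();
byte[] pixels = new byte[buf.remaining()];
buf.get(pixels); // works for direct buffers, unlike buf.array()

Mat m = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC2); // rows = frame height
m.put(0, 0, pixels);

telemetry.addData("Mat Width:", m.width());   // now img.getWidth()
telemetry.addData("Mat Height:", m.height()); // now img.getHeight()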
From source file:org.lasarobotics.vision.ftc.resq.BeaconAnalyzer.java
License:Open Source License
static Beacon.BeaconAnalysis analyze_FAST(ColorBlobDetector detectorRed, ColorBlobDetector detectorBlue, Mat img, Mat gray, ScreenOrientation orientation, Rectangle bounds, boolean debug) { //Figure out which way to read the image double orientationAngle = orientation.getAngle(); boolean swapLeftRight = orientationAngle >= 180; //swap if LANDSCAPE_WEST or PORTRAIT_REVERSE boolean readOppositeAxis = orientation == ScreenOrientation.PORTRAIT || orientation == ScreenOrientation.PORTRAIT_REVERSE; //read other axis if any kind of portrait //Bound the image if (readOppositeAxis) //Force the analysis box to transpose inself in place //noinspection SuspiciousNameCombination bounds = new Rectangle(new Point(bounds.center().y / img.height() * img.width(), bounds.center().x / img.width() * img.height()), bounds.height(), bounds.width()) .clip(new Rectangle(img.size())); if (!swapLeftRight && readOppositeAxis) //Force the analysis box to flip across its primary axis bounds = new Rectangle( new Point((img.size().width / 2) + Math.abs(bounds.center().x - (img.size().width / 2)), bounds.center().y), bounds.width(), bounds.height()); else if (swapLeftRight && !readOppositeAxis) //Force the analysis box to flip across its primary axis bounds = new Rectangle(new Point(bounds.center().x, img.size().height - bounds.center().y), bounds.width(), bounds.height()); bounds = bounds.clip(new Rectangle(img.size())); //Get contours within the bounds detectorRed.process(img); detectorBlue.process(img); List<Contour> contoursRed = detectorRed.getContours(); List<Contour> contoursBlue = detectorBlue.getContours(); //DEBUG Draw contours before filtering if (debug) Drawing.drawContours(img, contoursRed, new ColorRGBA("#FF0000"), 2); if (debug) Drawing.drawContours(img, contoursBlue, new ColorRGBA("#0000FF"), 2); if (debug) Drawing.drawRectangle(img, bounds, new ColorRGBA("#aaaaaa"), 4); //Get the largest contour in each - we're calling this one the main light int largestIndexRed = findLargestIndexInBounds(contoursRed, bounds); int largestIndexBlue = findLargestIndexInBounds(contoursBlue, bounds); Contour largestRed = (largestIndexRed != -1) ? contoursRed.get(largestIndexRed) : null; Contour largestBlue = (largestIndexBlue != -1) ? contoursBlue.get(largestIndexBlue) : null; //If we don't have a main light for one of the colors, we know both colors are the same //TODO we should re-filter the contours by size to ensure that we get at least a decent size if (largestRed == null && largestBlue == null) return new Beacon.BeaconAnalysis(); //INFO The best contour from each color (if available) is selected as red and blue //INFO The two best contours are then used to calculate the location of the beacon //If we don't have a main light for one of the colors, we know both colors are the same //TODO we should re-filter the contours by size to ensure that we get at least a decent size //If the largest part of the non-null color is wider than a certain distance, then both are bright //Otherwise, only one may be lit //If only one is lit, and is wider than a certain distance, it is bright //TODO We are currently assuming that the beacon cannot be in a "bright" state if (largestRed == null) return new Beacon.BeaconAnalysis(); else if (largestBlue == null) return new Beacon.BeaconAnalysis(); //The height of the beacon on screen is the height of the best contour Contour largestHeight = ((largestRed.size().height) > (largestBlue.size().height)) ? 
largestRed : largestBlue; double beaconHeight = largestHeight.size().height; //Get beacon width on screen by extrapolating from height final double beaconActualHeight = Constants.BEACON_HEIGHT; //cm, only the lit up portion - 14.0 for entire final double beaconActualWidth = Constants.BEACON_WIDTH; //cm final double beaconWidthHeightRatio = Constants.BEACON_WH_RATIO; double beaconWidth = beaconHeight * beaconWidthHeightRatio; Size beaconSize = new Size(beaconWidth, beaconHeight); //Look at the locations of the largest contours //Check to see if the largest red contour is more left-most than the largest right contour //If it is, then we know that the left beacon is red and the other blue, and vice versa Point bestRedCenter = largestRed.centroid(); Point bestBlueCenter = largestBlue.centroid(); //DEBUG R/B text if (debug) Drawing.drawText(img, "R", bestRedCenter, 1.0f, new ColorRGBA(255, 0, 0)); if (debug) Drawing.drawText(img, "B", bestBlueCenter, 1.0f, new ColorRGBA(0, 0, 255)); //Test which side is red and blue //If the distance between the sides is smaller than a value, then return unknown final int minDistance = (int) (Constants.DETECTION_MIN_DISTANCE * beaconSize.width); //percent of beacon width boolean leftIsRed; Contour leftMostContour, rightMostContour; if (readOppositeAxis) { if (bestRedCenter.y + minDistance < bestBlueCenter.y) { leftIsRed = true; } else if (bestBlueCenter.y + minDistance < bestRedCenter.y) { leftIsRed = false; } else { return new Beacon.BeaconAnalysis(); } } else { if (bestRedCenter.x + minDistance < bestBlueCenter.x) { leftIsRed = true; } else if (bestBlueCenter.x + minDistance < bestRedCenter.x) { leftIsRed = false; } else { return new Beacon.BeaconAnalysis(); } } //Swap left and right if necessary leftIsRed = swapLeftRight != leftIsRed; //Get the left-most best contour (or top-most if axis swapped) (or right-most if L/R swapped) if (readOppositeAxis) { //Get top-most best contour leftMostContour = ((largestRed.topLeft().y) < (largestBlue.topLeft().y)) ? largestRed : largestBlue; //Get bottom-most best contour rightMostContour = ((largestRed.topLeft().y) < (largestBlue.topLeft().y)) ? largestBlue : largestRed; } else { //Get left-most best contour leftMostContour = ((largestRed.topLeft().x) < (largestBlue.topLeft().x)) ? largestRed : largestBlue; //Get the right-most best contour rightMostContour = ((largestRed.topLeft().x) < (largestBlue.topLeft().x)) ? 
largestBlue : largestRed; } //DEBUG Logging if (debug) Log.d("Beacon", "Orientation: " + orientation + "Angle: " + orientationAngle + " Swap Axis: " + readOppositeAxis + " Swap Direction: " + swapLeftRight); //Swap left and right if necessary //BUGFIX: invert when we swap if (!swapLeftRight) { Contour temp = leftMostContour; leftMostContour = rightMostContour; rightMostContour = temp; } //Now that we have the two contours, let's find ellipses that match //Draw the box surrounding both contours //Get the width of the contours double widthBeacon = rightMostContour.right() - leftMostContour.left(); //Center of contours is the average of centroids of the contours Point center = new Point((leftMostContour.centroid().x + rightMostContour.centroid().x) / 2, (leftMostContour.centroid().y + rightMostContour.centroid().y) / 2); //Get the combined height of the contours double heightContours = Math.max(leftMostContour.bottom(), rightMostContour.bottom()) - Math.min(leftMostContour.top(), rightMostContour.top()); //The largest size ratio of tested over actual is the scale ratio double scale = Math.max(widthBeacon / beaconActualWidth, heightContours / beaconActualHeight); //Define size of bounding box by scaling the actual beacon size Size beaconSizeFinal = new Size(beaconActualWidth * scale, beaconActualHeight * scale); //Swap x and y if we rotated the view if (readOppositeAxis) { //noinspection SuspiciousNameCombination beaconSizeFinal = new Size(beaconSizeFinal.height, beaconSizeFinal.width); } //Get points of the bounding box Point beaconTopLeft = new Point(center.x - (beaconSizeFinal.width / 2), center.y - (beaconSizeFinal.height / 2)); Point beaconBottomRight = new Point(center.x + (beaconSizeFinal.width / 2), center.y + (beaconSizeFinal.height / 2)); Rectangle boundingBox = new Rectangle(new Rect(beaconTopLeft, beaconBottomRight)); //Get ellipses in region of interest //Make sure the rectangles don't leave the image size Rectangle leftRect = leftMostContour.getBoundingRectangle().clip(new Rectangle(img.size())); Rectangle rightRect = rightMostContour.getBoundingRectangle().clip(new Rectangle(img.size())); Mat leftContourImg = gray.submat((int) leftRect.top(), (int) leftRect.bottom(), (int) leftRect.left(), (int) leftRect.right()); Mat rightContourImg = gray.submat((int) rightRect.top(), (int) rightRect.bottom(), (int) rightRect.left(), (int) rightRect.right()); //Locate ellipses in the image to process contours against List<Ellipse> ellipsesLeft = PrimitiveDetection.locateEllipses(leftContourImg).getEllipses(); Detectable.offset(ellipsesLeft, new Point(leftRect.left(), leftRect.top())); List<Ellipse> ellipsesRight = PrimitiveDetection.locateEllipses(rightContourImg).getEllipses(); Detectable.offset(ellipsesRight, new Point(rightRect.left(), rightRect.top())); //Score ellipses BeaconScoringCOMPLEX scorer = new BeaconScoringCOMPLEX(img.size()); List<BeaconScoringCOMPLEX.ScoredEllipse> scoredEllipsesLeft = scorer.scoreEllipses(ellipsesLeft, null, null, gray); scoredEllipsesLeft = filterEllipses(scoredEllipsesLeft); ellipsesLeft = BeaconScoringCOMPLEX.ScoredEllipse.getList(scoredEllipsesLeft); if (debug) Drawing.drawEllipses(img, ellipsesLeft, new ColorRGBA("#00ff00"), 3); List<BeaconScoringCOMPLEX.ScoredEllipse> scoredEllipsesRight = scorer.scoreEllipses(ellipsesRight, null, null, gray); scoredEllipsesRight = filterEllipses(scoredEllipsesRight); ellipsesRight = BeaconScoringCOMPLEX.ScoredEllipse.getList(scoredEllipsesRight); if (debug) Drawing.drawEllipses(img, ellipsesRight, new 
ColorRGBA("#00ff00"), 3); //Calculate ellipse center if present Point centerLeft; Point centerRight; boolean done = false; do { centerLeft = null; centerRight = null; if (scoredEllipsesLeft.size() > 0) centerLeft = scoredEllipsesLeft.get(0).ellipse.center(); if (scoredEllipsesRight.size() > 0) centerRight = scoredEllipsesRight.get(0).ellipse.center(); //Flip axis if necesary if (centerLeft != null && readOppositeAxis) { centerLeft.set(new double[] { centerLeft.y, centerLeft.x }); } if (centerRight != null && readOppositeAxis) { centerRight.set(new double[] { centerRight.y, centerRight.x }); } //Make very, very sure that we didn't just find the same ellipse if (centerLeft != null && centerRight != null) { if (Math.abs(centerLeft.x - centerRight.x) < Constants.ELLIPSE_MIN_DISTANCE * beaconSize.width) { //Are both ellipses on the left or right side of the beacon? - remove the opposite side's ellipse if (Math.abs(centerLeft.x - leftRect.center().x) < Constants.ELLIPSE_MIN_DISTANCE * beaconSize.width) scoredEllipsesRight.remove(0); else scoredEllipsesLeft.remove(0); } else done = true; } else done = true; } while (!done); //Improve the beacon center if both ellipses present byte ellipseExtrapolated = 0; if (centerLeft != null && centerRight != null) { if (readOppositeAxis) center.y = (centerLeft.x + centerRight.x) / 2; else center.x = (centerLeft.x + centerRight.x) / 2; } //Extrapolate other ellipse location if one present //FIXME: This method of extrapolation may not work when readOppositeAxis is true else if (centerLeft != null) { ellipseExtrapolated = 2; if (readOppositeAxis) centerRight = new Point(centerLeft.x - 2 * Math.abs(center.x - centerLeft.x), (centerLeft.y + center.y) / 2); else centerRight = new Point(centerLeft.x + 2 * Math.abs(center.x - centerLeft.x), (centerLeft.y + center.y) / 2); if (readOppositeAxis) centerRight.set(new double[] { centerRight.y, centerRight.x }); } else if (centerRight != null) { ellipseExtrapolated = 1; if (readOppositeAxis) centerLeft = new Point(centerRight.x + 2 * Math.abs(center.x - centerRight.x), (centerRight.y + center.y) / 2); else centerLeft = new Point(centerRight.x - 2 * Math.abs(center.x - centerRight.x), (centerRight.y + center.y) / 2); if (readOppositeAxis) centerLeft.set(new double[] { centerLeft.y, centerLeft.x }); } //Draw center locations if (debug) Drawing.drawCross(img, center, new ColorRGBA("#ffffff"), 10, 4); if (debug && centerLeft != null) { ColorRGBA c = ellipseExtrapolated != 1 ? new ColorRGBA("#ffff00") : new ColorRGBA("#ff00ff"); //noinspection SuspiciousNameCombination Drawing.drawCross(img, !readOppositeAxis ? centerLeft : new Point(centerLeft.y, centerLeft.x), c, 8, 3); } if (debug && centerRight != null) { ColorRGBA c = ellipseExtrapolated != 2 ? new ColorRGBA("#ffff00") : new ColorRGBA("#ff00ff"); //noinspection SuspiciousNameCombination Drawing.drawCross(img, !readOppositeAxis ? 
centerRight : new Point(centerRight.y, centerRight.x), c, 8, 3); } //Draw the rectangle containing the beacon if (debug) Drawing.drawRectangle(img, boundingBox, new ColorRGBA(0, 255, 0), 4); //Tell us the height of the beacon //TODO later we can get the distance away from the beacon based on its height and position //Remove the largest index and look for the next largest //If the next largest is (mostly) within the area of the box, then merge it in with the largest //Check if the size of the largest contour(s) is about twice the size of the other //This would indicate one is brightly lit and the other is not //If this is not true, then neither part of the beacon is highly lit //Get confidence approximation double WH_ratio = widthBeacon / beaconHeight; double ratioError = Math.abs((Constants.BEACON_WH_RATIO - WH_ratio)) / Constants.BEACON_WH_RATIO; // perfect value = 0; double averageHeight = (leftMostContour.height() + rightMostContour.height()) / 2.0; double dy = Math.abs((leftMostContour.centroid().y - rightMostContour.centroid().y) / averageHeight * Constants.FAST_HEIGHT_DELTA_FACTOR); double dArea = Math.sqrt(leftMostContour.area() / rightMostContour.area()); double buttonsdy = (centerLeft != null && centerRight != null) ? (Math.abs(centerLeft.y - centerRight.y) / averageHeight * Constants.FAST_HEIGHT_DELTA_FACTOR) : Constants.ELLIPSE_PRESENCE_BIAS; double confidence = MathUtil.normalPDFNormalized( MathUtil.distance(MathUtil.distance(MathUtil.distance(ratioError, dy), dArea), buttonsdy) / Constants.FAST_CONFIDENCE_ROUNDNESS, Constants.FAST_CONFIDENCE_NORM, 0.0); //Get button ellipses Ellipse leftEllipse = scoredEllipsesLeft.size() > 0 ? scoredEllipsesLeft.get(0).ellipse : null; Ellipse rightEllipse = scoredEllipsesRight.size() > 0 ? scoredEllipsesRight.get(0).ellipse : null; //Test for color switching if (leftEllipse != null && rightEllipse != null && leftEllipse.center().x > rightEllipse.center().x) { Ellipse tE = leftEllipse; leftEllipse = rightEllipse; rightEllipse = tE; } else if ((leftEllipse != null && leftEllipse.center().x > center.x) || (rightEllipse != null && rightEllipse.center().x < center.x)) { Ellipse tE = leftEllipse; leftEllipse = rightEllipse; rightEllipse = tE; } //Axis correction for ellipses if (swapLeftRight) { if (leftEllipse != null) leftEllipse = new Ellipse( new RotatedRect(new Point(img.width() - leftEllipse.center().x, leftEllipse.center().y), leftEllipse.size(), leftEllipse.angle())); if (rightEllipse != null) rightEllipse = new Ellipse( new RotatedRect(new Point(img.width() - rightEllipse.center().x, rightEllipse.center().y), rightEllipse.size(), rightEllipse.angle())); //Swap again after correcting axis to ensure left is left and right is right Ellipse tE = leftEllipse; leftEllipse = rightEllipse; rightEllipse = tE; } //Switch axis if necessary if (readOppositeAxis) boundingBox = boundingBox.transpose(); if (leftIsRed) return new Beacon.BeaconAnalysis(Beacon.BeaconColor.RED, Beacon.BeaconColor.BLUE, boundingBox, confidence, leftEllipse, rightEllipse); else return new Beacon.BeaconAnalysis(Beacon.BeaconColor.BLUE, Beacon.BeaconColor.RED, boundingBox, confidence, leftEllipse, rightEllipse); }
From source file:org.lasarobotics.vision.image.Filter.java
License:Open Source License
/**
 * Downsample and blur an image (using a Gaussian pyramid kernel)
 *
 * @param img   The image
 * @param scale The scale, a number greater than 1
 */
public static void downsample(Mat img, double scale) {
    Imgproc.pyrDown(img, img, new Size((double) img.width() / scale, (double) img.height() / scale));
}
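A short usage sketch for the helper above, assuming a loaded OpenCV library and a hypothetical input file; after downsampling by 2, img.height() reports roughly half the original row count:

Mat img = Imgcodecs.imread("input.jpg"); // hypothetical input path
int before = img.height();
Filter.downsample(img, 2.0); // pyrDown to half the original size
int after = img.height();    // approximately before / 2
System.out.println("height before=" + before + ", after=" + after);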