/* Copyright (c) 2024 Dryw Wade. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.Position;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

import java.util.List;

/*
 * This OpMode illustrates the basics of AprilTag based localization.
 *
 * For an introduction to AprilTags, see the FTC-DOCS link below:
 * https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
 *
 * In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
 * "TagLibrary" will be used to compute the robot's location and orientation. This default TagLibrary contains
 * the current Season's AprilTags and a small set of "test Tags" in the high number range.
 *
 * When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the robot, relative to the field origin.
 * This information is provided in the "robotPose" member of the returned "detection".
 *
 * To learn about the Field Coordinate System that is defined for FTC (and used by this OpMode), see the FTC-DOCS link below:
 * https://ftc-docs.firstinspires.org/en/latest/game_specific_resources/field_coordinate_system/field-coordinate-system.html
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
@TeleOp(name = "Concept: AprilTag Localization", group = "Concept")
@Disabled
public class ConceptAprilTagLocalization extends LinearOpMode {

    private static final boolean USE_WEBCAM = true;  // true for webcam, false for phone camera

    /**
     * Variables to store the position and orientation of the camera on the robot. Setting these
     * values requires a definition of the axes of the camera and robot:
     *
     * Camera axes:
     * Origin location: Center of the lens
     * Axes orientation: +x right, +y down, +z forward (from camera's perspective)
     *
     * Robot axes (this is typical, but you can define this however you want):
     * Origin location: Center of the robot at field height
     * Axes orientation: +x right, +y forward, +z upward
     *
     * Position:
     * If all values are zero (no translation), that implies the camera is at the center of the
     * robot. Suppose your camera is positioned 5 inches to the left, 7 inches forward, and 12
     * inches above the ground; you would then set the position to (-5, 7, 12).
     *
     * Orientation:
     * If all values are zero (no rotation), that implies the camera is pointing straight up. In
     * most cases, you'll need to set the pitch to -90 degrees (rotation about the x-axis), meaning
     * the camera is horizontal. Use a yaw of 0 if the camera is pointing forwards, +90 degrees if
     * it's pointing straight left, -90 degrees for straight right, etc. You can also set the roll
     * to +/-90 degrees if it's vertical, or 180 degrees if it's upside-down.
     */
    private Position cameraPosition = new Position(DistanceUnit.INCH,
            0, 0, 0, 0);
    private YawPitchRollAngles cameraOrientation = new YawPitchRollAngles(AngleUnit.DEGREES,
            0, -90, 0, 0);
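
    // Illustrative example only (these mounting values are assumptions, not part of this sample):
    // for a camera mounted 5 inches left of robot center, 7 inches forward, and 12 inches above
    // the ground, level and facing forward, the fields above would be set as:
    //     cameraPosition    = new Position(DistanceUnit.INCH, -5, 7, 12, 0);
    //     cameraOrientation = new YawPitchRollAngles(AngleUnit.DEGREES, 0, -90, 0, 0);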

    /**
     * The variable to store our instance of the AprilTag processor.
     */
    private AprilTagProcessor aprilTag;

    /**
     * The variable to store our instance of the vision portal.
     */
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {

        initAprilTag();

        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch START to start OpMode");
        telemetry.update();
        waitForStart();

        while (opModeIsActive()) {

            telemetryAprilTag();

            // Push telemetry to the Driver Station.
            telemetry.update();

            // Save CPU resources; can resume streaming when needed.
            if (gamepad1.dpad_down) {
                visionPortal.stopStreaming();
            } else if (gamepad1.dpad_up) {
                visionPortal.resumeStreaming();
            }

            // Share the CPU.
            sleep(20);
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();

    }   // end method runOpMode()

    /**
     * Initialize the AprilTag processor.
     */
    private void initAprilTag() {

        // Create the AprilTag processor.
        aprilTag = new AprilTagProcessor.Builder()

                // The following default settings are available to un-comment and edit as needed.
                //.setDrawAxes(false)
                //.setDrawCubeProjection(false)
                //.setDrawTagOutline(true)
                //.setTagFamily(AprilTagProcessor.TagFamily.TAG_36h11)
                //.setTagLibrary(AprilTagGameDatabase.getCenterStageTagLibrary())
                //.setOutputUnits(DistanceUnit.INCH, AngleUnit.DEGREES)
                .setCameraPose(cameraPosition, cameraOrientation)

                // == CAMERA CALIBRATION ==
                // If you do not manually specify calibration parameters, the SDK will attempt
                // to load a predefined calibration for your camera.
                //.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
                // ... these parameters are fx, fy, cx, cy.

                .build();

        // Adjust Image Decimation to trade off detection range for detection rate.
        // e.g. Some typical detection data using a Logitech C920 WebCam:
        //   Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 frames per second
        //   Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 frames per second
        //   Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 frames per second (default)
        //   Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 frames per second (default)
        // Note: Decimation can be changed on-the-fly to adapt during a match.
        //aprilTag.setDecimation(3);
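
        // Minimal sketch only (not part of this sample): because decimation can be changed
        // on-the-fly, your main loop could trade range for speed at runtime. "nearTags" below
        // is a hypothetical flag that your own code would need to supply.
        //   if (nearTags) aprilTag.setDecimation(3);  // close to tags: higher frame rate
        //   else          aprilTag.setDecimation(1);  // far from tags: longer detection range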

        // Create the vision portal by using a builder.
        VisionPortal.Builder builder = new VisionPortal.Builder();

        // Set the camera (webcam vs. built-in RC phone camera).
        if (USE_WEBCAM) {
            builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
        } else {
            builder.setCamera(BuiltinCameraDirection.BACK);
        }

        // Choose a camera resolution. Not all cameras support all resolutions.
        // (Requires "import android.util.Size;" if un-commented.)
        //builder.setCameraResolution(new Size(640, 480));

        // Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
        //builder.enableLiveView(true);

        // Set the stream format; MJPEG uses less bandwidth than default YUY2.
        //builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);

        // Choose whether or not LiveView stops if no processors are enabled.
        // If set "true", monitor shows solid orange screen if no processors enabled.
        // If set "false", monitor shows camera view without annotations.
        //builder.setAutoStopLiveView(false);

        // Set and enable the processor.
        builder.addProcessor(aprilTag);

        // Build the Vision Portal, using the above settings.
        visionPortal = builder.build();

        // Disable or re-enable the aprilTag processor at any time.
        //visionPortal.setProcessorEnabled(aprilTag, true);

    }   // end method initAprilTag()

    /**
     * Add telemetry about AprilTag detections.
     */
    private void telemetryAprilTag() {

        List<AprilTagDetection> currentDetections = aprilTag.getDetections();
        telemetry.addData("# AprilTags Detected", currentDetections.size());

        // Step through the list of detections and display info for each one.
        for (AprilTagDetection detection : currentDetections) {
            if (detection.metadata != null) {
                telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
                telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)",
                        detection.robotPose.getPosition().x,
                        detection.robotPose.getPosition().y,
                        detection.robotPose.getPosition().z));
                telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)",
                        detection.robotPose.getOrientation().getPitch(AngleUnit.DEGREES),
                        detection.robotPose.getOrientation().getRoll(AngleUnit.DEGREES),
                        detection.robotPose.getOrientation().getYaw(AngleUnit.DEGREES)));
            } else {
                telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
                telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
            }
        }   // end for() loop

        // Add "key" information to telemetry.
        telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
        telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");

    }   // end method telemetryAprilTag()

}   // end class