This repository has been archived by the owner on Dec 30, 2024. It is now read-only.

Commit 9553332: v10.0 sdk
AnyiLin committed Sep 10, 2024
2 parents 151ead4 + 442c867
Showing 26 changed files with 851 additions and 1,537 deletions.
ConceptAprilTagLocalization.java (new file)
@@ -0,0 +1,244 @@
/* Copyright (c) 2024 Dryw Wade. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.Position;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

import java.util.List;

/*
* This OpMode illustrates the basics of AprilTag based localization.
*
* For an introduction to AprilTags, see the FTC-DOCS link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
* "TagLibrary" will be used to compute the robot's location and orientation. This default TagLibrary contains
* the current Season's AprilTags and a small set of "test Tags" in the high number range.
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the robot, relative to the field origin.
* This information is provided in the "robotPose" member of the returned "detection".
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: AprilTag Localization", group = "Concept")
@Disabled
public class ConceptAprilTagLocalization extends LinearOpMode {

    private static final boolean USE_WEBCAM = true;  // true for webcam, false for phone camera

    /**
     * Variables to store the position and orientation of the camera on the robot. Setting these
     * values requires a definition of the axes of the camera and robot:
     *
     * Camera axes:
     * Origin location: Center of the lens
     * Axes orientation: +x right, +y down, +z forward (from camera's perspective)
     *
     * Robot axes (this is typical, but you can define this however you want):
     * Origin location: Center of the robot at field height
     * Axes orientation: +x right, +y forward, +z upward
     *
     * Position:
     * If all values are zero (no translation), that implies the camera is at the center of the
     * robot. Suppose your camera is positioned 5 inches to the left, 7 inches forward, and 12
     * inches above the ground - you would need to set the position to (-5, 7, 12).
     *
     * Orientation:
     * If all values are zero (no rotation), that implies the camera is pointing straight up. In
     * most cases, you'll need to set the pitch to -90 degrees (rotation about the x-axis), meaning
     * the camera is horizontal. Use a yaw of 0 if the camera is pointing forwards, +90 degrees if
     * it's pointing straight left, -90 degrees for straight right, etc. You can also set the roll
     * to +/-90 degrees if it's vertical, or 180 degrees if it's upside-down.
     */
    private Position cameraPosition = new Position(DistanceUnit.INCH,
            0, 0, 0, 0);
    private YawPitchRollAngles cameraOrientation = new YawPitchRollAngles(AngleUnit.DEGREES,
            0, -90, 0, 0);
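
    // For example (the hypothetical mounting described in the comment above): a camera
    // mounted 5 inches to the left of robot center, 7 inches forward, 12 inches above the
    // ground, pitched down to horizontal and facing forward, would use:
    //   new Position(DistanceUnit.INCH, -5, 7, 12, 0)
    //   new YawPitchRollAngles(AngleUnit.DEGREES, 0, -90, 0, 0)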

    /**
     * The variable to store our instance of the AprilTag processor.
     */
    private AprilTagProcessor aprilTag;

    /**
     * The variable to store our instance of the vision portal.
     */
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {

        initAprilTag();

        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch START to start OpMode");
        telemetry.update();
        waitForStart();

        while (opModeIsActive()) {

            telemetryAprilTag();

            // Push telemetry to the Driver Station.
            telemetry.update();

            // Save CPU resources; can resume streaming when needed.
            if (gamepad1.dpad_down) {
                visionPortal.stopStreaming();
            } else if (gamepad1.dpad_up) {
                visionPortal.resumeStreaming();
            }

            // Share the CPU.
            sleep(20);
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();

    } // end method runOpMode()

    /**
     * Initialize the AprilTag processor.
     */
    private void initAprilTag() {

        // Create the AprilTag processor.
        aprilTag = new AprilTagProcessor.Builder()

                // The following default settings are available to un-comment and edit as needed.
                //.setDrawAxes(false)
                //.setDrawCubeProjection(false)
                //.setDrawTagOutline(true)
                //.setTagFamily(AprilTagProcessor.TagFamily.TAG_36h11)
                //.setTagLibrary(AprilTagGameDatabase.getCenterStageTagLibrary())
                //.setOutputUnits(DistanceUnit.INCH, AngleUnit.DEGREES)
                .setCameraPose(cameraPosition, cameraOrientation)

                // == CAMERA CALIBRATION ==
                // If you do not manually specify calibration parameters, the SDK will attempt
                // to load a predefined calibration for your camera.
                //.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
                // ... these parameters are fx, fy, cx, cy.

                .build();

        // Adjust Image Decimation to trade-off detection-range for detection-rate.
        // eg: Some typical detection data using a Logitech C920 WebCam
        // Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
        // Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
        // Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second (default)
        // Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames Per Second (default)
        // Note: Decimation can be changed on-the-fly to adapt during a match.
        //aprilTag.setDecimation(3);
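
        // For example (a hypothetical adaptation, not part of this sample), the main loop
        // could lower decimation when no tags are visible to extend detection range, then
        // raise it again once a tag is found to regain frame rate:
        //   aprilTag.setDecimation(aprilTag.getDetections().isEmpty() ? 1 : 3);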

        // Create the vision portal by using a builder.
        VisionPortal.Builder builder = new VisionPortal.Builder();

        // Set the camera (webcam vs. built-in RC phone camera).
        if (USE_WEBCAM) {
            builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
        } else {
            builder.setCamera(BuiltinCameraDirection.BACK);
        }

        // Choose a camera resolution. Not all cameras support all resolutions.
        //builder.setCameraResolution(new Size(640, 480));

        // Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
        //builder.enableLiveView(true);

        // Set the stream format; MJPEG uses less bandwidth than default YUY2.
        //builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
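
        // For example (assuming your camera supports MJPEG), switching to the lower-bandwidth
        // format mentioned above would look like:
        //   builder.setStreamFormat(VisionPortal.StreamFormat.MJPEG);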

        // Choose whether or not LiveView stops if no processors are enabled.
        // If set "true", monitor shows solid orange screen if no processors enabled.
        // If set "false", monitor shows camera view without annotations.
        //builder.setAutoStopLiveView(false);

        // Set and enable the processor.
        builder.addProcessor(aprilTag);

        // Build the Vision Portal, using the above settings.
        visionPortal = builder.build();

        // Disable or re-enable the aprilTag processor at any time.
        //visionPortal.setProcessorEnabled(aprilTag, true);
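
        // For example (a hypothetical pattern, not part of this sample): pause AprilTag
        // processing while it is not needed, then re-enable it just before localizing:
        //   visionPortal.setProcessorEnabled(aprilTag, false);
        //   ...
        //   visionPortal.setProcessorEnabled(aprilTag, true);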

    } // end method initAprilTag()

    /**
     * Add telemetry about AprilTag detections.
     */
    private void telemetryAprilTag() {

        List<AprilTagDetection> currentDetections = aprilTag.getDetections();
        telemetry.addData("# AprilTags Detected", currentDetections.size());

        // Step through the list of detections and display info for each one.
        for (AprilTagDetection detection : currentDetections) {
            if (detection.metadata != null) {
                telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
                telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)",
                        detection.robotPose.getPosition().x,
                        detection.robotPose.getPosition().y,
                        detection.robotPose.getPosition().z));
                telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)",
                        detection.robotPose.getOrientation().getPitch(AngleUnit.DEGREES),
                        detection.robotPose.getOrientation().getRoll(AngleUnit.DEGREES),
                        detection.robotPose.getOrientation().getYaw(AngleUnit.DEGREES)));
            } else {
                telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
                telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
            }
        } // end for() loop

        // Add "key" information to telemetry
        telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
        telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");

    } // end method telemetryAprilTag()

} // end class
@@ -101,4 +101,4 @@ public void runOpMode() throws InterruptedException
sleep(20);
}
}
}
}
ConceptLEDStick.java (new file)
@@ -0,0 +1,123 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
/*
Copyright (c) 2021-24 Alan Smith
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of Alan Smith nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

import android.graphics.Color;

import com.qualcomm.hardware.sparkfun.SparkFunLEDStick;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.Range;

/*
* This OpMode illustrates how to use the SparkFun QWIIC LED Strip
*
* This is a simple way to add a strip of 10 LEDs to your robot where you can set the color of each
* LED or the whole strip. This allows for driver feedback or even just fun ways to show your team
* colors.
*
* Why?
* Because more LEDs == more fun!!
*
* This OpMode assumes that the QWIIC LED Stick is attached to an I2C interface named "back_leds" in the robot configuration.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*
* You can buy this product here: https://www.sparkfun.com/products/18354
* Don't forget to also buy this to make it easy to connect to your Control or Expansion Hub:
* https://www.sparkfun.com/products/25596
*/
@TeleOp(name = "Concept: LED Stick", group = "Concept")
@Disabled
public class ConceptLEDStick extends OpMode {
    private boolean wasUp;
    private boolean wasDown;
    private int brightness = 5; // this needs to be between 0 and 31
    private final static double END_GAME_TIME = 120 - 30; // start the warning pattern 30 seconds before the end of a 120-second teleop period

    private SparkFunLEDStick ledStick;

    @Override
    public void init() {
        ledStick = hardwareMap.get(SparkFunLEDStick.class, "back_leds");
        ledStick.setBrightness(brightness);
        ledStick.setColor(Color.GREEN);
    }

    @Override
    public void start() {
        resetRuntime();
    }

    @Override
    public void loop() {
        telemetry.addLine("Hold the A button to turn blue");
        telemetry.addLine("Hold the B button to turn red");
        telemetry.addLine("Hold the left bumper to turn off");
        telemetry.addLine("Use DPAD Up/Down to change brightness");

        if (getRuntime() > END_GAME_TIME) {
            int[] ledColors = {Color.RED, Color.YELLOW, Color.RED, Color.YELLOW, Color.RED,
                    Color.YELLOW, Color.RED, Color.YELLOW, Color.RED, Color.YELLOW};
            ledStick.setColors(ledColors);
        } else if (gamepad1.a) {
            ledStick.setColor(Color.BLUE);
        } else if (gamepad1.b) {
            ledStick.setColor(Color.RED);
        } else if (gamepad1.left_bumper) {
            ledStick.turnAllOff();
        } else {
            ledStick.setColor(Color.GREEN);
        }

        /*
         * Use DPAD up and down to change brightness
         */
        int newBrightness = brightness;
        if (gamepad1.dpad_up && !wasUp) {
            newBrightness = brightness + 1;
        } else if (gamepad1.dpad_down && !wasDown) {
            newBrightness = brightness - 1;
        }
        if (newBrightness != brightness) {
            brightness = Range.clip(newBrightness, 0, 31);
            ledStick.setBrightness(brightness);
        }
        telemetry.addData("Brightness", brightness);

        wasDown = gamepad1.dpad_down;
        wasUp = gamepad1.dpad_up;
    }
}