diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java
index 928668b..3f37fb8 100644
--- a/src/main/java/frc/robot/Robot.java
+++ b/src/main/java/frc/robot/Robot.java
@@ -4,23 +4,31 @@
 package frc.robot;
 
+import org.littletonrobotics.junction.LoggedRobot;
+import org.littletonrobotics.junction.Logger;
+import org.littletonrobotics.junction.networktables.NT4Publisher;
+
 import edu.wpi.first.wpilibj.TimedRobot;
 import edu.wpi.first.wpilibj2.command.CommandScheduler;
 import frc.robot.Subsystems.Drive.Drive;
-import frc.robot.Subsystems.Vision.Vision;
+import frc.robot.Subsystems.GamePieceFinder.GamePieceFinder;
+import frc.robot.Subsystems.Vision.Vision;
+
 /**
  * The methods in this class are called automatically corresponding to each mode, as described in
  * the TimedRobot documentation. If you change the name of this class or the package after creating
  * this project, you must also update the Main.java file in the project.
  */
-public class Robot extends TimedRobot {
+public class Robot extends LoggedRobot {
   /**
    * This function is run when the robot is first started up and should be used for any
    * initialization code.
    */
   public Robot() {
     Drive.getInstance().zeroGyro();
+    Logger.addDataReceiver(new NT4Publisher());
+    Logger.start();
   }
 
   @Override
@@ -28,6 +36,7 @@ public void robotPeriodic() {
     CommandScheduler.getInstance().run();
     Vision.getInstance().periodic();
     Drive.getInstance().periodic();
+    GamePieceFinder.getInstance().periodic();
   }
 
   @Override
diff --git a/src/main/java/frc/robot/Subsystems/Drive/Drive.java b/src/main/java/frc/robot/Subsystems/Drive/Drive.java
index 43c5db4..daa48aa 100644
--- a/src/main/java/frc/robot/Subsystems/Drive/Drive.java
+++ b/src/main/java/frc/robot/Subsystems/Drive/Drive.java
@@ -101,21 +101,12 @@ public void runState() {
     }
     logOutputs(driveIO.getDrive().getState());
 
-    if (!DRIVER_CONTROLLER.getAButton()) {
-      getState().driveRobot();
-    } else {
-      if (vision.getYawToTarget() != null)
-      {
-        Matrix<N4, N4> robotToFRMatrix = ROBOT_TO_FRONT_RIGHT_CAMERA.toMatrix();
-        Matrix<N4, N4> cameraToObj = new Transform3d(Translation3d.kZero, new Rotation3d(0, 0, vision.getYawToTarget().in(Radians))).toMatrix();
-        Matrix<N4, N4> robotToObj = robotToFRMatrix.times(cameraToObj);
-        double currentAngle = getDriveTrain().getPigeon2().getYaw().getValueAsDouble() % 360;
-        // double wantedAngle = currentAngle - (vision.getYawToTarget().in(Degree) + ROBOT_TO_FRONT_RIGHT_CAMERA_ROTATION.getZ());
-
-        driveFieldRelative(0, 0, .1 * thetaController.calculate(currentAngle, new Transform3d(robotToObj).getRotation().getMeasureZ().in(Degree)));
-      }
-    }
+
+
+
+
+
     field.setRobotPose(getPose());
     SmartDashboard.putData("Field", field);
   }
@@ -153,13 +144,6 @@ public void zeroGyro() {
     driveIO.zeroGyro();
   }
 
-  // SYSId Trash (no hate ofc)
-  public enum SysIdMode {
-    TRANSLATION,
-    STEER,
-    ROTATION,
-  }
-
   // Util
   public Pose2d getPose() {
     return driveIO.getDrive().getState().Pose;
@@ -176,12 +160,4 @@ public LinearVelocity getVelocity() {
   public TunerSwerveDrivetrain getDriveTrain() {
     return driveIO.getDrive();
   }
-
-  public void addVisionMeasurement(Pose2d visionPose, double timestamp, Matrix<N3, N1> visionMeasurementStdDevs) {
-    if (ROBOT_MODE == RobotMode.REAL) {
-      driveIO.addVisionMeasurement(visionPose, Utils.fpgaToCurrentTime(timestamp), visionMeasurementStdDevs);
-    } else {
-      driveIO.addVisionMeasurement(visionPose, timestamp, visionMeasurementStdDevs);
-    }
-  }
 }
diff --git a/src/main/java/frc/robot/Subsystems/Drive/DriveConstants.java b/src/main/java/frc/robot/Subsystems/Drive/DriveConstants.java
index e214f84..1552ab5 100644
--- a/src/main/java/frc/robot/Subsystems/Drive/DriveConstants.java
+++ b/src/main/java/frc/robot/Subsystems/Drive/DriveConstants.java
@@ -11,7 +11,6 @@
 import edu.wpi.first.units.measure.Distance;
 import edu.wpi.first.units.measure.LinearAcceleration;
 import edu.wpi.first.units.measure.LinearVelocity;
-import frc.robot.Subsystems.Drive.Drive.SysIdMode;
 
 public class DriveConstants {
 
@@ -35,8 +34,6 @@ public class DriveConstants {
 
   public static final LinearVelocity TIPPING_LIMITER_THRESHOLD = MetersPerSecond.of(3);
 
-  // Change to change the sysID test that gets run for drive
-  public static final SysIdMode SYS_ID_MODE = SysIdMode.STEER;
   public static final String SUBSYSTEM_NAME = "Drive";
 
   // For zeroing on robot init
diff --git a/src/main/java/frc/robot/Subsystems/GamePieceFinder/GamePieceFinder.java b/src/main/java/frc/robot/Subsystems/GamePieceFinder/GamePieceFinder.java
new file mode 100644
index 0000000..34bd832
--- /dev/null
+++ b/src/main/java/frc/robot/Subsystems/GamePieceFinder/GamePieceFinder.java
@@ -0,0 +1,229 @@
+package frc.robot.Subsystems.GamePieceFinder;
+
+import static edu.wpi.first.units.Units.*;
+import static frc.robot.Subsystems.Vision.VisionConstants.*;
+
+import java.util.*;
+import java.util.concurrent.atomic.AtomicReference;
+import org.littletonrobotics.junction.Logger;
+
+import org.photonvision.targeting.PhotonTrackedTarget;
+
+import edu.wpi.first.math.geometry.*;
+import edu.wpi.first.math.util.Units;
+import edu.wpi.first.units.measure.Angle;
+import edu.wpi.first.units.measure.Distance;
+import edu.wpi.first.units.measure.Time;
+import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
+import frc.robot.Subsystems.Drive.Drive;
+
+public class GamePieceFinder {
+
+  private static final AtomicReference<GamePieceFinder> instance = new AtomicReference<>();
+
+  private final Deque<GamePieceParallaxSample> parallaxSamples = new LinkedList<>();
+  private final List<Pose2d> confirmedPieces = new ArrayList<>();
+
+  private final Time SAMPLE_EXPIRATION = Seconds.of(10.0);
+
+  private final double MIN_YAW_DIFFERENCE_DEG = 10.0;
+  private final double AREA_DISTANCE_TOLERANCE = 0.3; // 30%
+
+  private final Translation2d CAMERA_OFFSET = new Translation2d(0.0, 0.0);
+
+  private final Rotation2d CAMERA_ROTATION_OFFSET = new Rotation2d(Units.degreesToRadians(27.8));
+
+  private Pose2d latestEstimate;
+
+  private record Ray2d(Translation2d origin, Translation2d dir, double length, double area, Angle measuredAngle, Pose2d robotPose) {}
+
+  private class GamePieceParallaxSample {
+    public final Pose2d robotPose;
+    public final PhotonTrackedTarget visionSample;
+    public final Time timestamp;
+
+    public GamePieceParallaxSample(Pose2d robotPose, Time timestamp, PhotonTrackedTarget visionSample) {
+      this.robotPose = robotPose;
+      this.visionSample = visionSample;
+      this.timestamp = timestamp;
+    }
+
+    public Ray2d toRay() {
+      double yawDeg = visionSample.getYaw();
+      double distance = estimateDistanceFromArea(visionSample.getArea());
+
+      Translation2d cameraPos = robotPose.transformBy(new Transform2d(CAMERA_OFFSET, CAMERA_ROTATION_OFFSET)).getTranslation();
+
+      double globalAngle = robotPose.getRotation().getRadians() + Math.toRadians(yawDeg);
+      Translation2d dir = new Translation2d(Math.cos(globalAngle), Math.sin(globalAngle));
+
+      return new Ray2d(cameraPos, dir, distance, visionSample.getArea(), Degrees.of(yawDeg), robotPose);
+    }
+  }
+
+  public static GamePieceFinder getInstance() {
+    if (instance.get() == null) {
+      instance.set(new GamePieceFinder());
+    }
+    return instance.get();
+  }
+
+  private GamePieceFinder() {}
+
+  public void addVisionSample(PhotonTrackedTarget sample) {
+    // Order matters here: the size check has to run first so peekLast() is never called on an empty deque.
+    if (parallaxSamples.size() > 0 && Math.abs(sample.getYaw() - parallaxSamples.peekLast().visionSample.getYaw()) < MIN_YAW_DIFFERENCE_DEG) {
+      return;
+    }
+    parallaxSamples.add(new GamePieceParallaxSample(Drive.getInstance().getPose(), Milliseconds.of(System.currentTimeMillis()), sample));
+  }
+
+  public List<Pose2d> getDetectedPiecesSortedByDistance(Pose2d robotPose) {
+    return confirmedPieces.stream()
+        .sorted(Comparator.comparingDouble(p ->
+            p.getTranslation().getDistance(robotPose.getTranslation())))
+        .toList();
+  }
+
+  public Pose2d getLatestGamepieceEstimate() {
+    return latestEstimate;
+  }
+
+  public void periodic() {
+    cleanupOldSamples();
+    if (parallaxSamples.size() > 1) updateEstimates();
+    Logger.recordOutput("GamePieceFinder/ConfirmedPieces", confirmedPieces.size());
+    if (latestEstimate != null) {
+      // latestEstimate stays null until the first piece is confirmed; logging a null pose would throw.
+      Logger.recordOutput("GamePieceFinder/Pose", getLatestGamepieceEstimate());
+    }
+    Logger.recordOutput("GamePieceFinder/VisionSamples", parallaxSamples.size());
+  }
+
+  public void clearPoseEstimates() {
+    confirmedPieces.clear();
+    latestEstimate = null;
+  }
+
+  private void updateEstimates() {
+    List<GamePieceParallaxSample> samples = new ArrayList<>(parallaxSamples);
+    Set<GamePieceParallaxSample> toRemove = new HashSet<>();
+
+    for (int i = 0; i < samples.size(); i++) {
+      for (int j = i + 1; j < samples.size(); j++) {
+        // The "first" sample needs the smaller rotation so it can be treated as 0 and used as the reference for the math below.
+        // TODO: This logic breaks if the rotation can be negative, so confirm that it can't be (or handle it if it can).
+        // TODO: There is probably a cleaner way to implement this pairing.
+        var s1 = samples.get(i);
+        var s2 = samples.get(j);
+
+        double yawDiff = Math.abs(s2.visionSample.getYaw() - s1.visionSample.getYaw());
+        if (yawDiff < MIN_YAW_DIFFERENCE_DEG) continue;
+
+        Ray2d r1 = s1.toRay();
+        Ray2d r2 = s2.toRay();
+
+        Logger.recordOutput("R1", r1.dir());
+        Logger.recordOutput("R2", r2.dir());
+
+        Translation2d objectPoint = getPoseThroughParallax(r1, r2);
+        System.out.println(objectPoint);
+        // getPoseThroughParallax always returns a point, so areasAtIntersection never sees null here.
+        if (areasAtIntersection(r1, r2, objectPoint)) {
+          Pose2d found = new Pose2d(objectPoint, new Rotation2d());
+          confirmedPieces.add(found);
+          latestEstimate = found;
+          toRemove.add(s1);
+          toRemove.add(s2);
+        }
+      }
+    }
+    parallaxSamples.removeAll(toRemove);
+  }
+
+  private void cleanupOldSamples() {
+    double now = System.currentTimeMillis();
+    parallaxSamples.removeIf(s ->
+        now - s.timestamp.in(Milliseconds) > SAMPLE_EXPIRATION.in(Milliseconds));
+  }
+
+  // TODO: The area-to-distance relationship is unverified (linear? inverse square root?); k = 6.7 needs to be tuned empirically.
+  private double estimateDistanceFromArea(double area) {
+    double k = 6.7;
+    return k / Math.sqrt(Math.max(area, 0.2));
+  }
+
+  // Checks that the intersection point is roughly as far from each camera as that target's area predicts.
+  private boolean areasAtIntersection(Ray2d r1, Ray2d r2, Translation2d intersection) {
+    double distAlongR1 = intersection.minus(r1.origin()).getNorm();
+    double distAlongR2 = intersection.minus(r2.origin()).getNorm();
+
+    double predictedR1 = r1.length();
+    double predictedR2 = r2.length();
+
+    boolean ok1 = Math.abs(distAlongR1 - predictedR1) / predictedR1 < AREA_DISTANCE_TOLERANCE;
+    boolean ok2 = Math.abs(distAlongR2 - predictedR2) / predictedR2 < AREA_DISTANCE_TOLERANCE;
+
+    return ok1 && ok2;
+  }
+
+  // Naive 2D line-intersection solution, currently unused in favor of getPoseThroughParallax().
+  // TODO: verify this is correct/optimal - @william feel free to replace it with your calculation.
+  private Optional<Translation2d> intersectRays(Ray2d r1, Ray2d r2) {
+    double x1 = r1.origin().getX(), y1 = r1.origin().getY();
+    double x2 = x1 + r1.dir().getX(), y2 = y1 + r1.dir().getY();
+    double x3 = r2.origin().getX(), y3 = r2.origin().getY();
+    double x4 = x3 + r2.dir().getX(), y4 = y3 + r2.dir().getY();
+
+    double denominator = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
+    if (Math.abs(denominator) < 1e-2) return Optional.empty(); // rays are (nearly) parallel, so there is no usable intersection
+
+    double px = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / denominator;
+    double py = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / denominator;
+
+    Translation2d p = new Translation2d(px, py);
+    System.out.println(p);
+
+    // Ignore intersections that land far beyond the distance each target's area predicts.
+    if (p.minus(r1.origin()).getNorm() > r1.length() * 1.5 ||
+        p.minus(r2.origin()).getNorm() > r2.length() * 1.5)
+      return Optional.empty();
+
+    return Optional.of(p);
+  }
+
+  // TODO: What if the robot has translated (not just rotated) between samples? That would break the geometry below.
+  private Translation2d getPoseThroughParallax(Ray2d r1, Ray2d r2) {
+    Angle deltaYaw = Degrees.of(r2.robotPose().getRotation().minus(r1.robotPose().getRotation()).getDegrees());
+
+    Translation2d firstCameraPos = ROBOT_TO_FRONT_RIGHT_CAMERA_TRANSLATION.toTranslation2d();
+    // TODO: Need to check that this rotates the right way - it should be to the left.
+    Translation2d secondCameraPos = firstCameraPos.rotateAround(Translation2d.kZero, Rotation2d.fromDegrees(deltaYaw.in(Degrees)));
+
+    Translation2d vecBtwnCameraPos = firstCameraPos.minus(secondCameraPos);
+
+    Angle cameraRot = ROBOT_TO_FRONT_RIGHT_CAMERA_ROTATION.getMeasureZ();
+    Angle angleOffset = Radians.of(Math.atan2(firstCameraPos.getY(), firstCameraPos.getX()));
+    Angle firstYaw = r1.measuredAngle();
+    Angle secondYaw = r2.measuredAngle();
+
+    // TODO: There is some redundant math here; come back and simplify/clean it up later.
+    Angle alpha = Radians.of(Math.atan2(vecBtwnCameraPos.getY(), vecBtwnCameraPos.getX()));
+    Angle beta = Degrees.of(90 - alpha.in(Degrees));
+    Angle A = Degrees.of(cameraRot.in(Degrees) - firstYaw.in(Degrees) - alpha.in(Degrees));
+    Angle B = Degrees.of(360 - (cameraRot.in(Degrees) - secondYaw.in(Degrees) + angleOffset.in(Degrees) + ((180 - deltaYaw.in(Degrees)) / 2 - beta.in(Degrees))));
+    Angle C = Degrees.of(180 - A.in(Degrees) - B.in(Degrees));
+
+    // Law of sines: |baseline| / sin(C) = b / sin(A), where the baseline between the two camera positions is the side opposite C.
+    // TODO: Make sure the coordinate system matches (this assumes right is positive and up is positive).
+    Distance b = Meters.of((vecBtwnCameraPos.getDistance(Translation2d.kZero) / Math.sin(C.in(Radians))) * Math.sin(A.in(Radians)));
+    Translation2d robotToObject = new Translation2d(
+        -b.in(Meters) * Math.cos(A.in(Radians) + alpha.in(Radians)) + firstCameraPos.getMeasureX().in(Meters),
+        b.in(Meters) * Math.sin(A.in(Radians) + alpha.in(Radians)) + firstCameraPos.getMeasureY().in(Meters));
+
+    // The math above assumes the robot is facing straight forward, so rotate the result to match the robot's actual heading.
+    // This could be generalized to any initial heading, but rotating afterward is simpler for now.
+    // TODO: Confirm this rotates in the right direction.
+    robotToObject = robotToObject.rotateAround(Translation2d.kZero, r1.robotPose().getRotation());
+
+    return r1.robotPose().getTranslation().plus(robotToObject);
+  }
+}
diff --git a/src/main/java/frc/robot/Subsystems/Vision/Vision.java b/src/main/java/frc/robot/Subsystems/Vision/Vision.java
index bcd3bdb..15ad461 100644
--- a/src/main/java/frc/robot/Subsystems/Vision/Vision.java
+++ b/src/main/java/frc/robot/Subsystems/Vision/Vision.java
@@ -7,14 +7,15 @@
 import edu.wpi.first.units.measure.Angle;
 import edu.wpi.first.wpilibj2.command.SubsystemBase;
+import frc.robot.Subsystems.GamePieceFinder.GamePieceFinder;
 
 public class Vision extends SubsystemBase {
 
   private PhotonCamera camera;
   private static Vision instance;
-  private Angle targetYaw;
-
+  private Angle targetYaw;
+
   public static Vision getInstance() {
     if (instance == null) {
       instance = new Vision(FRONT_RIGHT_CAM_NAME);
@@ -37,10 +38,10 @@ public void periodic() {
     for (var result : results) {
       var target = result.getBestTarget();
       if (target != null) {
+        GamePieceFinder.getInstance().addVisionSample(target);
         targetYaw = Degrees.of(target.getYaw());
-        System.out.println(targetYaw.in(Degrees));
       }
     }
   }
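
Note (not part of the diff above): a minimal, dependency-free sketch of the same two-line
intersection formula that intersectRays() uses, written with plain doubles instead of WPILib's
Translation2d. The class name, method name, and the sample rays are illustrative only.

// RayIntersectionSketch.java - standalone check of the intersection formula
public class RayIntersectionSketch {
  // Returns {x, y} where the infinite lines (p1 + t*d1) and (p2 + u*d2) cross, or null if nearly parallel.
  static double[] intersect(double x1, double y1, double dx1, double dy1,
                            double x3, double y3, double dx2, double dy2) {
    double x2 = x1 + dx1, y2 = y1 + dy1;
    double x4 = x3 + dx2, y4 = y3 + dy2;
    double denom = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    if (Math.abs(denom) < 1e-9) return null; // parallel rays give no usable fix
    double px = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / denom;
    double py = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / denom;
    return new double[] {px, py};
  }

  public static void main(String[] args) {
    // Camera A at the origin looking 45 degrees left of +x, camera B at (1, 0) looking straight along +y:
    // both rays pass through (1, 1), so that is where the game piece should be reported.
    double[] p = intersect(0, 0, Math.cos(Math.PI / 4), Math.sin(Math.PI / 4), 1, 0, 0, 1);
    System.out.printf("intersection = (%.3f, %.3f)%n", p[0], p[1]); // expect (1.000, 1.000)
  }
}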
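
Also not part of the diff: a quick numeric check of the area-to-distance heuristic (k / sqrt(area))
together with the 30% consistency tolerance that areasAtIntersection() applies. The constant k = 6.7
and the tolerance come from the diff; the sample area and distances below are made up for illustration.

// AreaDistanceSketch.java - sanity check of the distance heuristic and tolerance gate
public class AreaDistanceSketch {
  static final double K = 6.7;
  static final double TOLERANCE = 0.3;

  // Mirrors estimateDistanceFromArea(): larger target area means a closer game piece.
  static double estimateDistanceFromArea(double areaPercent) {
    return K / Math.sqrt(Math.max(areaPercent, 0.2));
  }

  // Mirrors the per-ray check in areasAtIntersection(): the actual distance must be within 30% of the predicted one.
  static boolean consistent(double predicted, double actual) {
    return Math.abs(actual - predicted) / predicted < TOLERANCE;
  }

  public static void main(String[] args) {
    double predicted = estimateDistanceFromArea(4.0); // a target filling 4% of the frame -> ~3.35 m
    System.out.printf("predicted %.2f m; 2.9 m consistent? %b; 5.0 m consistent? %b%n",
        predicted, consistent(predicted, 2.9), consistent(predicted, 5.0)); // expect true, then false
  }
}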