From 315671aef203e29964d87b33a62b36f3897e6c4a Mon Sep 17 00:00:00 2001 From: Jared Swift Date: Mon, 8 Jul 2024 13:09:53 +0100 Subject: [PATCH] Carry My Luggage 07-07 (#244) * fix: cropped image + gesture detection * feat: wait for start signal, don't say we couldn't see a pointing person. * fix: never ask, and require a minimum number of tracks before we check if we are done. * chore: remove official leg_tracker submodule. * chore: submodule our own leg_tracker. * feat: try to make recovery more robust and forgiving. * feat: ensure that we are close to the person when we stop. * feat: manage the head manager. * fix: we are not asking anymore. * chore: bump leg_tracker. * refactor: tidy launch file. * fix: tf * feat: don't manage head manager, be more verbose. * small cleanup * feat: tell person to step back, we are reaching the arm out. * feat: handle person being too far away for vision to be viable. * fix: clear costmap before beginning to follow. * feat: ask person to slow down if they are going too fast, and add timeout on robot_pose to prevent infinite waiting. * refactor: type hint * fix: clear velocity buffer to prevent constant check_finished. 
--- .gitmodules | 2 +- .../launch/person_following.launch | 5 +- .../nodes/person_following.py | 2 +- .../lasr_person_following/person_following.py | 215 +++++++++++------- common/third_party/leg_tracker | 2 +- skills/src/lasr_skills/handover_object.py | 11 +- skills/src/lasr_skills/receive_object.py | 13 +- tasks/carry_my_luggage/launch/setup.launch | 2 +- .../src/carry_my_luggage/state_machine.py | 102 +++++++-- 9 files changed, 250 insertions(+), 104 deletions(-) diff --git a/.gitmodules b/.gitmodules index 32c10c10d..646f6f192 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "common/third_party/leg_tracker"] path = common/third_party/leg_tracker - url = git@github.com:angusleigh/leg_tracker.git + url = https://github.com/LASR-at-Home/leg_tracker diff --git a/common/navigation/lasr_person_following/launch/person_following.launch b/common/navigation/lasr_person_following/launch/person_following.launch index e6fec1f19..6cdce47c5 100644 --- a/common/navigation/lasr_person_following/launch/person_following.launch +++ b/common/navigation/lasr_person_following/launch/person_following.launch @@ -1,8 +1,7 @@ - - - + + diff --git a/common/navigation/lasr_person_following/nodes/person_following.py b/common/navigation/lasr_person_following/nodes/person_following.py index 4124045e6..456f1eaa4 100644 --- a/common/navigation/lasr_person_following/nodes/person_following.py +++ b/common/navigation/lasr_person_following/nodes/person_following.py @@ -47,7 +47,7 @@ def __init__(self) -> None: def _execute_cb(self, _: FollowGoal) -> None: print("Executing follow_person action") - while not self._follower.begin_tracking(ask=False): + while not self._follower.begin_tracking(): rospy.logwarn("No people found, retrying...") rospy.sleep(rospy.Duration(1.0)) warnings.warn( diff --git a/common/navigation/lasr_person_following/src/lasr_person_following/person_following.py b/common/navigation/lasr_person_following/src/lasr_person_following/person_following.py index 
55d2ee9fc..2282eb8ed 100644 --- a/common/navigation/lasr_person_following/src/lasr_person_following/person_following.py +++ b/common/navigation/lasr_person_following/src/lasr_person_following/person_following.py @@ -26,19 +26,23 @@ from play_motion_msgs.msg import PlayMotionAction from play_motion_msgs.msg import PlayMotionGoal -from lasr_vision_msgs.srv import DetectWaveRequest +from lasr_vision_msgs.srv import DetectWaveRequest, DetectWaveResponse from math import atan2 import numpy as np from scipy.spatial.transform import Rotation as R import actionlib -from pal_interaction_msgs.msg import TtsGoal, TtsAction from lasr_speech_recognition_msgs.msg import ( TranscribeSpeechAction, TranscribeSpeechGoal, ) +from pal_interaction_msgs.msg import TtsGoal, TtsAction + + +MAX_VISION_DIST: float = 5.0 + class PersonFollower: # Parameters @@ -48,6 +52,8 @@ class PersonFollower: _n_secs_static_plan_close: float _new_goal_threshold: float _stopping_distance: float + _vision_recovery_motions: List[str] = ["look_centre", "look_left", "look_right"] + _vision_recovery_attempts: int = 3 # State _track_id: Union[None, int] @@ -73,19 +79,17 @@ def __init__( self, start_following_radius: float = 2.0, start_following_angle: float = 45.0, - n_secs_static_finished: float = 8.0, - n_secs_static_plan_close: float = 10.0, new_goal_threshold: float = 2.0, stopping_distance: float = 1.0, static_speed: float = 0.0015, + max_speed: float = 0.5, ): self._start_following_radius = start_following_radius self._start_following_angle = start_following_angle - self._n_secs_static_finished = n_secs_static_finished - self._n_secs_static_plan_close = n_secs_static_plan_close self._new_goal_threshold = new_goal_threshold self._stopping_distance = stopping_distance self._static_speed = static_speed + self._max_speed = max_speed self._track_id = None @@ -143,8 +147,12 @@ def _tf_pose(self, pose: PoseStamped, target_frame: str): def _robot_pose_in_odom(self) -> Union[PoseStamped, None]: try: current_pose: 
PoseWithCovarianceStamped = rospy.wait_for_message( - "/robot_pose", PoseWithCovarianceStamped + "/robot_pose", + PoseWithCovarianceStamped, + timeout=rospy.Duration.from_sec(2.0), ) + except rospy.ROSException: + return None except AttributeError: return None @@ -154,7 +162,7 @@ def _robot_pose_in_odom(self) -> Union[PoseStamped, None]: return self._tf_pose(current_pose_stamped, "odom") - def begin_tracking(self, ask: bool = False) -> bool: + def begin_tracking(self) -> bool: """ Chooses the closest person as the target """ @@ -195,23 +203,6 @@ def begin_tracking(self, ask: bool = False) -> bool: if not closest_person: return False - if ask: - if self._tts_client_available and self._transcribe_speech_client_available: - - # Ask if we should follow - self._tts("Should I follow you? Please answer yes or no", wait=True) - - # listen - self._transcribe_speech_client.send_goal_and_wait( - TranscribeSpeechGoal() - ) - transcription = self._transcribe_speech_client.get_result().sequence - - if "yes" not in transcription.lower(): - return False - - self._tts("I will follow you", wait=False) - self._track_id = closest_person.id rospy.loginfo(f"Tracking person with ID {self._track_id}") @@ -219,32 +210,20 @@ def begin_tracking(self, ask: bool = False) -> bool: def _recover_track(self, say) -> bool: if not say: - self._tts("I SAW A person waving", wait=True) + self._tts("I see you are waving", wait=True) if self._tts_client_available and say: - self._tts("I lost track of you, please come back", wait=True) + self._tts("Please could you come back...", wait=True) - while not self.begin_tracking(ask=True) and not rospy.is_shutdown(): + while not self.begin_tracking() and not rospy.is_shutdown(): rospy.loginfo("Recovering track...") rospy.sleep(1) - return True - - # recover with vision, look up and check if person is waving - def _recover_vision(self, prev_pose: PoseStamped) -> bool: - # look up with playmotion and detect wave service - # if detected, begin tracking + 
self._tts("I see you again", wait=False) - # use play motion to look up - self._cancel_goal() - - goal = PlayMotionGoal() - goal.motion_name = "look_centre" - self._play_motion.send_goal_and_wait(goal) - - self._tts("Can you wave at me so that i can try to find you easily", wait=True) + return True - # detect wave + def _detect_waving_person(self) -> DetectWaveResponse: try: pcl = rospy.wait_for_message("/xtion/depth_registered/points", PointCloud2) req = DetectWaveRequest() @@ -252,35 +231,54 @@ def _recover_vision(self, prev_pose: PoseStamped) -> bool: req.dataset = "resnet50" req.confidence = 0.1 response = self._detect_wave(req) - if response.wave_detected: - rospy.loginfo("Wave detected, beginning tracking") - if np.isnan(response.wave_position.point.x) or np.isnan( - response.wave_position.point.y - ): - return False - goal_pose = self._tf_pose( - PoseStamped( - pose=Pose( - position=Point( - x=response.wave_position.point.x, - y=response.wave_position.point.y, - z=response.wave_position.point.z, - ), - orientation=Quaternion(0, 0, 0, 1), - ), - header=pcl.header, - ), - "map", - ) - rospy.loginfo(goal_pose.pose.position) - goal_pose.pose.orientation = self._compute_face_quat( - prev_pose.pose, goal_pose.pose - ) - self._move_base(goal_pose) - return True + return response except rospy.ServiceException as e: rospy.loginfo(f"Error detecting wave: {e}") - return False + return DetectWaveResponse() + + # recover with vision, look around and check if person is waving + def _recover_vision(self, prev_pose: PoseStamped) -> bool: + + # cancel current goal + self._cancel_goal() + + self._tts("Can you wave at me so that i can try to find you easily", wait=True) + + for motion in self._vision_recovery_motions: + rospy.loginfo(f"Performing motion: {motion}") + goal = PlayMotionGoal() + goal.motion_name = motion + self._play_motion.send_goal_and_wait(goal) + for _ in range(self._vision_recovery_attempts): + response = self._detect_waving_person() + if 
response.wave_detected: + if np.isnan(response.wave_position.point.x) or np.isnan( + response.wave_position.point.y + ): + continue + else: + goal_pose = self._tf_pose( + PoseStamped( + pose=Pose( + position=Point( + x=response.wave_position.point.x, + y=response.wave_position.point.y, + z=response.wave_position.point.z, + ), + orientation=Quaternion(0, 0, 0, 1), + ), + header=prev_pose.header, + ), + "map", + ) + goal_pose.pose.orientation = self._compute_face_quat( + prev_pose.pose, goal_pose.pose + ) + rospy.loginfo(goal_pose.pose.position) + self._move_base(goal_pose) + return True + + return False def _euclidian_distance(self, p1: Pose, p2: Pose) -> float: return np.linalg.norm( @@ -329,8 +327,6 @@ def _check_finished(self) -> bool: transcription = self._transcribe_speech_client.get_result().sequence return "yes" in transcription.lower() - - return True return True def _get_pose_on_path( @@ -386,7 +382,7 @@ def follow(self) -> None: prev_goal: Union[None, PoseStamped] = None last_goal_time: Union[None, rospy.Time] = None going_to_person: bool = False - track_vels: [float] = [] + track_vels: List[float] = [] while not rospy.is_shutdown(): @@ -406,8 +402,27 @@ def follow(self) -> None: if track is None: rospy.loginfo("Lost track of person, recovering...") person_trajectory = PoseArray() - recovery = self._recover_vision(prev_goal) - self._recover_track(say=not recovery) + ask_back: bool = False + + if prev_track is not None: + robot_pose: PoseStamped = self._robot_pose_in_odom() + if robot_pose: + dist: float = self._euclidian_distance( + robot_pose.pose, prev_track.pose + ) + rospy.loginfo(f"Distance to last known position: {dist}") + if dist >= MAX_VISION_DIST: + ask_back = True + else: + ask_back = True + else: + ask_back = True + + if not ask_back: + self._recover_track(say=not self._recover_vision(prev_goal)) + else: + self._recover_track(say=True) + prev_track = None continue @@ -429,6 +444,9 @@ def follow(self) -> None: last_goal_time = rospy.Time.now() 
prev_track = track + if np.mean([np.linalg.norm(vel) for vel in track_vels]) > self._max_speed: + self._tts("Please walk slower, I am struggling to keep up", wait=False) + # Distance to the previous pose dist_to_prev = ( self._euclidian_distance(track.pose, prev_track.pose) @@ -459,10 +477,51 @@ def follow(self) -> None: elif ( np.mean([np.linalg.norm(vel) for vel in track_vels]) < self._static_speed - ): - - rospy.logwarn("Person has been static for too long, stopping") + ) and len(track_vels) == 10: + rospy.logwarn( + "Person has been static for too long, going to them and stopping" + ) + # cancel current goal self._cancel_goal() + + # clear velocity buffer + track_vels = [] + + robot_pose: PoseStamped = self._robot_pose_in_odom() + dist: float = self._euclidian_distance(track.pose, robot_pose.pose) + rospy.loginfo(f"Distance to person: {dist}") + + # If the person is too far away, go to them + if dist > self._stopping_distance: + goal_pose = self._get_pose_on_path( + self._tf_pose(robot_pose, "map"), + self._tf_pose( + PoseStamped( + pose=track.pose, + header=tracks.header, + ), + "map", + ), + self._stopping_distance, + ) + # If we can't find a path, face them + if goal_pose is None: + rospy.logwarn("Could not find a path to the person") + goal_pose = robot_pose + goal_pose.pose.orientation = self._compute_face_quat( + robot_pose.pose, track.pose + ) + goal_pose = self._tf_pose(goal_pose, "map") + # Otherwise, face them + else: + goal_pose = robot_pose + goal_pose.pose.orientation = self._compute_face_quat( + robot_pose.pose, track.pose + ) + goal_pose = self._tf_pose(goal_pose, "map") + + self._move_base(goal_pose) + if self._check_finished(): rospy.loginfo("Finished following person") break diff --git a/common/third_party/leg_tracker b/common/third_party/leg_tracker index e6cbb2bba..1f7c2a436 160000 --- a/common/third_party/leg_tracker +++ b/common/third_party/leg_tracker @@ -1 +1 @@ -Subproject commit e6cbb2bba218e0684714a08972a7bdfa99118e3c +Subproject 
commit 1f7c2a43621e7cee319a2769537bca5d6b90f909 diff --git a/skills/src/lasr_skills/handover_object.py b/skills/src/lasr_skills/handover_object.py index b68a0a741..29be3a7bd 100755 --- a/skills/src/lasr_skills/handover_object.py +++ b/skills/src/lasr_skills/handover_object.py @@ -95,12 +95,21 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True): "LOOK_CENTRE", PlayMotion(motion_name="look_centre"), transitions={ - "succeeded": "REACH_ARM", + "succeeded": "SAY_REACH_ARM", "aborted": "failed", "preempted": "failed", }, ) + smach.StateMachine.add( + "SAY_REACH_ARM", + Say(text="Please step back, I am going to reach my arm out."), + transitions={ + "succeeded": "REACH_ARM", + "aborted": "REACH_ARM", + "preempted": "REACH_ARM", + }, + ) if vertical: smach.StateMachine.add( "REACH_ARM", diff --git a/skills/src/lasr_skills/receive_object.py b/skills/src/lasr_skills/receive_object.py index 6defc9d99..c7663b361 100755 --- a/skills/src/lasr_skills/receive_object.py +++ b/skills/src/lasr_skills/receive_object.py @@ -31,6 +31,7 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True): rosparam.upload_params(ns, param) with self: + smach.StateMachine.add( "CLEAR_OCTOMAP", smach_ros.ServiceState("clear_octomap", Empty), @@ -95,12 +96,22 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True): "LOOK_CENTRE", PlayMotion(motion_name="look_centre"), transitions={ - "succeeded": "REACH_ARM", + "succeeded": "SAY_REACH_ARM", "aborted": "failed", "preempted": "failed", }, ) + smach.StateMachine.add( + "SAY_REACH_ARM", + Say(text="Please step back, I am going to reach my arm out."), + transitions={ + "succeeded": "REACH_ARM", + "aborted": "REACH_ARM", + "preempted": "REACH_ARM", + }, + ) + if vertical: smach.StateMachine.add( "REACH_ARM", diff --git a/tasks/carry_my_luggage/launch/setup.launch b/tasks/carry_my_luggage/launch/setup.launch index d9d28141c..afdf7138b 100644 --- 
a/tasks/carry_my_luggage/launch/setup.launch +++ b/tasks/carry_my_luggage/launch/setup.launch @@ -15,7 +15,7 @@ - + diff --git a/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py b/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py index 91e32c3e2..af78e206d 100644 --- a/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py +++ b/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py @@ -1,3 +1,4 @@ +import rospy import smach import smach_ros from lasr_skills import ( @@ -8,8 +9,19 @@ HandoverObject, ) from lasr_skills.vision import GetCroppedImage +from lasr_skills import PlayMotion from lasr_person_following.msg import FollowAction +from std_msgs.msg import Empty +from std_srvs.srv import Empty as EmptySrv + +from pal_startup_msgs.srv import ( + StartupStart, + StartupStop, + StartupStartRequest, + StartupStopRequest, +) + class CarryMyLuggage(smach.StateMachine): @@ -33,8 +45,50 @@ def execute(self, userdata): def __init__(self): smach.StateMachine.__init__(self, outcomes=["succeeded", "failed"]) - with self: + + def wait_cb(ud, msg): + rospy.loginfo("Received start signal") + return False + + smach.StateMachine.add( + "WAIT_START", + smach_ros.MonitorState( + "/carry_my_luggage/start", + Empty, + wait_cb, + ), + transitions={ + "valid": "WAIT_START", + "invalid": "STOP_HEAD_MANAGER", + "preempted": "WAIT_START", + }, + ) + + smach.StateMachine.add( + "STOP_HEAD_MANAGER", + smach_ros.ServiceState( + "/pal_startup_control/stop", + StartupStop, + request=StartupStopRequest("head_manager"), + ), + transitions={ + "succeeded": "LOOK_CENTRE", + "aborted": "LOOK_CENTRE", + "preempted": "LOOK_CENTRE", + }, + ) + + smach.StateMachine.add( + "LOOK_CENTRE", + PlayMotion(motion_name="look_centre"), + transitions={ + "succeeded": "WAIT_FOR_PERSON", + "aborted": "WAIT_FOR_PERSON", + "preempted": "WAIT_FOR_PERSON", + }, + ) + smach.StateMachine.add( "WAIT_FOR_PERSON", WaitForPerson(), @@ -56,9 +110,7 @@ def __init__(self): 
smach.StateMachine.add( "GET_IMAGE", - GetCroppedImage( - object_name="person", crop_method="closest", use_mask=True - ), + GetCroppedImage(object_name="person", method="closest", use_mask=True), transitions={ "succeeded": "DETECT_POINTING_DIRECTION", "failed": "failed", @@ -70,7 +122,8 @@ def __init__(self): DetectGesture(), transitions={ "succeeded": "PROCESS_POINTING_DIRECTION", - "failed": "SAY_FAILED_POINTING", + "failed": "GET_IMAGE", + "missing_keypoints": "GET_IMAGE", }, ) @@ -79,42 +132,57 @@ def __init__(self): CarryMyLuggage.ProcessPointingDirection(), transitions={ "succeeded": "SAY_BAG", - "failed": "SAY_FAILED_POINTING", + "failed": "GET_IMAGE", }, ) smach.StateMachine.add( - "SAY_FAILED_POINTING", - Say( - text="I could not detect the direction that you are pointing. I'll try again." - ), + "SAY_BAG", + Say(format_str="I need you to give me the bag on your {}."), transitions={ - "succeeded": "GET_IMAGE", + "succeeded": "START_HEAD_MANAGER", "aborted": "failed", "preempted": "failed", }, + remapping={"placeholders": "pointing_direction"}, ) smach.StateMachine.add( - "SAY_BAG", - Say(format_str="I need you to give me the bag on your {}."), + "START_HEAD_MANAGER", + smach_ros.ServiceState( + "/pal_startup_control/start", + StartupStart, + request=StartupStartRequest("head_manager", ""), + ), transitions={ "succeeded": "RECEIVE_BAG", - "aborted": "failed", - "preempted": "failed", + "aborted": "RECEIVE_BAG", + "preempted": "RECEIVE_BAG", }, - remapping={"placeholders": "pointing_direction"}, ) smach.StateMachine.add( "RECEIVE_BAG", ReceiveObject(object_name="bag", vertical=True), transitions={ - "succeeded": "SAY_FOLLOW", + "succeeded": "CLEAR_COSTMAPS", "failed": "failed", }, ) + smach.StateMachine.add( + "CLEAR_COSTMAPS", + smach_ros.ServiceState( + "/move_base/clear_costmaps", + EmptySrv, + ), + transitions={ + "succeeded": "SAY_FOLLOW", + "aborted": "SAY_FOLLOW", + "preempted": "SAY_FOLLOW", + }, + ) + smach.StateMachine.add( "SAY_FOLLOW", 
Say(text="I will follow you now."),