Carry My Luggage 07-07 (#244)
* fix: cropped image + gesture detection

* feat: wait for start signal, don't say we couldn't see a pointing person.

* fix: never ask, and require a minimum number of tracks before we check if we are done.

* chore: remove official leg_tracker submodule.

* chore: submodule our own leg_tracker.

* feat: try to make recovery more robust and forgiving.

* feat: ensure that we are close to the person when we stop.

* feat: manage the head manager.

* fix: we are not asking anymore.

* chore: bump leg_tracker.

* refactor: tidy launch file.

* fix: tf

* feat: don't manage head manager, be more verbose.

* small cleanup

* feat: tell person to step back, we are reaching the arm out.

* feat: handle person being too far away for vision to be viable.

* fix: clear costmap before beginning to follow.

* feat: ask person to slow down if they are going too fast, and add timeout on robot_pose to prevent infinite waiting (see the velocity-buffer sketch after this list).

* refactor: type hint

* fix: clear velocity buffer to prevent constant check_finished.
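
The two velocity-related fixes above share one mechanism: the follower keeps a fixed-size buffer of recent track velocities and compares the mean speed against two thresholds. Above _max_speed it asks the person to slow down; below _static_speed with a full buffer it treats the person as static, approaches them, and checks whether the task is finished. A minimal sketch of that logic (the 10-sample buffer size and both thresholds are taken from the diff below; the deque-based buffer and the on_track_update helper are illustrative, not project code):

    from collections import deque
    from typing import Deque

    import numpy as np

    MAX_SPEED: float = 0.5        # mean speed above this -> ask the person to slow down
    STATIC_SPEED: float = 0.0015  # mean speed below this -> treat the person as static
    BUFFER_SIZE: int = 10         # matches the len(track_vels) == 10 check in the diff

    track_vels: Deque[np.ndarray] = deque(maxlen=BUFFER_SIZE)


    def on_track_update(vel_xy: np.ndarray) -> str:
        """Classify the tracked person's recent motion from one velocity sample."""
        track_vels.append(vel_xy)
        mean_speed = float(np.mean([np.linalg.norm(v) for v in track_vels]))
        if mean_speed > MAX_SPEED:
            return "too_fast"  # follower says "Please walk slower..."
        if len(track_vels) == BUFFER_SIZE and mean_speed < STATIC_SPEED:
            track_vels.clear()  # clear the buffer so check_finished is not re-triggered constantly
            return "static"     # follower approaches the person and runs _check_finished
        return "ok"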
jws-1 authored Jul 8, 2024
1 parent e0dd87b commit 315671a
Showing 9 changed files with 250 additions and 104 deletions.
2 changes: 1 addition & 1 deletion .gitmodules
@@ -1,3 +1,3 @@
[submodule "common/third_party/leg_tracker"]
	path = common/third_party/leg_tracker
-	url = git@github.com:angusleigh/leg_tracker.git
+	url = https://github.com/LASR-at-Home/leg_tracker
@@ -1,8 +1,7 @@
<launch>

-    <include file="$(find lasr_person_following)/launch/joint_leg_tracker.launch">
-    </include>
+    <include file="$(find lasr_person_following)/launch/joint_leg_tracker.launch"/>

    <node pkg="lasr_person_following" type="person_following.py" name="person_following" output="screen" />

    <node pkg="tiago_2dnav" type="navigation_camera_mgr.py" name="navigation_camera_mgr" output="screen"/>
@@ -47,7 +47,7 @@ def __init__(self) -> None:

    def _execute_cb(self, _: FollowGoal) -> None:
        print("Executing follow_person action")
-        while not self._follower.begin_tracking(ask=False):
+        while not self._follower.begin_tracking():
            rospy.logwarn("No people found, retrying...")
            rospy.sleep(rospy.Duration(1.0))
        warnings.warn(
@@ -26,19 +26,23 @@
from play_motion_msgs.msg import PlayMotionAction
from play_motion_msgs.msg import PlayMotionGoal

-from lasr_vision_msgs.srv import DetectWaveRequest
+from lasr_vision_msgs.srv import DetectWaveRequest, DetectWaveResponse

from math import atan2
import numpy as np
from scipy.spatial.transform import Rotation as R

import actionlib
-from pal_interaction_msgs.msg import TtsGoal, TtsAction
from lasr_speech_recognition_msgs.msg import (
    TranscribeSpeechAction,
    TranscribeSpeechGoal,
)

+from pal_interaction_msgs.msg import TtsGoal, TtsAction
+
+
+MAX_VISION_DIST: float = 5.0


class PersonFollower:
    # Parameters
@@ -48,6 +52,8 @@ class PersonFollower:
    _n_secs_static_plan_close: float
    _new_goal_threshold: float
    _stopping_distance: float
+    _vision_recovery_motions: List[str] = ["look_centre", "look_left", "look_right"]
+    _vision_recovery_attempts: int = 3

    # State
    _track_id: Union[None, int]
@@ -73,19 +79,17 @@ def __init__(
        self,
        start_following_radius: float = 2.0,
        start_following_angle: float = 45.0,
-        n_secs_static_finished: float = 8.0,
-        n_secs_static_plan_close: float = 10.0,
        new_goal_threshold: float = 2.0,
        stopping_distance: float = 1.0,
        static_speed: float = 0.0015,
+        max_speed: float = 0.5,
    ):
        self._start_following_radius = start_following_radius
        self._start_following_angle = start_following_angle
-        self._n_secs_static_finished = n_secs_static_finished
-        self._n_secs_static_plan_close = n_secs_static_plan_close
        self._new_goal_threshold = new_goal_threshold
        self._stopping_distance = stopping_distance
        self._static_speed = static_speed
+        self._max_speed = max_speed

        self._track_id = None

@@ -143,8 +147,12 @@ def _tf_pose(self, pose: PoseStamped, target_frame: str):
    def _robot_pose_in_odom(self) -> Union[PoseStamped, None]:
        try:
            current_pose: PoseWithCovarianceStamped = rospy.wait_for_message(
-                "/robot_pose", PoseWithCovarianceStamped
+                "/robot_pose",
+                PoseWithCovarianceStamped,
+                timeout=rospy.Duration.from_sec(2.0),
            )
+        except rospy.ROSException:
+            return None
        except AttributeError:
            return None

@@ -154,7 +162,7 @@ def _robot_pose_in_odom(self) -> Union[PoseStamped, None]:

        return self._tf_pose(current_pose_stamped, "odom")

-    def begin_tracking(self, ask: bool = False) -> bool:
+    def begin_tracking(self) -> bool:
        """
        Chooses the closest person as the target
        """
@@ -195,92 +203,82 @@ def begin_tracking(self, ask: bool = False) -> bool:
        if not closest_person:
            return False

-        if ask:
-            if self._tts_client_available and self._transcribe_speech_client_available:
-
-                # Ask if we should follow
-                self._tts("Should I follow you? Please answer yes or no", wait=True)
-
-                # listen
-                self._transcribe_speech_client.send_goal_and_wait(
-                    TranscribeSpeechGoal()
-                )
-                transcription = self._transcribe_speech_client.get_result().sequence
-
-                if "yes" not in transcription.lower():
-                    return False
-
-            self._tts("I will follow you", wait=False)
-
        self._track_id = closest_person.id

        rospy.loginfo(f"Tracking person with ID {self._track_id}")
        return True

    def _recover_track(self, say) -> bool:
        if not say:
-            self._tts("I SAW A person waving", wait=True)
+            self._tts("I see you are waving", wait=True)

        if self._tts_client_available and say:
-            self._tts("I lost track of you, please come back", wait=True)
+            self._tts("Please could you come back...", wait=True)

-        while not self.begin_tracking(ask=True) and not rospy.is_shutdown():
+        while not self.begin_tracking() and not rospy.is_shutdown():
            rospy.loginfo("Recovering track...")
            rospy.sleep(1)

        return True

-    # recover with vision, look up and check if person is waving
-    def _recover_vision(self, prev_pose: PoseStamped) -> bool:
-        # look up with playmotion and detect wave service
-        # if detected, begin tracking
-        self._tts("I see you again", wait=False)
-
-        # use play motion to look up
-        self._cancel_goal()
-
-        goal = PlayMotionGoal()
-        goal.motion_name = "look_centre"
-        self._play_motion.send_goal_and_wait(goal)
-
-        self._tts("Can you wave at me so that i can try to find you easily", wait=True)
-        return True
-
-    # detect wave
+    def _detect_waving_person(self) -> DetectWaveResponse:
        try:
            pcl = rospy.wait_for_message("/xtion/depth_registered/points", PointCloud2)
            req = DetectWaveRequest()
            req.pcl_msg = pcl
            req.dataset = "resnet50"
            req.confidence = 0.1
            response = self._detect_wave(req)
-            if response.wave_detected:
-                rospy.loginfo("Wave detected, beginning tracking")
-                if np.isnan(response.wave_position.point.x) or np.isnan(
-                    response.wave_position.point.y
-                ):
-                    return False
-                goal_pose = self._tf_pose(
-                    PoseStamped(
-                        pose=Pose(
-                            position=Point(
-                                x=response.wave_position.point.x,
-                                y=response.wave_position.point.y,
-                                z=response.wave_position.point.z,
-                            ),
-                            orientation=Quaternion(0, 0, 0, 1),
-                        ),
-                        header=pcl.header,
-                    ),
-                    "map",
-                )
-                rospy.loginfo(goal_pose.pose.position)
-                goal_pose.pose.orientation = self._compute_face_quat(
-                    prev_pose.pose, goal_pose.pose
-                )
-                self._move_base(goal_pose)
-                return True
-            return False
+            return response
        except rospy.ServiceException as e:
            rospy.loginfo(f"Error detecting wave: {e}")
-            return False
+            return DetectWaveResponse()

+    # recover with vision, look around and check if person is waving
+    def _recover_vision(self, prev_pose: PoseStamped) -> bool:
+
+        # cancel current goal
+        self._cancel_goal()
+
+        self._tts("Can you wave at me so that i can try to find you easily", wait=True)
+
+        for motion in self._vision_recovery_motions:
+            rospy.loginfo(f"Performing motion: {motion}")
+            goal = PlayMotionGoal()
+            goal.motion_name = motion
+            self._play_motion.send_goal_and_wait(goal)
+            for _ in range(self._vision_recovery_attempts):
+                response = self._detect_waving_person()
+                if response.wave_detected:
+                    if np.isnan(response.wave_position.point.x) or np.isnan(
+                        response.wave_position.point.y
+                    ):
+                        continue
+                    else:
+                        goal_pose = self._tf_pose(
+                            PoseStamped(
+                                pose=Pose(
+                                    position=Point(
+                                        x=response.wave_position.point.x,
+                                        y=response.wave_position.point.y,
+                                        z=response.wave_position.point.z,
+                                    ),
+                                    orientation=Quaternion(0, 0, 0, 1),
+                                ),
+                                header=prev_pose.header,
+                            ),
+                            "map",
+                        )
+                        goal_pose.pose.orientation = self._compute_face_quat(
+                            prev_pose.pose, goal_pose.pose
+                        )
+                        rospy.loginfo(goal_pose.pose.position)
+                        self._move_base(goal_pose)
+                        return True
+
+        return False
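
A usage note on _detect_waving_person above: returning an empty DetectWaveResponse() on failure lets callers branch on response.wave_detected without None checks. Below is a minimal standalone probe of the same call pattern, as a sketch only: the service name "/detect_wave" and service type DetectWave are assumptions (the diff shows only the request/response messages and the self._detect_wave proxy), while the topic, dataset, and confidence values are taken from the code above.

    import rospy
    from sensor_msgs.msg import PointCloud2
    from lasr_vision_msgs.srv import DetectWave, DetectWaveRequest  # DetectWave srv name is assumed

    rospy.init_node("wave_probe")
    rospy.wait_for_service("/detect_wave")  # hypothetical service name, for illustration only
    detect_wave = rospy.ServiceProxy("/detect_wave", DetectWave)

    # Grab one depth-registered cloud, as the follower does, and query the service.
    pcl = rospy.wait_for_message("/xtion/depth_registered/points", PointCloud2)
    req = DetectWaveRequest()
    req.pcl_msg = pcl
    req.dataset = "resnet50"
    req.confidence = 0.1
    resp = detect_wave(req)
    rospy.loginfo(f"wave_detected={resp.wave_detected}")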

    def _euclidian_distance(self, p1: Pose, p2: Pose) -> float:
        return np.linalg.norm(
@@ -329,8 +327,6 @@ def _check_finished(self) -> bool:
                transcription = self._transcribe_speech_client.get_result().sequence

                return "yes" in transcription.lower()
-
-            return True
        return True

    def _get_pose_on_path(
@@ -386,7 +382,7 @@ def follow(self) -> None:
        prev_goal: Union[None, PoseStamped] = None
        last_goal_time: Union[None, rospy.Time] = None
        going_to_person: bool = False
-        track_vels: [float] = []
+        track_vels: List[float] = []

        while not rospy.is_shutdown():

@@ -406,8 +402,27 @@ def follow(self) -> None:
            if track is None:
                rospy.loginfo("Lost track of person, recovering...")
                person_trajectory = PoseArray()
-                recovery = self._recover_vision(prev_goal)
-                self._recover_track(say=not recovery)
+                ask_back: bool = False
+
+                if prev_track is not None:
+                    robot_pose: PoseStamped = self._robot_pose_in_odom()
+                    if robot_pose:
+                        dist: float = self._euclidian_distance(
+                            robot_pose.pose, prev_track.pose
+                        )
+                        rospy.loginfo(f"Distance to last known position: {dist}")
+                        if dist >= MAX_VISION_DIST:
+                            ask_back = True
+                    else:
+                        ask_back = True
+                else:
+                    ask_back = True
+
+                if not ask_back:
+                    self._recover_track(say=not self._recover_vision(prev_goal))
+                else:
+                    self._recover_track(say=True)
+
                prev_track = None
                continue

@@ -429,6 +444,9 @@ def follow(self) -> None:
                last_goal_time = rospy.Time.now()
                prev_track = track

+            if np.mean([np.linalg.norm(vel) for vel in track_vels]) > self._max_speed:
+                self._tts("Please walk slower, I am struggling to keep up", wait=False)
+
            # Distance to the previous pose
            dist_to_prev = (
                self._euclidian_distance(track.pose, prev_track.pose)
@@ -459,10 +477,51 @@ def follow(self) -> None:
            elif (
                np.mean([np.linalg.norm(vel) for vel in track_vels])
                < self._static_speed
-            ):
-
-                rospy.logwarn("Person has been static for too long, stopping")
+            ) and len(track_vels) == 10:
+                rospy.logwarn(
+                    "Person has been static for too long, going to them and stopping"
+                )
                # cancel current goal
                self._cancel_goal()

+                # clear velocity buffer
+                track_vels = []
+
+                robot_pose: PoseStamped = self._robot_pose_in_odom()
+                dist: float = self._euclidian_distance(track.pose, robot_pose.pose)
+                rospy.loginfo(f"Distance to person: {dist}")
+
+                # If the person is too far away, go to them
+                if dist > self._stopping_distance:
+                    goal_pose = self._get_pose_on_path(
+                        self._tf_pose(robot_pose, "map"),
+                        self._tf_pose(
+                            PoseStamped(
+                                pose=track.pose,
+                                header=tracks.header,
+                            ),
+                            "map",
+                        ),
+                        self._stopping_distance,
+                    )
+                    # If we can't find a path, face them
+                    if goal_pose is None:
+                        rospy.logwarn("Could not find a path to the person")
+                        goal_pose = robot_pose
+                        goal_pose.pose.orientation = self._compute_face_quat(
+                            robot_pose.pose, track.pose
+                        )
+                        goal_pose = self._tf_pose(goal_pose, "map")
+                # Otherwise, face them
+                else:
+                    goal_pose = robot_pose
+                    goal_pose.pose.orientation = self._compute_face_quat(
+                        robot_pose.pose, track.pose
+                    )
+                    goal_pose = self._tf_pose(goal_pose, "map")
+
+                self._move_base(goal_pose)
+
                if self._check_finished():
                    rospy.loginfo("Finished following person")
                    break
2 changes: 1 addition & 1 deletion common/third_party/leg_tracker
11 changes: 10 additions & 1 deletion skills/src/lasr_skills/handover_object.py
@@ -95,12 +95,21 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True):
"LOOK_CENTRE",
PlayMotion(motion_name="look_centre"),
transitions={
"succeeded": "REACH_ARM",
"succeeded": "SAY_REACH_ARM",
"aborted": "failed",
"preempted": "failed",
},
)

smach.StateMachine.add(
"SAY_REACH_ARM",
Say(text="Please step back, I am going to reach my arm out."),
transitions={
"succeeded": "REACH_ARM",
"aborted": "REACH_ARM",
"preempted": "REACH_ARM",
},
)
if vertical:
smach.StateMachine.add(
"REACH_ARM",