Commit c580dae

jws-1 and fireblonde authored
CML 08-07 (#248)
Co-authored-by: fireblonde <nicollehchevska@gmail.com>
1 parent 13464b7 commit c580dae

File tree: 10 files changed (+162 additions, −115 deletions); four of the changed files appear below.


common/navigation/lasr_person_following/src/lasr_person_following/person_following.py

Lines changed: 45 additions & 64 deletions
@@ -40,7 +40,6 @@
 
 from pal_interaction_msgs.msg import TtsGoal, TtsAction
 
-
 MAX_VISION_DIST: float = 5.0
 
 

@@ -82,14 +81,16 @@ def __init__(
         new_goal_threshold: float = 2.0,
         stopping_distance: float = 1.0,
         static_speed: float = 0.0015,
-        max_speed: float = 0.5,
+        max_speed: float = 0.55,
+        asking_time_limit: float = 15.0,
     ):
         self._start_following_radius = start_following_radius
         self._start_following_angle = start_following_angle
         self._new_goal_threshold = new_goal_threshold
         self._stopping_distance = stopping_distance
         self._static_speed = static_speed
         self._max_speed = max_speed
+        self._asking_time_limit = asking_time_limit
 
         self._track_id = None

@@ -237,12 +238,12 @@ def _detect_waving_person(self) -> DetectWaveResponse:
             return DetectWaveResponse()
 
     # recover with vision, look around and check if person is waving
-    def _recover_vision(self, prev_pose: PoseStamped) -> bool:
+    def _recover_vision(self, robot_pose: PoseStamped, prev_pose: PoseStamped) -> bool:
 
         # cancel current goal
         self._cancel_goal()
 
-        self._tts("Can you wave at me so that i can try to find you easily", wait=True)
+        self._tts("Please come into my view and wave so that I can find you", wait=True)
 
         for motion in self._vision_recovery_motions:
             rospy.loginfo(f"Performing motion: {motion}")

@@ -257,27 +258,30 @@ def _recover_vision(self, robot_pose: PoseStamped, prev_pose: PoseStamped) -> bool:
             ):
                 continue
             else:
-                goal_pose = self._tf_pose(
-                    PoseStamped(
-                        pose=Pose(
-                            position=Point(
-                                x=response.wave_position.point.x,
-                                y=response.wave_position.point.y,
-                                z=response.wave_position.point.z,
+                goal_pose = self._get_pose_on_path(
+                    self._tf_pose(robot_pose, "map"),
+                    self._tf_pose(
+                        PoseStamped(
+                            pose=Pose(
+                                position=Point(
+                                    x=response.wave_position.point.x,
+                                    y=response.wave_position.point.y,
+                                    z=response.wave_position.point.z,
+                                ),
+                                orientation=Quaternion(0, 0, 0, 1),
                             ),
-                            orientation=Quaternion(0, 0, 0, 1),
+                            header=prev_pose.header,
                         ),
-                        header=prev_pose.header,
+                        "map",
                     ),
-                    "map",
-                )
-                goal_pose.pose.orientation = self._compute_face_quat(
-                    prev_pose.pose, goal_pose.pose
+                    self._stopping_distance,
                 )
-                rospy.loginfo(goal_pose.pose.position)
-                self._move_base(goal_pose)
-                return True
-
+                if goal_pose is None:
+                    rospy.logwarn("Could not find a path to the person")
+                    return False
+                else:
+                    self._move_base(goal_pose)
+                    return True
         return False
 
     def _euclidian_distance(self, p1: Pose, p2: Pose) -> float:
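The rewritten branch above routes vision-based recovery through _get_pose_on_path, which (per the diff) returns None when no path stopping short of the target can be found. A minimal sketch of that pattern, isolated from the class; approach_or_fail and the follower argument are hypothetical, only the method names and the None contract come from the diff:

from typing import Optional

from geometry_msgs.msg import PoseStamped


def approach_or_fail(follower, target_pose: PoseStamped, robot_pose: PoseStamped) -> bool:
    # Plan to a point on the path that stops short of the person;
    # _get_pose_on_path returns None when no plan exists.
    goal: Optional[PoseStamped] = follower._get_pose_on_path(
        follower._tf_pose(robot_pose, "map"),
        follower._tf_pose(target_pose, "map"),
        follower._stopping_distance,
    )
    if goal is None:
        # Treat "no path" as a failed recovery rather than sending
        # an unreachable goal to move_base.
        return False
    follower._move_base(goal)
    return True

Treating a failed plan as a failed recovery (rather than facing the person and returning True, as the old code did) lets the caller fall back to _recover_track.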
@@ -383,6 +387,7 @@ def follow(self) -> None:
         last_goal_time: Union[None, rospy.Time] = None
         going_to_person: bool = False
         track_vels: List[float] = []
+        asking_time: rospy.Time = rospy.Time.now()
 
         while not rospy.is_shutdown():

@@ -419,7 +424,9 @@ def follow(self) -> None:
                         ask_back = True
 
                     if not ask_back:
-                        self._recover_track(say=not self._recover_vision(prev_goal))
+                        self._recover_track(
+                            say=not self._recover_vision(robot_pose, prev_goal)
+                        )
                     else:
                         self._recover_track(say=True)

@@ -444,8 +451,14 @@ def follow(self) -> None:
                 last_goal_time = rospy.Time.now()
                 prev_track = track
 
-            if np.mean([np.linalg.norm(vel) for vel in track_vels]) > self._max_speed:
+            # Check if the person is walking too fast and ask them to slow down only
+            # if you haven't asked them in the last 15 seconds
+            if (
+                np.mean([np.linalg.norm(vel) for vel in track_vels]) > self._max_speed
+                and (rospy.Time.now() - asking_time).to_sec() > self._asking_time_limit
+            ):
                 self._tts("Please walk slower, I am struggling to keep up", wait=False)
+                asking_time = rospy.Time.now()
 
             # Distance to the previous pose
             dist_to_prev = (

@@ -465,19 +478,22 @@ def follow(self) -> None:
                     prev_goal = goal_pose
                     prev_track = track
                     last_goal_time = rospy.Time.now()
+                # retry goal if it was aborted
                 elif (
                     self._move_base_client.get_state() in [GoalStatus.ABORTED]
                     and prev_goal is not None
                 ):
                     rospy.logwarn("Goal was aborted, retrying")
-                    rospy.logwarn((rospy.Time.now() - last_goal_time).to_sec())
-                    rospy.logwarn(track.pose == prev_track.pose)
-                    rospy.logwarn("")
                     self._move_base(prev_goal)
+                # check if the person has been static for too long
                 elif (
-                    np.mean([np.linalg.norm(vel) for vel in track_vels])
-                    < self._static_speed
-                ) and len(track_vels) == 10:
+                    (
+                        np.mean([np.linalg.norm(vel) for vel in track_vels])
+                        < self._static_speed
+                    )
+                    and len(track_vels) == 10
+                    and prev_track is not None
+                ):
                     rospy.logwarn(
                         "Person has been static for too long, going to them and stopping"
                     )

@@ -487,41 +503,6 @@ def follow(self) -> None:
                     # clear velocity buffer
                     track_vels = []
 
-                    robot_pose: PoseStamped = self._robot_pose_in_odom()
-                    dist: float = self._euclidian_distance(track.pose, robot_pose.pose)
-                    rospy.loginfo(f"Distance to person: {dist}")
-
-                    # If the person is too far away, go to them
-                    if dist > self._stopping_distance:
-                        goal_pose = self._get_pose_on_path(
-                            self._tf_pose(robot_pose, "map"),
-                            self._tf_pose(
-                                PoseStamped(
-                                    pose=track.pose,
-                                    header=tracks.header,
-                                ),
-                                "map",
-                            ),
-                            self._stopping_distance,
-                        )
-                        # If we can't find a path, face them
-                        if goal_pose is None:
-                            rospy.logwarn("Could not find a path to the person")
-                            goal_pose = robot_pose
-                            goal_pose.pose.orientation = self._compute_face_quat(
-                                robot_pose.pose, track.pose
-                            )
-                            goal_pose = self._tf_pose(goal_pose, "map")
-                        # Otherwise, face them
-                        else:
-                            goal_pose = robot_pose
-                            goal_pose.pose.orientation = self._compute_face_quat(
-                                robot_pose.pose, track.pose
-                            )
-                            goal_pose = self._tf_pose(goal_pose, "map")
-
-                        self._move_base(goal_pose)
-
             if self._check_finished():
                 rospy.loginfo("Finished following person")
                 break
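The new asking_time / asking_time_limit pair rate-limits the "please walk slower" prompt to at most one utterance per window. The same throttling pattern in isolation; a minimal sketch where ThrottledPrompt and the injected tts callable are hypothetical, and only the rospy.Time arithmetic mirrors the commit:

import rospy


class ThrottledPrompt:
    """Speak a prompt at most once every `time_limit` seconds."""

    def __init__(self, tts, time_limit: float = 15.0):
        self._tts = tts  # any callable shaped like tts(text, wait=False)
        self._time_limit = time_limit
        self._last_asked = rospy.Time.now()

    def say(self, text: str) -> None:
        # Skip the prompt entirely if we asked too recently.
        if (rospy.Time.now() - self._last_asked).to_sec() > self._time_limit:
            self._tts(text, wait=False)
            self._last_asked = rospy.Time.now()

Seeding _last_asked with Time.now() reproduces the commit's behaviour of also suppressing the prompt for the first window after start-up.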

common/speech/lasr_speech_recognition_whisper/nodes/transcribe_microphone_server

Lines changed: 8 additions & 8 deletions
All three hunks in this file are whitespace-only reformatting; the code itself is unchanged (the page strips leading whitespace, so removed and added lines render identically).

@@ -53,9 +53,9 @@ class TranscribeSpeechAction(object):
     _result = lasr_speech_recognition_msgs.msg.TranscribeSpeechResult()
 
     def __init__(
-        self,
-        action_name: str,
-        model_params: speech_model_params,
+        self,
+        action_name: str,
+        model_params: speech_model_params,
     ) -> None:
         """Starts an action server for transcribing speech.

@@ -116,9 +116,9 @@ class TranscribeSpeechAction(object):
         )
 
     def _configure_recogniser(
-        self,
-        energy_threshold: Optional[float] = None,
-        pause_threshold: Optional[float] = None,
+        self,
+        energy_threshold: Optional[float] = None,
+        pause_threshold: Optional[float] = None,
     ) -> sr.Recognizer:
         """Configures the speech recogniser object.

@@ -197,8 +197,8 @@ class TranscribeSpeechAction(object):
         ).get_wav_data()
         # Magic number 32768.0 is the maximum value of a 16-bit signed integer
         float_data = (
-            np.frombuffer(wav_data, dtype=np.int16).astype(np.float32, order="C")
-            / 32768.0
+            np.frombuffer(wav_data, dtype=np.int16).astype(np.float32, order="C")
+            / 32768.0
         )
 
         if self._action_server.is_preempt_requested():
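The float_data expression touched above is worth unpacking: 32768.0 is 2**15, the magnitude of the most negative int16 sample, so the division maps raw 16-bit PCM into [-1.0, 1.0), the float range the Whisper transcriber consumes. As a standalone helper (wav_bytes_to_float32 is a hypothetical name, not part of the repo):

import numpy as np


def wav_bytes_to_float32(wav_data: bytes) -> np.ndarray:
    """Convert 16-bit PCM bytes to float32 samples in [-1.0, 1.0)."""
    int_samples = np.frombuffer(wav_data, dtype=np.int16)
    # 32768.0 == 2**15 is the magnitude of the most negative int16 value,
    # so -32768 maps to exactly -1.0 and 32767 to just under 1.0.
    return int_samples.astype(np.float32, order="C") / 32768.0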

skills/src/lasr_skills/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,4 @@
+from .wait import Wait
 from .detect import Detect
 from .detect_3d import Detect3D
 from .detect_3d_in_area import Detect3DInArea

@@ -21,6 +22,5 @@
 from .recognise import Recognise
 from .detect_gesture import DetectGesture
 from .look_at_person import LookAtPerson
-from .wait import Wait
 from .find_gesture_person import FindGesturePerson
 from .adjust_camera import AdjustCamera

skills/src/lasr_skills/detect_gesture.py

Lines changed: 49 additions & 11 deletions
@@ -10,6 +10,12 @@
     BodyPixKeypointDetectionRequest,
 )
 from sensor_msgs.msg import Image
+from geometry_msgs.msg import Point, Pose, PoseStamped, Quaternion, PointStamped
+from std_msgs.msg import Header
+from tf2_geometry_msgs.tf2_geometry_msgs import do_transform_pose
+import tf2_ros as tf
+from visualization_msgs.msg import Marker
+from markers import create_and_publish_marker
 
 
 class DetectGesture(smach.State):

@@ -29,8 +35,8 @@ def __init__(
         smach.State.__init__(
             self,
             outcomes=["succeeded", "missing_keypoints", "failed"],
-            input_keys=["img_msg"],
-            output_keys=["gesture_detected"],
+            input_keys=["img_msg", "detection"],
+            output_keys=["gesture_detected", "person_point"],
         )
         self.gesture_to_detect = gesture_to_detect
         self.bodypix_client = rospy.ServiceProxy(

@@ -46,6 +52,8 @@ def __init__(
             "rightWrist",
             "rightShoulder",
         ]
+        # publish a marker
+        self.person_point_pub = rospy.Publisher("/person_point", Marker, queue_size=1)
 
     def execute(self, userdata):

@@ -68,13 +76,15 @@ def execute(self, userdata):
             for keypoint in detected_keypoints
         }
 
+        detected_gesture = "none"
+
         if "leftShoulder" in keypoint_info and "leftWrist" in keypoint_info:
             if (
                 self.gesture_to_detect == "raising_left_arm"
                 or self.gesture_to_detect is None
             ):
                 if keypoint_info["leftWrist"]["y"] < keypoint_info["leftShoulder"]["y"]:
-                    self.gesture_to_detect = "raising_left_arm"
+                    detected_gesture = "raising_left_arm"
             if (
                 self.gesture_to_detect == "pointing_to_the_left"
                 or self.gesture_to_detect is None

@@ -83,7 +93,7 @@ def execute(self, userdata):
                 keypoint_info["leftWrist"]["x"] - self.buffer_width
                 > keypoint_info["leftShoulder"]["x"]
             ):
-                self.gesture_to_detect = "pointing_to_the_left"
+                detected_gesture = "pointing_to_the_left"
 
         if (
             "rightShoulder" in keypoint_info

@@ -99,7 +109,7 @@ def execute(self, userdata):
                 keypoint_info["rightWrist"]["y"]
                 < keypoint_info["rightShoulder"]["y"]
             ):
-                self.gesture_to_detect = "raising_right_arm"
+                detected_gesture = "raising_right_arm"
             if (
                 self.gesture_to_detect == "pointing_to_the_right"
                 or self.gesture_to_detect is None

@@ -108,19 +118,47 @@ def execute(self, userdata):
                 keypoint_info["rightShoulder"]["x"] - self.buffer_width
                 > keypoint_info["rightWrist"]["x"]
             ):
-                self.gesture_to_detect = "pointing_to_the_right"
+                detected_gesture = "pointing_to_the_right"
 
-        if self.gesture_to_detect is None:
-            self.gesture_to_detect = "none"
+        rospy.loginfo(f"Detected gesture: {detected_gesture}")
+        userdata.gesture_detected = detected_gesture
 
-        rospy.loginfo(f"Detected gesture: {self.gesture_to_detect}")
-        userdata.gesture_detected = self.gesture_to_detect
+        if not userdata.detection.point:
+            # take it a meter away from the robot position if no keypoints are detected
+            robot_pose = rospy.wait_for_message("/robot_pose", Pose)
+            userdata.person_point = Point(
+                x=robot_pose.position.x + 1, y=robot_pose.position.y
+            )
+        else:
+            _buffer = tf.Buffer(cache_time=rospy.Duration.from_sec(10.0))
+            _listener = tf.TransformListener(_buffer)
+            person_pose = PoseStamped(
+                header=Header(frame_id="map"),
+                pose=Pose(
+                    position=userdata.detection.point,
+                    orientation=Quaternion(0, 0, 0, 1),
+                ),
+            )
+            trans = _buffer.lookup_transform(
+                "odom", person_pose.header.frame_id, rospy.Time(0), rospy.Duration(1.0)
+            )
+            pose = do_transform_pose(person_pose, trans)
+            # userdata.person_point = pose.pose.position
+            create_and_publish_marker(
+                self.person_point_pub,
+                PointStamped(header=pose.header, point=pose.pose.position),
+                name="person_point_odom",
+                r=0.0,
+                g=0.0,
+                b=1.0,
+            )
+            userdata.person_point = pose.pose.position
 
         cv2_gesture_img = cv2_img.msg_to_cv2_img(userdata.img_msg)
         # Add text to the image
         cv2.putText(
             cv2_gesture_img,
-            self.gesture_to_detect,
+            detected_gesture,
             (10, 30),
             cv2.FONT_HERSHEY_SIMPLEX,
             1,
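The new else branch in execute follows the usual tf2 recipe: stamp the detection in its source frame, look up the transform into the target frame, and apply it with do_transform_pose. A minimal sketch of that recipe; map_point_to_odom is hypothetical, and passing the Buffer in (rather than constructing it per call, as the committed code does) avoids racing an empty buffer and leaning on the one-second lookup timeout:

import rospy
import tf2_ros
from geometry_msgs.msg import Point, Pose, PoseStamped, Quaternion
from std_msgs.msg import Header
from tf2_geometry_msgs.tf2_geometry_msgs import do_transform_pose


def map_point_to_odom(buffer: tf2_ros.Buffer, point: Point) -> Point:
    """Transform a point given in the 'map' frame into the 'odom' frame."""
    person_pose = PoseStamped(
        header=Header(frame_id="map"),
        pose=Pose(position=point, orientation=Quaternion(0, 0, 0, 1)),
    )
    # Block for up to one second until the map -> odom transform is available.
    trans = buffer.lookup_transform(
        "odom", person_pose.header.frame_id, rospy.Time(0), rospy.Duration(1.0)
    )
    return do_transform_pose(person_pose, trans).pose.position

In a real node the Buffer and TransformListener pair would be created once at start-up so the buffer has time to fill before the first lookup.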
