From b5930f6bdbb7ddb82fc4125301de59c503cd7e21 Mon Sep 17 00:00:00 2001
From: Jared Swift
Date: Mon, 6 May 2024 19:08:28 +0100
Subject: [PATCH] feat: DetectFaces and Recognise skills.

---
 skills/src/lasr_skills/__init__.py     |  2 ++
 skills/src/lasr_skills/detect_faces.py | 25 ++++++++++++++++++++
 skills/src/lasr_skills/recognise.py    | 32 ++++++++++++++++++++++++++
 3 files changed, 59 insertions(+)
 create mode 100644 skills/src/lasr_skills/detect_faces.py
 create mode 100644 skills/src/lasr_skills/recognise.py

diff --git a/skills/src/lasr_skills/__init__.py b/skills/src/lasr_skills/__init__.py
index a1931c23d..41bcd42c8 100755
--- a/skills/src/lasr_skills/__init__.py
+++ b/skills/src/lasr_skills/__init__.py
@@ -17,3 +17,5 @@
 from .handover_object import HandoverObject
 from .ask_and_listen import AskAndListen
 from .clip_vqa import QueryImage
+from .detect_faces import DetectFaces
+from .recognise import Recognise
diff --git a/skills/src/lasr_skills/detect_faces.py b/skills/src/lasr_skills/detect_faces.py
new file mode 100644
index 000000000..c0605b3f8
--- /dev/null
+++ b/skills/src/lasr_skills/detect_faces.py
@@ -0,0 +1,25 @@
+import rospy
+import smach
+
+from lasr_vision_msgs.srv import DetectFaces as DetectFacesSrv
+from sensor_msgs.msg import Image
+
+
+class DetectFaces(smach.State):
+
+    def __init__(self, image_topic: str = "/xtion/rgb/image_raw"):
+        smach.State.__init__(
+            self, outcomes=["succeeded", "failed"], output_keys=["detections"]
+        )
+        self._image_topic = image_topic
+        self._detect_faces = rospy.ServiceProxy("/detect_faces", DetectFacesSrv)
+
+    def execute(self, userdata):
+        img_msg = rospy.wait_for_message(self._image_topic, Image)
+        try:
+            result = self._detect_faces(img_msg)
+            userdata.detections = result
+            return "succeeded"
+        except rospy.ServiceException as e:
+            rospy.logwarn(f"Unable to perform inference. ({str(e)})")
+            return "failed"
diff --git a/skills/src/lasr_skills/recognise.py b/skills/src/lasr_skills/recognise.py
new file mode 100644
index 000000000..e3770a897
--- /dev/null
+++ b/skills/src/lasr_skills/recognise.py
@@ -0,0 +1,32 @@
+import rospy
+import smach
+
+from lasr_vision_msgs.srv import Recognise as RecogniseSrv
+from sensor_msgs.msg import Image
+
+
+class Recognise(smach.State):
+
+    def __init__(
+        self,
+        dataset: str,
+        confidence: float = 0.5,
+        image_topic: str = "/xtion/rgb/image_raw",
+    ):
+        smach.State.__init__(
+            self, outcomes=["succeeded", "failed"], output_keys=["detections"]
+        )
+        self._dataset = dataset
+        self._confidence = confidence
+        self._image_topic = image_topic
+        self._recognise = rospy.ServiceProxy("/recognise", RecogniseSrv)
+
+    def execute(self, userdata):
+        img_msg = rospy.wait_for_message(self._image_topic, Image)
+        try:
+            result = self._recognise(img_msg, self._dataset, self._confidence)
+            userdata.detections = result
+            return "succeeded"
+        except rospy.ServiceException as e:
+            rospy.logwarn(f"Unable to perform inference. ({str(e)})")
+            return "failed"
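
Usage note (not part of the patch): a minimal sketch of how the two new states could be composed in a smach state machine, assuming the /detect_faces and /recognise services are running. The node name, the dataset name "known_people", and the state-machine userdata keys are placeholders chosen for illustration; the structure of the response stored in userdata.detections depends on the DetectFaces and Recognise service definitions in lasr_vision_msgs.

#!/usr/bin/env python3
# Example composition of the new skills; names marked as placeholders above.
import rospy
import smach

from lasr_skills import DetectFaces, Recognise


def main():
    rospy.init_node("face_pipeline_example")

    sm = smach.StateMachine(outcomes=["succeeded", "failed"])
    with sm:
        # Grab one frame from the default RGB topic and detect faces in it.
        smach.StateMachine.add(
            "DETECT_FACES",
            DetectFaces(),
            transitions={"succeeded": "RECOGNISE", "failed": "failed"},
            remapping={"detections": "face_detections"},
        )
        # Match faces against a trained dataset ("known_people" is hypothetical).
        smach.StateMachine.add(
            "RECOGNISE",
            Recognise(dataset="known_people", confidence=0.5),
            transitions={"succeeded": "succeeded", "failed": "failed"},
            remapping={"detections": "recognition_result"},
        )

    outcome = sm.execute()
    rospy.loginfo(f"Face pipeline finished with outcome: {outcome}")


if __name__ == "__main__":
    main()

Note that both states write the raw service response to their "detections" output key, so downstream states need to unpack whatever fields the lasr_vision_msgs service types define rather than expecting a plain detection list.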