diff --git a/depthai_nodes/ml/parsers/image_output.py b/depthai_nodes/ml/parsers/image_output.py
index e60bb09..be25053 100644
--- a/depthai_nodes/ml/parsers/image_output.py
+++ b/depthai_nodes/ml/parsers/image_output.py
@@ -58,7 +58,7 @@ def run(self):
                 raise ValueError(
                     f"Expected 1 output layer, got {len(output_layer_names)}."
                 )
-            output = output.getTensor(output_layer_names[0])
+            output = output.getTensor(output_layer_names[0], dequantize=True)
 
             if len(output.shape) == 4:
                 image = output[0]
diff --git a/depthai_nodes/ml/parsers/keypoints.py b/depthai_nodes/ml/parsers/keypoints.py
index a501ba1..07d93f9 100644
--- a/depthai_nodes/ml/parsers/keypoints.py
+++ b/depthai_nodes/ml/parsers/keypoints.py
@@ -87,7 +87,9 @@ def run(self):
                     f"Expected 1 output layer, got {len(output_layer_names)}."
                 )
 
-            keypoints = output.getTensor(output_layer_names[0])
+            keypoints = output.getTensor(output_layer_names[0], dequantize=True).astype(
+                np.float32
+            )
 
             num_coords = int(np.prod(keypoints.shape) / self.num_keypoints)
             if num_coords not in [2, 3]:
diff --git a/depthai_nodes/ml/parsers/mediapipe_hand_landmarker.py b/depthai_nodes/ml/parsers/mediapipe_hand_landmarker.py
index effbc0f..346b881 100644
--- a/depthai_nodes/ml/parsers/mediapipe_hand_landmarker.py
+++ b/depthai_nodes/ml/parsers/mediapipe_hand_landmarker.py
@@ -71,9 +71,21 @@ def run(self):
             except dai.MessageQueue.QueueException:
                 break  # Pipeline was stopped
 
-            landmarks = output.getTensor("Identity").reshape(21, 3).astype(np.float32)
-            hand_score = output.getTensor("Identity_1").reshape(-1).astype(np.float32)
-            handedness = output.getTensor("Identity_2").reshape(-1).astype(np.float32)
+            landmarks = (
+                output.getTensor("Identity", dequantize=True)
+                .reshape(21, 3)
+                .astype(np.float32)
+            )
+            hand_score = (
+                output.getTensor("Identity_1", dequantize=True)
+                .reshape(-1)
+                .astype(np.float32)
+            )
+            handedness = (
+                output.getTensor("Identity_2", dequantize=True)
+                .reshape(-1)
+                .astype(np.float32)
+            )
 
             hand_score = hand_score[0]
             handedness = handedness[0]
diff --git a/depthai_nodes/ml/parsers/mediapipe_palm_detection.py b/depthai_nodes/ml/parsers/mediapipe_palm_detection.py
index aeacd17..2f297d7 100644
--- a/depthai_nodes/ml/parsers/mediapipe_palm_detection.py
+++ b/depthai_nodes/ml/parsers/mediapipe_palm_detection.py
@@ -85,8 +85,16 @@ def run(self):
             except dai.MessageQueue.QueueException:
                 break  # Pipeline was stopped
 
-            bboxes = output.getTensor("Identity").reshape(2016, 18).astype(np.float32)
-            scores = output.getTensor("Identity_1").reshape(2016).astype(np.float32)
+            bboxes = (
+                output.getTensor("Identity", dequantize=True)
+                .reshape(2016, 18)
+                .astype(np.float32)
+            )
+            scores = (
+                output.getTensor("Identity_1", dequantize=True)
+                .reshape(2016)
+                .astype(np.float32)
+            )
 
             decoded_bboxes = generate_anchors_and_decode(
                 bboxes=bboxes, scores=scores, threshold=self.score_threshold, scale=192
diff --git a/depthai_nodes/ml/parsers/segmentation.py b/depthai_nodes/ml/parsers/segmentation.py
index d4f9e0b..e141c3a 100644
--- a/depthai_nodes/ml/parsers/segmentation.py
+++ b/depthai_nodes/ml/parsers/segmentation.py
@@ -62,9 +62,11 @@ def run(self):
                     f"Expected 1 output layer, got {len(output_layer_names)}."
                 )
 
-            segmentation_mask = output.getTensor(output_layer_names[0])[
-                0
-            ]  # num_clases x H x W
+            segmentation_mask = output.getTensor(output_layer_names[0], dequantize=True)
+            if len(segmentation_mask.shape) == 4:
+                segmentation_mask = segmentation_mask[0]
+            else:
+                segmentation_mask = segmentation_mask.transpose(2, 0, 1)
 
             if len(segmentation_mask.shape) != 3:
                 raise ValueError(
diff --git a/depthai_nodes/ml/parsers/superanimal_landmarker.py b/depthai_nodes/ml/parsers/superanimal_landmarker.py
index 9b0bdee..31ed393 100644
--- a/depthai_nodes/ml/parsers/superanimal_landmarker.py
+++ b/depthai_nodes/ml/parsers/superanimal_landmarker.py
@@ -68,7 +68,10 @@ def run(self):
             except dai.MessageQueue.QueueException:
                 break  # Pipeline was stopped
 
-            heatmaps = output.getTensor("heatmaps").astype(np.float32)
+            heatmaps = output.getTensor("heatmaps", dequantize=True).astype(np.float32)
+
+            if len(heatmaps.shape) == 3:
+                heatmaps = heatmaps.reshape((1,) + heatmaps.shape)
 
             heatmaps_scale_factor = (
                 self.scale_factor / heatmaps.shape[1],
diff --git a/depthai_nodes/ml/parsers/xfeat.py b/depthai_nodes/ml/parsers/xfeat.py
index f7ecf63..6d4a451 100644
--- a/depthai_nodes/ml/parsers/xfeat.py
+++ b/depthai_nodes/ml/parsers/xfeat.py
@@ -82,8 +82,17 @@ def run(self):
             except dai.MessageQueue.QueueException:
                 break  # Pipeline was stopped
 
-            feats = output.getTensor("feats").astype(np.float32)
-            keypoints = output.getTensor("keypoints").astype(np.float32)
+            feats = output.getTensor("feats", dequantize=True).astype(np.float32)
+            keypoints = output.getTensor("keypoints", dequantize=True).astype(
+                np.float32
+            )
+
+            if len(feats.shape) == 3:
+                feats = feats.reshape((1,) + feats.shape).transpose(0, 3, 1, 2)
+            if len(keypoints.shape) == 3:
+                keypoints = keypoints.reshape((1,) + keypoints.shape).transpose(
+                    0, 3, 1, 2
+                )
 
             result = detect_and_compute(
                 feats, keypoints, resize_rate_w, resize_rate_h, self.input_size
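Every hunk above applies the same pattern: fetch the output tensor with `dequantize=True`, cast it to `np.float32`, and, where the model may return a 3-D channels-last array, restore the batched NCHW layout the decoders expect. A minimal sketch of that pattern as a standalone helper is below; `read_dequantized` is a hypothetical name, and the channels-last assumption for 3-D outputs mirrors the xfeat/superanimal hunks rather than a guarantee of the DepthAI API.

```python
import depthai as dai
import numpy as np


def read_dequantized(output: dai.NNData, name: str) -> np.ndarray:
    # Hypothetical helper, not part of depthai_nodes: dequantize=True asks
    # DepthAI to map a quantized output back to real values using the model's
    # scale/zero-point before returning the array.
    tensor = output.getTensor(name, dequantize=True).astype(np.float32)

    # Assumption mirrored from the xfeat/superanimal hunks: a 3-D output is
    # treated as channels-last (H, W, C) and normalized to NCHW (1, C, H, W).
    if len(tensor.shape) == 3:
        tensor = tensor.reshape((1,) + tensor.shape).transpose(0, 3, 1, 2)

    return tensor
```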