From afdbf1dd8d13a3f811551311fc134470d1ed57a0 Mon Sep 17 00:00:00 2001 From: muhmammadawaisofficial Date: Mon, 9 Feb 2026 01:39:12 +0500 Subject: [PATCH 1/2] docs: Add manual download instructions for models hosted on Hugging Face --- .gitattributes | 23 - .gitignore | 9 - LICENSE | 201 - README.md | 3 + benchmark/README.md | 987 ---- benchmark/benchmark.py | 232 - benchmark/color_table.svg | 5161 ----------------- benchmark/config/face_detection_yunet.yaml | 19 - benchmark/config/face_recognition_sface.yaml | 14 - .../config/facial_expression_recognition.yaml | 14 - .../config/handpose_estimation_mediapipe.yaml | 17 - .../config/human_segmentation_pphumanseg.yaml | 16 - .../image_classification_mobilenet.yaml | 17 - .../config/image_classification_ppresnet.yaml | 17 - .../config/license_plate_detection_yunet.yaml | 20 - .../config/object_detection_nanodet.yaml | 18 - benchmark/config/object_detection_yolox.yaml | 19 - .../config/object_tracking_vittrack.yaml | 14 - .../config/palm_detection_mediapipe.yaml | 19 - .../config/person_detection_mediapipe.yaml | 19 - benchmark/config/person_reid_youtureid.yaml | 15 - .../config/pose_estimation_mediapipe.yaml | 17 - benchmark/config/qrcode_wechatqrcode.yaml | 16 - benchmark/config/text_detection_ppocr.yaml | 20 - benchmark/config/text_recognition_crnn.yaml | 14 - benchmark/data/.gitignore | 2 - benchmark/download_data.py | 237 - benchmark/generate_table.py | 277 - benchmark/requirements.txt | 5 - benchmark/table_config.yaml | 246 - benchmark/utils/__init__.py | 5 - benchmark/utils/dataloaders/__init__.py | 6 - benchmark/utils/dataloaders/base.py | 12 - .../utils/dataloaders/base_dataloader.py | 84 - benchmark/utils/dataloaders/classification.py | 42 - benchmark/utils/dataloaders/recognition.py | 33 - benchmark/utils/dataloaders/tracking.py | 27 - benchmark/utils/factory.py | 20 - benchmark/utils/metrics/__init__.py | 6 - benchmark/utils/metrics/base.py | 24 - benchmark/utils/metrics/base_metric.py | 41 - 
benchmark/utils/metrics/detection.py | 29 - benchmark/utils/metrics/recognition.py | 31 - benchmark/utils/metrics/tracking.py | 26 - benchmark/utils/timer.py | 20 - models/__init__.py | 102 - models/deblurring_nafnet/CMakeLists.txt | 11 - models/deblurring_nafnet/LICENSE | 228 - models/deblurring_nafnet/README.md | 54 - .../deblurring_nafnet_2025may.onnx | 3 - models/deblurring_nafnet/demo.cpp | 89 - models/deblurring_nafnet/demo.py | 41 - .../example_outputs/licenseplate_motion.jpg | 3 - .../licenseplate_motion_output.jpg | 3 - models/deblurring_nafnet/nafnet.py | 36 - models/edge_detection_dexined/CMakeLists.txt | 11 - models/edge_detection_dexined/LICENSE | 21 - models/edge_detection_dexined/README.md | 55 - models/edge_detection_dexined/demo.cpp | 138 - models/edge_detection_dexined/demo.py | 51 - models/edge_detection_dexined/dexined.py | 50 - .../edge_detection_dexined_2024sep.onnx | 3 - .../example_outputs/chicky.jpg | 3 - .../example_outputs/chicky_output.jpg | 3 - models/face_detection_yunet/CMakeLists.txt | 11 - models/face_detection_yunet/LICENSE | 21 - models/face_detection_yunet/README.md | 3 + models/face_detection_yunet/demo.cpp | 213 - models/face_detection_yunet/demo.py | 146 - .../example_outputs/largest_selfie.jpg | 3 - .../example_outputs/yunet_demo.gif | 3 - .../face_detection_yunet_2023mar.onnx | 3 - .../face_detection_yunet_2023mar_int8.onnx | 3 - .../face_detection_yunet_2023mar_int8bq.onnx | 3 - models/face_detection_yunet/yunet.py | 55 - .../LICENSE | 395 -- .../README.md | 54 - .../demo.py | 155 - .../ediffiqa.py | 45 - .../ediffiqa_tiny_jun2024.onnx | 3 - .../example_outputs/demo.jpg | 3 - .../quality_distribution.png | 3 - models/face_recognition_sface/CMakeLists.txt | 11 - models/face_recognition_sface/LICENSE | 202 - models/face_recognition_sface/README.md | 68 - models/face_recognition_sface/demo.cpp | 322 - models/face_recognition_sface/demo.py | 156 - .../example_outputs/demo.jpg | 3 - .../face_recognition_sface_2021dec.onnx | 3 - 
.../face_recognition_sface_2021dec_int8.onnx | 3 - ...face_recognition_sface_2021dec_int8bq.onnx | 3 - models/face_recognition_sface/sface.py | 63 - .../CMakeLists.txt | 30 - .../facial_expression_recognition/README.md | 59 - models/facial_expression_recognition/demo.cpp | 304 - models/facial_expression_recognition/demo.py | 135 - .../example_outputs/selfie.jpg | 3 - ...on_recognition_mobilefacenet_2022july.onnx | 3 - ...cognition_mobilefacenet_2022july_int8.onnx | 3 - ...gnition_mobilefacenet_2022july_int8bq.onnx | 3 - .../facial_fer_model.py | 176 - models/handpose_estimation_mediapipe/LICENSE | 202 - .../handpose_estimation_mediapipe/README.md | 42 - models/handpose_estimation_mediapipe/demo.py | 356 -- .../gesture_classification.png | 3 - .../example_outputs/hand_keypoints.png | 3 - .../example_outputs/mphandpose_demo.webp | 3 - ...handpose_estimation_mediapipe_2023feb.onnx | 3 - ...ose_estimation_mediapipe_2023feb_int8.onnx | 3 - ...e_estimation_mediapipe_2023feb_int8bq.onnx | 3 - .../mp_handpose.py | 200 - .../CMakeLists.txt | 31 - models/human_segmentation_pphumanseg/LICENSE | 203 - .../human_segmentation_pphumanseg/README.md | 69 - models/human_segmentation_pphumanseg/demo.cpp | 226 - models/human_segmentation_pphumanseg/demo.py | 162 - .../example_outputs/messi.jpg | 3 - .../example_outputs/pphumanseg_demo.gif | 3 - ...human_segmentation_pphumanseg_2023mar.onnx | 3 - ..._segmentation_pphumanseg_2023mar_int8.onnx | 3 - ...egmentation_pphumanseg_2023mar_int8bq.onnx | 3 - .../pphumanseg.py | 69 - .../CMakeLists.txt | 29 - models/image_classification_mobilenet/LICENSE | 202 - .../image_classification_mobilenet/README.md | 67 - .../image_classification_mobilenet/demo.cpp | 133 - models/image_classification_mobilenet/demo.py | 56 - ...ge_classification_mobilenetv1_2022apr.onnx | 3 - ...assification_mobilenetv1_2022apr_int8.onnx | 3 - ...sification_mobilenetv1_2022apr_int8bq.onnx | 3 - ...ge_classification_mobilenetv2_2022apr.onnx | 3 - 
...assification_mobilenetv2_2022apr_int8.onnx | 3 - ...sification_mobilenetv2_2022apr_int8bq.onnx | 3 - .../labelsimagenet1k.h | 1010 ---- .../mobilenet.py | 1078 ---- .../CMakeLists.txt | 32 - models/image_classification_ppresnet/LICENSE | 203 - .../image_classification_ppresnet/README.md | 60 - models/image_classification_ppresnet/demo.cpp | 1123 ---- models/image_classification_ppresnet/demo.py | 67 - ...age_classification_ppresnet50_2022jan.onnx | 3 - ...lassification_ppresnet50_2022jan_int8.onnx | 3 - ...ssification_ppresnet50_2022jan_int8bq.onnx | 3 - .../image_classification_ppresnet/ppresnet.py | 1083 ---- .../image_segmentation_efficientsam/LICENSE | 201 - .../image_segmentation_efficientsam/README.md | 52 - .../image_segmentation_efficientsam/demo.py | 247 - .../efficientSAM.py | 136 - .../example_outputs/example1.png | 3 - .../example_outputs/example2.png | 3 - .../example_outputs/sam_present.gif | 3 - ..._segmentation_efficientsam_ti_2024may.onnx | 3 - ...egmentation_efficientsam_ti_2025april.onnx | 3 - ...tation_efficientsam_ti_2025april_int8.onnx | 3 - models/inpainting_lama/CMakeLists.txt | 11 - models/inpainting_lama/LICENSE | 201 - models/inpainting_lama/README.md | 49 - models/inpainting_lama/demo.cpp | 174 - models/inpainting_lama/demo.py | 87 - .../example_outputs/squirrel.jpg | 3 - .../example_outputs/squirrel_output.jpg | 3 - .../inpainting_lama_2025jan.onnx | 3 - models/inpainting_lama/lama.py | 43 - models/license_plate_detection_yunet/LICENSE | 203 - .../license_plate_detection_yunet/README.md | 33 - models/license_plate_detection_yunet/demo.py | 130 - .../example_outputs/lpd_yunet_demo.gif | 3 - .../example_outputs/result-1.jpg | 3 - .../example_outputs/result-2.jpg | 3 - .../example_outputs/result-3.jpg | 3 - .../example_outputs/result-4.jpg | 3 - ...nse_plate_detection_lpd_yunet_2023mar.onnx | 3 - ...late_detection_lpd_yunet_2023mar_int8.onnx | 3 - ...te_detection_lpd_yunet_2023mar_int8bq.onnx | 3 - .../lpd_yunet.py | 136 - 
.../object_detection_nanodet/CMakeLists.txt | 32 - models/object_detection_nanodet/LICENSE | 202 - models/object_detection_nanodet/README.md | 142 - models/object_detection_nanodet/demo.cpp | 503 -- models/object_detection_nanodet/demo.py | 182 - .../example_outputs/1_res.jpg | 3 - .../example_outputs/2_res.jpg | 3 - .../example_outputs/3_res.jpg | 3 - .../example_outputs/WebCamR.gif | 3 - models/object_detection_nanodet/nanodet.py | 122 - .../object_detection_nanodet_2022nov.onnx | 3 - ...object_detection_nanodet_2022nov_int8.onnx | 3 - ...ject_detection_nanodet_2022nov_int8bq.onnx | 3 - models/object_detection_yolox/CMakeLists.txt | 29 - models/object_detection_yolox/LICENSE | 201 - models/object_detection_yolox/README.md | 3 + models/object_detection_yolox/demo.cpp | 311 - models/object_detection_yolox/demo.py | 155 - .../example_outputs/1_res.jpg | 3 - .../example_outputs/2_res.jpg | 3 - .../example_outputs/3_res.jpg | 3 - .../object_detection_yolox_2022nov.onnx | 3 - .../object_detection_yolox_2022nov_int8.onnx | 3 - ...object_detection_yolox_2022nov_int8bq.onnx | 3 - models/object_detection_yolox/yolox.py | 85 - .../object_tracking_vittrack/CMakeLists.txt | 32 - models/object_tracking_vittrack/LICENSE | 202 - models/object_tracking_vittrack/README.md | 4 + models/object_tracking_vittrack/demo.cpp | 210 - models/object_tracking_vittrack/demo.py | 125 - .../example_outputs/vittrack_demo.gif | 3 - .../object_tracking_vittrack_2023sep.onnx | 3 - ...ject_tracking_vittrack_2023sep_int8bq.onnx | 3 - models/object_tracking_vittrack/vittrack.py | 39 - .../BSD-3-LICENSE.txt | 29 - .../MITLICENSE.txt | 21 - models/optical_flow_estimation_raft/README.md | 70 - models/optical_flow_estimation_raft/demo.py | 315 - .../example_outputs/result.jpg | 3 - .../example_outputs/vis.png | 3 - .../optical_flow_estimation_raft_2023aug.onnx | 3 - ...l_flow_estimation_raft_2023aug_int8bq.onnx | 3 - models/optical_flow_estimation_raft/raft.py | 53 - 
.../palm_detection_mediapipe/CMakeLists.txt | 11 - models/palm_detection_mediapipe/LICENSE | 202 - models/palm_detection_mediapipe/README.md | 60 - models/palm_detection_mediapipe/demo.cpp | 2379 -------- models/palm_detection_mediapipe/demo.py | 134 - .../example_outputs/mppalmdet_demo.gif | 3 - models/palm_detection_mediapipe/mp_palmdet.py | 2121 ------- .../palm_detection_mediapipe_2023feb.onnx | 3 - ...palm_detection_mediapipe_2023feb_int8.onnx | 3 - ...lm_detection_mediapipe_2023feb_int8bq.onnx | 3 - .../person_detection_mediapipe/CMakeLists.txt | 29 - models/person_detection_mediapipe/LICENSE | 202 - models/person_detection_mediapipe/README.md | 57 - models/person_detection_mediapipe/demo.cpp | 2522 -------- models/person_detection_mediapipe/demo.py | 140 - .../example_outputs/mppersondet_demo.webp | 3 - .../mp_persondet.py | 2366 -------- .../person_detection_mediapipe_2023mar.onnx | 3 - ...on_detection_mediapipe_2023mar_int8bq.onnx | 3 - models/person_reid_youtureid/CMakeLists.txt | 11 - models/person_reid_youtureid/LICENSE | 202 - models/person_reid_youtureid/README.md | 40 - models/person_reid_youtureid/demo.cpp | 308 - models/person_reid_youtureid/demo.py | 124 - .../person_reid_youtu_2021nov.onnx | 3 - .../person_reid_youtu_2021nov_int8.onnx | 3 - .../person_reid_youtu_2021nov_int8bq.onnx | 3 - models/person_reid_youtureid/youtureid.py | 67 - .../pose_estimation_mediapipe/CMakeLists.txt | 29 - models/pose_estimation_mediapipe/LICENSE | 202 - models/pose_estimation_mediapipe/README.md | 54 - models/pose_estimation_mediapipe/demo.cpp | 2850 --------- models/pose_estimation_mediapipe/demo.py | 253 - .../example_outputs/mpposeest_demo.webp | 3 - .../example_outputs/pose_landmarks.png | 3 - models/pose_estimation_mediapipe/mp_pose.py | 179 - .../pose_estimation_mediapipe_2023mar.onnx | 3 - ...e_estimation_mediapipe_2023mar_int8bq.onnx | 3 - models/qrcode_wechatqrcode/CMakeLists.txt | 11 - models/qrcode_wechatqrcode/LICENSE | 202 - 
models/qrcode_wechatqrcode/README.md | 54 - models/qrcode_wechatqrcode/demo.cpp | 192 - models/qrcode_wechatqrcode/demo.py | 136 - .../detect_2021nov.caffemodel | 3 - .../detect_2021nov.prototxt | 2716 --------- .../example_outputs/wechat_qrcode_demo.gif | 3 - .../qrcode_wechatqrcode/sr_2021nov.caffemodel | 3 - .../qrcode_wechatqrcode/sr_2021nov.prototxt | 403 -- models/qrcode_wechatqrcode/wechatqrcode.py | 34 - models/text_detection_ppocr/CMakeLists.txt | 29 - models/text_detection_ppocr/LICENSE | 203 - models/text_detection_ppocr/README.md | 61 - models/text_detection_ppocr/demo.cpp | 186 - models/text_detection_ppocr/demo.py | 155 - .../example_outputs/gsoc.jpg | 3 - .../example_outputs/mask.jpg | 3 - models/text_detection_ppocr/ppocr_det.py | 59 - .../text_detection_cn_ppocrv3_2023may.onnx | 3 - ...ext_detection_cn_ppocrv3_2023may_int8.onnx | 3 - ...t_detection_cn_ppocrv3_2023may_int8bq.onnx | 3 - .../text_detection_en_ppocrv3_2023may.onnx | 3 - ...ext_detection_en_ppocrv3_2023may_int8.onnx | 3 - ...t_detection_en_ppocrv3_2023may_int8bq.onnx | 3 - models/text_recognition_crnn/CMakeLists.txt | 29 - models/text_recognition_crnn/LICENSE | 202 - models/text_recognition_crnn/README.md | 103 - .../charset_32_94_3944.h | 4092 ------------- models/text_recognition_crnn/crnn.py | 4176 ------------- models/text_recognition_crnn/demo.cpp | 294 - models/text_recognition_crnn/demo.py | 169 - .../example_outputs/CRNNCTC.gif | 3 - .../example_outputs/demo.jpg | 3 - .../text_recognition_CRNN_CH_2021sep.onnx | 3 - ...xt_recognition_CRNN_CH_2021sep_int8bq.onnx | 3 - ...text_recognition_CRNN_CH_2022oct_int8.onnx | 3 - ...text_recognition_CRNN_CH_2023feb_fp16.onnx | 3 - .../text_recognition_CRNN_CN_2021nov.onnx | 3 - ...text_recognition_CRNN_CN_2021nov_int8.onnx | 3 - ...xt_recognition_CRNN_CN_2021nov_int8bq.onnx | 3 - .../text_recognition_CRNN_EN_2021sep.onnx | 3 - ...text_recognition_CRNN_EN_2022oct_int8.onnx | 3 - ...text_recognition_CRNN_EN_2023feb_fp16.onnx | 3 - 
.../assets/benchmark_table_4.9.0.png | 3 - .../opencv_zoo_report-cn-2023-4.9.0.md | 53 - .../opencv_zoo_report-en-2023-4.9.0.md | 56 - reports/README.md | 3 - tools/eval/README.md | 228 - tools/eval/datasets/__init__.py | 25 - tools/eval/datasets/icdar.py | 54 - tools/eval/datasets/iiit5k.py | 56 - tools/eval/datasets/imagenet.py | 65 - tools/eval/datasets/lfw.py | 239 - tools/eval/datasets/lfw_face_bboxes.npy | Bin 672128 -> 0 bytes tools/eval/datasets/minisupervisely.py | 202 - tools/eval/datasets/widerface.py | 315 - tools/eval/eval.py | 182 - tools/quantize/README.md | 69 - tools/quantize/block_quantize.py | 513 -- tools/quantize/inc_configs/fer.yaml | 38 - tools/quantize/inc_configs/lpd_yunet.yaml | 52 - tools/quantize/inc_configs/mobilenet.yaml | 98 - tools/quantize/inc_configs/mp_handpose.yaml | 52 - tools/quantize/quantize-inc.py | 150 - tools/quantize/quantize-ort.py | 149 - tools/quantize/requirements.txt | 6 - tools/quantize/transform.py | 129 - 324 files changed, 13 insertions(+), 54198 deletions(-) delete mode 100644 .gitattributes delete mode 100644 .gitignore delete mode 100644 LICENSE delete mode 100644 benchmark/README.md delete mode 100644 benchmark/benchmark.py delete mode 100644 benchmark/color_table.svg delete mode 100644 benchmark/config/face_detection_yunet.yaml delete mode 100644 benchmark/config/face_recognition_sface.yaml delete mode 100644 benchmark/config/facial_expression_recognition.yaml delete mode 100644 benchmark/config/handpose_estimation_mediapipe.yaml delete mode 100644 benchmark/config/human_segmentation_pphumanseg.yaml delete mode 100644 benchmark/config/image_classification_mobilenet.yaml delete mode 100644 benchmark/config/image_classification_ppresnet.yaml delete mode 100644 benchmark/config/license_plate_detection_yunet.yaml delete mode 100644 benchmark/config/object_detection_nanodet.yaml delete mode 100644 benchmark/config/object_detection_yolox.yaml delete mode 100644 benchmark/config/object_tracking_vittrack.yaml delete 
mode 100644 benchmark/config/palm_detection_mediapipe.yaml delete mode 100644 benchmark/config/person_detection_mediapipe.yaml delete mode 100644 benchmark/config/person_reid_youtureid.yaml delete mode 100644 benchmark/config/pose_estimation_mediapipe.yaml delete mode 100644 benchmark/config/qrcode_wechatqrcode.yaml delete mode 100644 benchmark/config/text_detection_ppocr.yaml delete mode 100644 benchmark/config/text_recognition_crnn.yaml delete mode 100644 benchmark/data/.gitignore delete mode 100644 benchmark/download_data.py delete mode 100644 benchmark/generate_table.py delete mode 100644 benchmark/requirements.txt delete mode 100644 benchmark/table_config.yaml delete mode 100644 benchmark/utils/__init__.py delete mode 100644 benchmark/utils/dataloaders/__init__.py delete mode 100644 benchmark/utils/dataloaders/base.py delete mode 100644 benchmark/utils/dataloaders/base_dataloader.py delete mode 100644 benchmark/utils/dataloaders/classification.py delete mode 100644 benchmark/utils/dataloaders/recognition.py delete mode 100644 benchmark/utils/dataloaders/tracking.py delete mode 100644 benchmark/utils/factory.py delete mode 100644 benchmark/utils/metrics/__init__.py delete mode 100644 benchmark/utils/metrics/base.py delete mode 100644 benchmark/utils/metrics/base_metric.py delete mode 100644 benchmark/utils/metrics/detection.py delete mode 100644 benchmark/utils/metrics/recognition.py delete mode 100644 benchmark/utils/metrics/tracking.py delete mode 100644 benchmark/utils/timer.py delete mode 100644 models/__init__.py delete mode 100644 models/deblurring_nafnet/CMakeLists.txt delete mode 100644 models/deblurring_nafnet/LICENSE delete mode 100644 models/deblurring_nafnet/README.md delete mode 100644 models/deblurring_nafnet/deblurring_nafnet_2025may.onnx delete mode 100644 models/deblurring_nafnet/demo.cpp delete mode 100644 models/deblurring_nafnet/demo.py delete mode 100644 models/deblurring_nafnet/example_outputs/licenseplate_motion.jpg delete mode 100644 
models/deblurring_nafnet/example_outputs/licenseplate_motion_output.jpg delete mode 100644 models/deblurring_nafnet/nafnet.py delete mode 100644 models/edge_detection_dexined/CMakeLists.txt delete mode 100644 models/edge_detection_dexined/LICENSE delete mode 100644 models/edge_detection_dexined/README.md delete mode 100644 models/edge_detection_dexined/demo.cpp delete mode 100644 models/edge_detection_dexined/demo.py delete mode 100644 models/edge_detection_dexined/dexined.py delete mode 100644 models/edge_detection_dexined/edge_detection_dexined_2024sep.onnx delete mode 100644 models/edge_detection_dexined/example_outputs/chicky.jpg delete mode 100644 models/edge_detection_dexined/example_outputs/chicky_output.jpg delete mode 100644 models/face_detection_yunet/CMakeLists.txt delete mode 100644 models/face_detection_yunet/LICENSE delete mode 100644 models/face_detection_yunet/demo.cpp delete mode 100644 models/face_detection_yunet/demo.py delete mode 100644 models/face_detection_yunet/example_outputs/largest_selfie.jpg delete mode 100644 models/face_detection_yunet/example_outputs/yunet_demo.gif delete mode 100644 models/face_detection_yunet/face_detection_yunet_2023mar.onnx delete mode 100644 models/face_detection_yunet/face_detection_yunet_2023mar_int8.onnx delete mode 100644 models/face_detection_yunet/face_detection_yunet_2023mar_int8bq.onnx delete mode 100644 models/face_detection_yunet/yunet.py delete mode 100644 models/face_image_quality_assessment_ediffiqa/LICENSE delete mode 100644 models/face_image_quality_assessment_ediffiqa/README.md delete mode 100644 models/face_image_quality_assessment_ediffiqa/demo.py delete mode 100644 models/face_image_quality_assessment_ediffiqa/ediffiqa.py delete mode 100644 models/face_image_quality_assessment_ediffiqa/ediffiqa_tiny_jun2024.onnx delete mode 100644 models/face_image_quality_assessment_ediffiqa/example_outputs/demo.jpg delete mode 100644 models/face_image_quality_assessment_ediffiqa/quality_distribution.png 
delete mode 100644 models/face_recognition_sface/CMakeLists.txt delete mode 100644 models/face_recognition_sface/LICENSE delete mode 100644 models/face_recognition_sface/README.md delete mode 100644 models/face_recognition_sface/demo.cpp delete mode 100644 models/face_recognition_sface/demo.py delete mode 100644 models/face_recognition_sface/example_outputs/demo.jpg delete mode 100644 models/face_recognition_sface/face_recognition_sface_2021dec.onnx delete mode 100644 models/face_recognition_sface/face_recognition_sface_2021dec_int8.onnx delete mode 100644 models/face_recognition_sface/face_recognition_sface_2021dec_int8bq.onnx delete mode 100644 models/face_recognition_sface/sface.py delete mode 100644 models/facial_expression_recognition/CMakeLists.txt delete mode 100644 models/facial_expression_recognition/README.md delete mode 100644 models/facial_expression_recognition/demo.cpp delete mode 100644 models/facial_expression_recognition/demo.py delete mode 100644 models/facial_expression_recognition/example_outputs/selfie.jpg delete mode 100644 models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july.onnx delete mode 100644 models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july_int8.onnx delete mode 100644 models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july_int8bq.onnx delete mode 100644 models/facial_expression_recognition/facial_fer_model.py delete mode 100644 models/handpose_estimation_mediapipe/LICENSE delete mode 100644 models/handpose_estimation_mediapipe/README.md delete mode 100644 models/handpose_estimation_mediapipe/demo.py delete mode 100644 models/handpose_estimation_mediapipe/example_outputs/gesture_classification.png delete mode 100644 models/handpose_estimation_mediapipe/example_outputs/hand_keypoints.png delete mode 100644 models/handpose_estimation_mediapipe/example_outputs/mphandpose_demo.webp delete mode 100644 
models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb.onnx delete mode 100644 models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb_int8.onnx delete mode 100644 models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb_int8bq.onnx delete mode 100644 models/handpose_estimation_mediapipe/mp_handpose.py delete mode 100644 models/human_segmentation_pphumanseg/CMakeLists.txt delete mode 100644 models/human_segmentation_pphumanseg/LICENSE delete mode 100644 models/human_segmentation_pphumanseg/README.md delete mode 100644 models/human_segmentation_pphumanseg/demo.cpp delete mode 100644 models/human_segmentation_pphumanseg/demo.py delete mode 100644 models/human_segmentation_pphumanseg/example_outputs/messi.jpg delete mode 100644 models/human_segmentation_pphumanseg/example_outputs/pphumanseg_demo.gif delete mode 100644 models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar.onnx delete mode 100644 models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8.onnx delete mode 100644 models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8bq.onnx delete mode 100644 models/human_segmentation_pphumanseg/pphumanseg.py delete mode 100644 models/image_classification_mobilenet/CMakeLists.txt delete mode 100644 models/image_classification_mobilenet/LICENSE delete mode 100644 models/image_classification_mobilenet/README.md delete mode 100644 models/image_classification_mobilenet/demo.cpp delete mode 100644 models/image_classification_mobilenet/demo.py delete mode 100644 models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr.onnx delete mode 100644 models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8.onnx delete mode 100644 models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8bq.onnx delete mode 100644 
models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr.onnx delete mode 100644 models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8.onnx delete mode 100644 models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8bq.onnx delete mode 100644 models/image_classification_mobilenet/labelsimagenet1k.h delete mode 100644 models/image_classification_mobilenet/mobilenet.py delete mode 100644 models/image_classification_ppresnet/CMakeLists.txt delete mode 100644 models/image_classification_ppresnet/LICENSE delete mode 100644 models/image_classification_ppresnet/README.md delete mode 100644 models/image_classification_ppresnet/demo.cpp delete mode 100644 models/image_classification_ppresnet/demo.py delete mode 100644 models/image_classification_ppresnet/image_classification_ppresnet50_2022jan.onnx delete mode 100644 models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8.onnx delete mode 100644 models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8bq.onnx delete mode 100644 models/image_classification_ppresnet/ppresnet.py delete mode 100644 models/image_segmentation_efficientsam/LICENSE delete mode 100644 models/image_segmentation_efficientsam/README.md delete mode 100644 models/image_segmentation_efficientsam/demo.py delete mode 100644 models/image_segmentation_efficientsam/efficientSAM.py delete mode 100644 models/image_segmentation_efficientsam/example_outputs/example1.png delete mode 100644 models/image_segmentation_efficientsam/example_outputs/example2.png delete mode 100644 models/image_segmentation_efficientsam/example_outputs/sam_present.gif delete mode 100644 models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2024may.onnx delete mode 100644 models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2025april.onnx delete mode 100644 
models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2025april_int8.onnx delete mode 100644 models/inpainting_lama/CMakeLists.txt delete mode 100644 models/inpainting_lama/LICENSE delete mode 100644 models/inpainting_lama/README.md delete mode 100644 models/inpainting_lama/demo.cpp delete mode 100644 models/inpainting_lama/demo.py delete mode 100644 models/inpainting_lama/example_outputs/squirrel.jpg delete mode 100644 models/inpainting_lama/example_outputs/squirrel_output.jpg delete mode 100644 models/inpainting_lama/inpainting_lama_2025jan.onnx delete mode 100644 models/inpainting_lama/lama.py delete mode 100644 models/license_plate_detection_yunet/LICENSE delete mode 100644 models/license_plate_detection_yunet/README.md delete mode 100644 models/license_plate_detection_yunet/demo.py delete mode 100644 models/license_plate_detection_yunet/example_outputs/lpd_yunet_demo.gif delete mode 100644 models/license_plate_detection_yunet/example_outputs/result-1.jpg delete mode 100644 models/license_plate_detection_yunet/example_outputs/result-2.jpg delete mode 100644 models/license_plate_detection_yunet/example_outputs/result-3.jpg delete mode 100644 models/license_plate_detection_yunet/example_outputs/result-4.jpg delete mode 100644 models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar.onnx delete mode 100644 models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar_int8.onnx delete mode 100644 models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar_int8bq.onnx delete mode 100644 models/license_plate_detection_yunet/lpd_yunet.py delete mode 100644 models/object_detection_nanodet/CMakeLists.txt delete mode 100644 models/object_detection_nanodet/LICENSE delete mode 100644 models/object_detection_nanodet/README.md delete mode 100644 models/object_detection_nanodet/demo.cpp delete mode 100644 models/object_detection_nanodet/demo.py delete mode 100644 
models/object_detection_nanodet/example_outputs/1_res.jpg delete mode 100644 models/object_detection_nanodet/example_outputs/2_res.jpg delete mode 100644 models/object_detection_nanodet/example_outputs/3_res.jpg delete mode 100644 models/object_detection_nanodet/example_outputs/WebCamR.gif delete mode 100644 models/object_detection_nanodet/nanodet.py delete mode 100644 models/object_detection_nanodet/object_detection_nanodet_2022nov.onnx delete mode 100644 models/object_detection_nanodet/object_detection_nanodet_2022nov_int8.onnx delete mode 100644 models/object_detection_nanodet/object_detection_nanodet_2022nov_int8bq.onnx delete mode 100644 models/object_detection_yolox/CMakeLists.txt delete mode 100644 models/object_detection_yolox/LICENSE delete mode 100644 models/object_detection_yolox/demo.cpp delete mode 100644 models/object_detection_yolox/demo.py delete mode 100644 models/object_detection_yolox/example_outputs/1_res.jpg delete mode 100644 models/object_detection_yolox/example_outputs/2_res.jpg delete mode 100644 models/object_detection_yolox/example_outputs/3_res.jpg delete mode 100644 models/object_detection_yolox/object_detection_yolox_2022nov.onnx delete mode 100644 models/object_detection_yolox/object_detection_yolox_2022nov_int8.onnx delete mode 100644 models/object_detection_yolox/object_detection_yolox_2022nov_int8bq.onnx delete mode 100644 models/object_detection_yolox/yolox.py delete mode 100644 models/object_tracking_vittrack/CMakeLists.txt delete mode 100644 models/object_tracking_vittrack/LICENSE delete mode 100644 models/object_tracking_vittrack/demo.cpp delete mode 100644 models/object_tracking_vittrack/demo.py delete mode 100644 models/object_tracking_vittrack/example_outputs/vittrack_demo.gif delete mode 100644 models/object_tracking_vittrack/object_tracking_vittrack_2023sep.onnx delete mode 100644 models/object_tracking_vittrack/object_tracking_vittrack_2023sep_int8bq.onnx delete mode 100644 models/object_tracking_vittrack/vittrack.py 
delete mode 100644 models/optical_flow_estimation_raft/BSD-3-LICENSE.txt delete mode 100644 models/optical_flow_estimation_raft/MITLICENSE.txt delete mode 100644 models/optical_flow_estimation_raft/README.md delete mode 100644 models/optical_flow_estimation_raft/demo.py delete mode 100644 models/optical_flow_estimation_raft/example_outputs/result.jpg delete mode 100644 models/optical_flow_estimation_raft/example_outputs/vis.png delete mode 100644 models/optical_flow_estimation_raft/optical_flow_estimation_raft_2023aug.onnx delete mode 100644 models/optical_flow_estimation_raft/optical_flow_estimation_raft_2023aug_int8bq.onnx delete mode 100644 models/optical_flow_estimation_raft/raft.py delete mode 100644 models/palm_detection_mediapipe/CMakeLists.txt delete mode 100644 models/palm_detection_mediapipe/LICENSE delete mode 100644 models/palm_detection_mediapipe/README.md delete mode 100644 models/palm_detection_mediapipe/demo.cpp delete mode 100644 models/palm_detection_mediapipe/demo.py delete mode 100644 models/palm_detection_mediapipe/example_outputs/mppalmdet_demo.gif delete mode 100644 models/palm_detection_mediapipe/mp_palmdet.py delete mode 100644 models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb.onnx delete mode 100644 models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb_int8.onnx delete mode 100644 models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb_int8bq.onnx delete mode 100644 models/person_detection_mediapipe/CMakeLists.txt delete mode 100644 models/person_detection_mediapipe/LICENSE delete mode 100644 models/person_detection_mediapipe/README.md delete mode 100644 models/person_detection_mediapipe/demo.cpp delete mode 100644 models/person_detection_mediapipe/demo.py delete mode 100644 models/person_detection_mediapipe/example_outputs/mppersondet_demo.webp delete mode 100644 models/person_detection_mediapipe/mp_persondet.py delete mode 100644 models/person_detection_mediapipe/person_detection_mediapipe_2023mar.onnx 
delete mode 100644 models/person_detection_mediapipe/person_detection_mediapipe_2023mar_int8bq.onnx delete mode 100644 models/person_reid_youtureid/CMakeLists.txt delete mode 100644 models/person_reid_youtureid/LICENSE delete mode 100644 models/person_reid_youtureid/README.md delete mode 100644 models/person_reid_youtureid/demo.cpp delete mode 100644 models/person_reid_youtureid/demo.py delete mode 100644 models/person_reid_youtureid/person_reid_youtu_2021nov.onnx delete mode 100644 models/person_reid_youtureid/person_reid_youtu_2021nov_int8.onnx delete mode 100644 models/person_reid_youtureid/person_reid_youtu_2021nov_int8bq.onnx delete mode 100644 models/person_reid_youtureid/youtureid.py delete mode 100644 models/pose_estimation_mediapipe/CMakeLists.txt delete mode 100644 models/pose_estimation_mediapipe/LICENSE delete mode 100644 models/pose_estimation_mediapipe/README.md delete mode 100644 models/pose_estimation_mediapipe/demo.cpp delete mode 100644 models/pose_estimation_mediapipe/demo.py delete mode 100644 models/pose_estimation_mediapipe/example_outputs/mpposeest_demo.webp delete mode 100644 models/pose_estimation_mediapipe/example_outputs/pose_landmarks.png delete mode 100644 models/pose_estimation_mediapipe/mp_pose.py delete mode 100644 models/pose_estimation_mediapipe/pose_estimation_mediapipe_2023mar.onnx delete mode 100644 models/pose_estimation_mediapipe/pose_estimation_mediapipe_2023mar_int8bq.onnx delete mode 100644 models/qrcode_wechatqrcode/CMakeLists.txt delete mode 100644 models/qrcode_wechatqrcode/LICENSE delete mode 100644 models/qrcode_wechatqrcode/README.md delete mode 100644 models/qrcode_wechatqrcode/demo.cpp delete mode 100644 models/qrcode_wechatqrcode/demo.py delete mode 100644 models/qrcode_wechatqrcode/detect_2021nov.caffemodel delete mode 100644 models/qrcode_wechatqrcode/detect_2021nov.prototxt delete mode 100644 models/qrcode_wechatqrcode/example_outputs/wechat_qrcode_demo.gif delete mode 100644 
models/qrcode_wechatqrcode/sr_2021nov.caffemodel delete mode 100644 models/qrcode_wechatqrcode/sr_2021nov.prototxt delete mode 100644 models/qrcode_wechatqrcode/wechatqrcode.py delete mode 100644 models/text_detection_ppocr/CMakeLists.txt delete mode 100644 models/text_detection_ppocr/LICENSE delete mode 100644 models/text_detection_ppocr/README.md delete mode 100644 models/text_detection_ppocr/demo.cpp delete mode 100644 models/text_detection_ppocr/demo.py delete mode 100644 models/text_detection_ppocr/example_outputs/gsoc.jpg delete mode 100644 models/text_detection_ppocr/example_outputs/mask.jpg delete mode 100644 models/text_detection_ppocr/ppocr_det.py delete mode 100644 models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may.onnx delete mode 100644 models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may_int8.onnx delete mode 100644 models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may_int8bq.onnx delete mode 100644 models/text_detection_ppocr/text_detection_en_ppocrv3_2023may.onnx delete mode 100644 models/text_detection_ppocr/text_detection_en_ppocrv3_2023may_int8.onnx delete mode 100644 models/text_detection_ppocr/text_detection_en_ppocrv3_2023may_int8bq.onnx delete mode 100644 models/text_recognition_crnn/CMakeLists.txt delete mode 100644 models/text_recognition_crnn/LICENSE delete mode 100644 models/text_recognition_crnn/README.md delete mode 100644 models/text_recognition_crnn/charset_32_94_3944.h delete mode 100644 models/text_recognition_crnn/crnn.py delete mode 100644 models/text_recognition_crnn/demo.cpp delete mode 100644 models/text_recognition_crnn/demo.py delete mode 100644 models/text_recognition_crnn/example_outputs/CRNNCTC.gif delete mode 100644 models/text_recognition_crnn/example_outputs/demo.jpg delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_CH_2021sep.onnx delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_CH_2021sep_int8bq.onnx delete mode 100644 
models/text_recognition_crnn/text_recognition_CRNN_CH_2022oct_int8.onnx delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_CH_2023feb_fp16.onnx delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov.onnx delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov_int8.onnx delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov_int8bq.onnx delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_EN_2021sep.onnx delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_EN_2022oct_int8.onnx delete mode 100644 models/text_recognition_crnn/text_recognition_CRNN_EN_2023feb_fp16.onnx delete mode 100644 reports/2023-4.9.0/assets/benchmark_table_4.9.0.png delete mode 100644 reports/2023-4.9.0/opencv_zoo_report-cn-2023-4.9.0.md delete mode 100644 reports/2023-4.9.0/opencv_zoo_report-en-2023-4.9.0.md delete mode 100644 reports/README.md delete mode 100644 tools/eval/README.md delete mode 100644 tools/eval/datasets/__init__.py delete mode 100644 tools/eval/datasets/icdar.py delete mode 100644 tools/eval/datasets/iiit5k.py delete mode 100644 tools/eval/datasets/imagenet.py delete mode 100644 tools/eval/datasets/lfw.py delete mode 100644 tools/eval/datasets/lfw_face_bboxes.npy delete mode 100644 tools/eval/datasets/minisupervisely.py delete mode 100644 tools/eval/datasets/widerface.py delete mode 100644 tools/eval/eval.py delete mode 100644 tools/quantize/README.md delete mode 100644 tools/quantize/block_quantize.py delete mode 100644 tools/quantize/inc_configs/fer.yaml delete mode 100644 tools/quantize/inc_configs/lpd_yunet.yaml delete mode 100644 tools/quantize/inc_configs/mobilenet.yaml delete mode 100644 tools/quantize/inc_configs/mp_handpose.yaml delete mode 100644 tools/quantize/quantize-inc.py delete mode 100644 tools/quantize/quantize-ort.py delete mode 100644 tools/quantize/requirements.txt delete mode 100644 tools/quantize/transform.py diff --git 
a/.gitattributes b/.gitattributes deleted file mode 100644 index d9728021..00000000 --- a/.gitattributes +++ /dev/null @@ -1,23 +0,0 @@ -# Caffe -*.caffemodel filter=lfs diff=lfs merge=lfs -text - -# Tensorflow -*.pb filter=lfs diff=lfs merge=lfs -text -*.pbtxt filter=lfs diff=lfs merge=lfs -text - -# Torch -*.t7 filter=lfs diff=lfs merge=lfs -text -*.net filter=lfs diff=lfs merge=lfs -text - -# Darknet -*.weights filter=lfs diff=lfs merge=lfs -text - -# ONNX -*.onnx filter=lfs diff=lfs merge=lfs -text - -# Images -*.jpg filter=lfs diff=lfs merge=lfs -text -*.gif filter=lfs diff=lfs merge=lfs -text -*.png filter=lfs diff=lfs merge=lfs -text -*.webp filter=lfs diff=lfs merge=lfs -text - diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 2df6ebfd..00000000 --- a/.gitignore +++ /dev/null @@ -1,9 +0,0 @@ -*.pyc -**/__pycache__ -**/__pycache__/** - -.vscode - -build/ -**/build -**/build/** diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 261eeb9e..00000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/README.md b/README.md index 9c747cc5..8a5f7780 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,9 @@ Guidelines: git lfs install git lfs pull ``` +- **Alternative (Manual Download)**: + If `git lfs pull` fails or you prefer manual download, you can find all models hosted on **[Hugging Face](https://huggingface.co/opencv/opencv_zoo)**. + Simply navigate to the `models` directory on Hugging Face and download the `.onnx` files you need. - To run benchmarks on your hardware settings, please refer to [benchmark/README](./benchmark/README.md). ## Models & Benchmark Results diff --git a/benchmark/README.md b/benchmark/README.md deleted file mode 100644 index a4bb9c8f..00000000 --- a/benchmark/README.md +++ /dev/null @@ -1,987 +0,0 @@ -# OpenCV Zoo Benchmark - -Benchmarking the speed of OpenCV DNN inferring different models in the zoo. Result of each model includes the time of its preprocessing, inference and postprocessing stages. - -Data for benchmarking will be downloaded and loaded in [data](./data) based on given config. - -## Preparation - -1. Install `python >= 3.6`. -2. Install dependencies: `pip install -r requirements.txt`. -3. Download data for benchmarking. - 1. Download all data: `python download_data.py` - 2. Download one or more specified data: `python download_data.py face text`. Available names can be found in `download_data.py`. - 3. 
You can also download all data from https://pan.baidu.com/s/18sV8D4vXUb2xC9EG45k7bg (code: pvrw). Please place and extract data packages under [./data](./data). - -## Benchmarking - -**Linux**: - -```shell -export PYTHONPATH=$PYTHONPATH:.. - -# Single config -python benchmark.py --cfg ./config/face_detection_yunet.yaml - -# All configs -python benchmark.py --all - -# All configs but only fp32 models (--fp32, --fp16, --int8 --int8bq are available for now) -python benchmark.py --all --fp32 - -# All configs but exclude some of them (fill with config name keywords, not sensitive to upper/lower case, seperate with colons) -python benchmark.py --all --cfg_exclude wechat -python benchmark.py --all --cfg_exclude wechat:crnn - -# All configs but exclude some of the models (fill with exact model names, sensitive to upper/lower case, seperate with colons) -python benchmark.py --all --model_exclude license_plate_detection_lpd_yunet_2023mar_int8.onnx:human_segmentation_pphumanseg_2023mar_int8.onnx - -# All configs with overwritten backend and target (run with --help to get available combinations) -python benchmark.py --all --cfg_overwrite_backend_target 1 -``` - -**Windows**: -- CMD - ```shell - set PYTHONPATH=%PYTHONPATH%;.. - python benchmark.py --cfg ./config/face_detection_yunet.yaml - ``` - -- PowerShell - ```shell - $env:PYTHONPATH=$env:PYTHONPATH+";.." - python benchmark.py --cfg ./config/face_detection_yunet.yaml - ``` - -## Detailed Results - -Benchmark is done with latest opencv-python & opencv-contrib-python (current 4.10.0) on the following platforms. Some models are excluded because of support issues. 
- -### Intel 12700K - -Specs: [details](https://www.intel.com/content/www/us/en/products/sku/134594/intel-core-i712700k-processor-25m-cache-up-to-5-00-ghz/specifications.html) -- CPU: 8 Performance-cores, 4 Efficient-cores, 20 threads - - Performance-core: 3.60 GHz base freq, turbo up to 4.90 GHz - - Efficient-core: 2.70 GHz base freq, turbo up to 3.80 GHz - -CPU: - -``` -$ python3 benchmark.py --all -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -0.69 0.70 0.68 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -0.79 0.80 0.68 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -5.09 5.13 4.96 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -6.50 6.79 4.96 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -1.79 1.76 1.75 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -2.92 3.11 1.75 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -2.40 2.43 2.37 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -3.11 3.15 2.37 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -5.59 5.56 5.28 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -6.07 6.22 5.28 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -3.13 3.14 3.05 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -3.04 3.02 2.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -3.46 3.03 2.92 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -3.84 3.77 2.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -19.47 19.47 19.08 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -21.52 21.86 19.08 [224, 224] PPResNet with 
['image_classification_ppresnet50_2022jan_int8.onnx'] -5.68 5.66 5.51 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -7.41 7.36 5.51 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -41.02 40.99 40.86 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -42.23 42.30 40.86 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -78.77 79.76 77.16 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -75.69 75.58 72.57 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -4.01 3.84 3.79 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -5.35 5.41 5.22 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -6.73 6.85 5.22 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -7.65 7.65 7.55 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -15.56 15.57 15.10 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -16.67 16.57 15.10 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -6.33 6.63 6.14 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -1.19 1.30 1.07 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] -18.76 19.59 18.48 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -18.59 19.33 18.12 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -22.05 18.60 18.12 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -24.47 25.06 18.12 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -10.61 10.66 10.50 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -11.03 11.23 10.50 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -9.85 11.62 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -10.02 9.71 7.74 [1280, 720] CRNN with 
['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -9.53 7.83 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -9.68 9.21 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -9.85 10.63 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -9.63 9.28 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### Raspberry Pi 4B - -Specs: [details](https://www.raspberrypi.com/products/raspberry-pi-4-model-b/specifications/) -- CPU: Broadcom BCM2711, Quad core Cortex-A72 (ARM v8) 64-bit SoC @ 1.5 GHz. - -CPU: - -``` -$ python3 benchmark.py --all -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -6.23 6.27 6.18 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -6.68 6.73 6.18 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -68.82 69.06 68.45 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -87.42 89.84 68.45 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -27.81 27.77 27.67 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -35.71 36.67 27.67 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -42.58 42.41 42.25 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -46.49 46.95 42.25 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -71.35 71.62 70.78 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -73.81 74.23 70.78 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -64.20 64.30 63.98 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -57.91 58.41 52.53 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -61.35 52.83 52.53 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -61.49 
61.28 52.53 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -420.93 420.73 419.04 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -410.96 395.74 364.68 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -153.87 152.71 140.85 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -157.86 145.90 140.85 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -214.59 211.95 210.98 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -215.09 238.39 208.18 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -1614.13 1639.80 1476.58 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -1597.92 1599.12 1476.58 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -48.55 46.87 41.75 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -97.05 95.40 80.93 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -112.39 116.22 80.93 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -105.60 113.27 88.55 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -478.89 498.05 444.14 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -442.56 477.87 369.59 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -116.15 120.13 106.81 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -5.90 5.90 5.81 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] -325.02 325.88 303.55 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -323.54 332.45 303.55 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -372.32 328.56 303.55 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -407.90 411.97 303.55 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] 
-235.70 236.07 234.87 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -240.95 241.14 234.87 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -226.09 247.02 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -229.25 224.63 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -224.10 201.29 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -223.58 219.82 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -225.60 243.89 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -220.97 223.16 193.91 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### Jetson Nano B01 - -Specs: [details](https://developer.nvidia.com/embedded/jetson-nano-developer-kit) -- CPU: Quad-core ARM A57 @ 1.43 GHz -- GPU: 128-core NVIDIA Maxwell - -CPU: - -``` -$ python3 benchmark.py --all -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -5.62 5.54 5.52 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -6.14 6.24 5.52 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -64.80 64.95 64.60 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -78.31 79.85 64.60 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -26.54 26.61 26.37 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -33.96 34.85 26.37 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -38.45 41.45 38.20 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -42.62 43.20 38.20 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -64.95 64.85 64.73 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -72.39 73.16 64.73 [192, 192] PPHumanSeg with 
['human_segmentation_pphumanseg_2023mar_int8.onnx'] -65.72 65.98 65.59 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -56.66 57.56 49.10 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -62.09 49.27 49.10 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -62.17 62.02 49.10 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -346.78 348.06 345.53 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -371.11 373.54 345.53 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -134.36 134.33 133.45 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -140.62 140.94 133.45 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -215.67 216.76 214.69 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -216.58 216.78 214.69 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -1209.12 1213.05 1201.68 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -1240.02 1249.95 1201.68 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -48.39 47.38 45.00 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -75.30 75.25 74.96 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -83.83 84.99 74.96 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -87.65 87.59 87.37 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -356.78 357.77 355.69 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -346.84 351.10 335.96 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -75.20 79.36 73.71 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -5.56 5.56 5.48 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] -209.80 210.04 208.84 [640, 480] 
PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -209.60 212.74 208.49 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -254.56 211.17 208.49 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -286.57 296.56 208.49 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -252.60 252.48 252.21 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -259.28 261.38 252.21 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -245.18 266.94 220.49 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -247.72 244.25 220.49 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -241.63 221.43 219.06 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -243.46 238.98 219.06 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -246.87 256.05 219.06 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -243.37 238.90 219.06 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -GPU (CUDA-FP32): - -``` -$ python3 benchmark.py --all --fp32 --cfg_exclude wechat --cfg_overwrite_backend_target 1 -Benchmarking ... 
-backend=cv.dnn.DNN_BACKEND_CUDA -target=cv.dnn.DNN_TARGET_CUDA -mean median min input size model -10.99 10.71 9.64 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -25.25 25.81 24.54 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -13.97 14.01 13.72 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -24.47 24.36 23.69 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -67.25 67.99 64.90 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -28.96 28.92 28.85 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -28.61 28.45 27.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -98.80 100.11 94.57 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -54.88 56.51 52.78 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -63.86 63.59 63.35 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -371.32 374.79 367.78 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -47.26 45.56 44.69 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -37.61 37.61 33.64 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -37.39 37.71 37.03 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -90.84 91.34 85.77 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -76.44 78.00 74.90 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -112.68 112.21 110.42 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -112.48 111.86 110.04 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -43.99 43.33 41.68 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -44.97 44.42 41.68 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -36.77 46.38 21.77 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -``` - -GPU (CUDA-FP16): - 
-``` -$ python3 benchmark.py --all --fp32 --cfg_exclude wechat --cfg_overwrite_backend_target 2 -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_CUDA -target=cv.dnn.DNN_TARGET_CUDA_FP16 -mean median min input size model -25.05 25.05 24.95 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -117.82 126.96 113.17 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -88.54 88.33 88.04 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -97.43 97.38 96.98 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -69.40 68.28 66.36 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -120.92 131.57 119.37 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -128.43 128.08 119.37 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -64.90 63.88 62.81 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -370.21 371.97 366.38 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -164.28 164.75 162.94 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -299.22 300.54 295.64 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -49.61 47.58 47.14 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -149.50 151.12 147.24 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -156.59 154.01 153.92 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -43.66 43.64 43.31 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -75.87 77.33 74.38 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -428.97 428.99 426.11 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -428.66 427.46 425.66 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -32.41 31.90 31.68 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -33.42 35.75 31.68 [1280, 720] CRNN with 
['text_recognition_CRNN_CN_2021nov.onnx'] -29.34 36.44 21.27 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -``` - -### Khadas VIM3 - -Specs: [details](https://www.khadas.com/vim3) -- (SoC) CPU: Amlogic A311D, 2.2 GHz Quad core ARM Cortex-A73 and 1.8 GHz dual core Cortex-A53 -- NPU: 5 TOPS Performance NPU INT8 inference up to 1536 MAC Supports all major deep learning frameworks including TensorFlow and Caffe - -CPU: - -``` -$ python3 benchmark.py --all --cfg_exclude wechat -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -4.62 4.62 4.53 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -5.24 5.29 4.53 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -55.04 54.55 53.54 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -67.34 67.96 53.54 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -29.50 45.62 26.14 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -35.59 36.22 26.14 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -35.80 35.08 34.76 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -40.32 45.32 34.76 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -71.92 66.92 62.98 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -70.68 72.31 62.98 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -59.27 53.91 52.09 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -52.17 67.58 41.23 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -55.44 47.28 41.23 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -55.83 56.80 41.23 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -335.75 329.39 325.42 [224, 224] PPResNet with 
['image_classification_ppresnet50_2022jan.onnx'] -340.42 335.78 325.42 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -128.58 127.15 124.03 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -125.85 126.47 110.14 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -179.93 170.66 166.76 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -178.61 213.72 164.61 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -1108.12 1100.93 1072.45 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -1100.58 1121.31 982.74 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -32.20 32.84 30.99 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -78.26 78.96 75.60 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -87.18 88.22 75.60 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -83.22 84.20 80.07 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -327.07 339.80 321.98 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -316.56 302.60 269.10 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -75.38 73.67 70.15 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -211.02 213.14 199.28 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -210.19 217.15 199.28 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -242.34 225.59 199.28 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -265.33 271.87 199.28 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -194.77 195.13 192.69 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -197.16 200.94 192.69 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -185.45 199.47 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -187.64 180.57 161.37 [1280, 720] CRNN with 
['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -182.53 166.96 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -182.90 178.97 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -184.26 194.43 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -180.65 180.59 155.36 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -NPU (TIMVX): - -``` -$ python3 benchmark.py --all --int8 --cfg_overwrite_backend_target 3 -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_TIMVX -target=cv.dnn.DNN_TARGET_NPU -mean median min input size model -5.24 7.45 4.77 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -45.96 46.10 43.21 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -30.25 30.30 28.68 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -19.75 20.18 18.19 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -28.75 28.85 28.47 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -148.80 148.85 143.45 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -143.17 141.11 136.58 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -73.19 78.57 62.89 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -32.11 30.50 29.97 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -116.32 120.72 99.40 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -408.18 418.89 374.12 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -37.34 38.57 32.03 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -41.82 39.84 37.63 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -160.70 160.90 153.15 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -160.47 160.48 151.88 [640, 
480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -239.38 237.47 231.95 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -197.61 201.16 162.69 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -196.69 164.78 162.69 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### Atlas 200 DK - -Specs: [details_en](https://e.huawei.com/uk/products/cloud-computing-dc/atlas/atlas-200), [details_cn](https://www.hiascend.com/zh/hardware/developer-kit) -- (SoC) CPU: 8-core Coretext-A55 @ 1.6 GHz (max) -- NPU: Ascend 310, dual DaVinci AI cores, 22/16/8 TOPS INT8. - -CPU: - -``` -$ python3 benchmark.py --all --cfg_exclude wechat -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -7.82 7.82 7.77 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -8.57 8.77 7.77 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -92.21 92.11 91.87 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -122.07 126.02 91.87 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -42.93 43.26 42.75 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -55.91 57.40 42.75 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -67.85 67.91 67.47 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -70.06 70.21 67.47 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -102.49 102.65 102.10 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -114.02 116.16 102.10 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -92.66 92.49 92.36 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -79.39 80.75 68.47 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -89.66 68.66 68.47 [224, 224] 
MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -90.59 92.13 68.47 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -499.55 500.15 498.36 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -571.85 580.88 498.36 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -201.99 201.55 200.62 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -216.72 217.34 200.62 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -313.66 313.85 312.13 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -322.98 323.45 312.13 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -1875.33 1877.53 1871.26 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -1989.04 2005.25 1871.26 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -143.62 143.19 137.16 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -159.80 159.62 159.40 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -152.18 152.86 145.56 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -145.83 145.77 145.45 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -521.46 521.66 520.28 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -541.50 544.02 520.28 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -134.02 136.01 132.06 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -360.26 360.82 359.13 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -361.22 361.51 359.13 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -427.85 362.87 359.13 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -475.44 490.06 359.13 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -285.19 284.91 284.69 [1280, 720] CRNN with 
['text_recognition_CRNN_CH_2021sep.onnx'] -318.96 323.30 284.69 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -289.82 360.87 244.07 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -285.40 303.13 244.07 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -274.67 244.47 243.87 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -277.84 262.99 243.87 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -283.02 280.77 243.87 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -279.21 262.55 243.87 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -NPU (CANN): - - - -``` -$ python3 benchmark.py --all --fp32 --cfg_exclude wechat:crnn:vittrack --model_exclude pose_estimation_mediapipe_2023mar.onnx --cfg_overwrite_backend_target 4 -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_CANN -target=cv.dnn.DNN_TARGET_NPU -mean median min input size model -2.24 2.21 2.19 [160, 120] YuNet with ['face_detection_yunet_2022mar.onnx'] -2.66 2.66 2.64 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -2.19 2.19 2.16 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -6.27 6.22 6.17 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -6.94 6.94 6.85 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -5.15 5.13 5.10 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -5.41 5.42 5.10 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -6.99 6.99 6.95 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -7.63 7.64 7.43 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -20.62 22.09 19.16 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -28.59 28.60 27.91 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -5.17 5.26 5.09 [192, 192] MPPalmDet with 
['palm_detection_mediapipe_2023feb.onnx'] -16.45 16.44 16.31 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -5.58 5.57 5.54 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -``` - -### Toybrick RV1126 - -Specs: [details](https://t.rock-chips.com/en/portal.php?mod=view&aid=26) -- CPU: Quard core ARM Cortex-A7, up to 1.5GHz -- NPU (Not supported by OpenCV): 2.0TOPS, support 8bit / 16bit - -CPU: - -``` -$ python3 benchmark.py --all --cfg_exclude wechat -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -56.78 56.74 56.46 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -51.16 51.41 45.18 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -1737.74 1733.23 1723.65 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -1298.48 1336.02 920.44 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -609.51 611.79 584.89 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -500.21 517.38 399.97 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -465.12 471.89 445.36 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -389.95 385.01 318.29 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -10.16.66.1781623.94 1607.90 1595.09 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -1109.61 1186.03 671.15 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -1567.09 1578.61 1542.75 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -1188.83 1219.46 850.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -996.30 884.80 689.11 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -849.51 805.93 507.78 [224, 224] MobileNet with 
['image_classification_mobilenetv2_2022apr_int8.onnx'] -11855.64 11836.80 11750.10 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -7752.60 8149.00 4429.83 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -3260.22 3251.14 3204.85 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -2287.10 2400.53 1482.04 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -2335.89 2335.93 2313.63 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -1899.16 1945.72 1529.46 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -37600.81 37558.85 37414.98 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -24185.35 25519.27 13395.47 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -411.41 448.29 397.86 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -905.77 890.22 866.06 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -780.94 817.69 653.26 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -1315.48 1321.44 1299.68 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -11143.23 11155.05 11105.11 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -7056.60 7457.76 3753.42 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -736.02 732.90 701.14 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -4267.03 4288.42 4229.69 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -4265.58 4276.54 4222.22 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -3678.65 4265.95 2636.57 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -3383.73 3490.66 2636.57 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -2180.44 2197.45 2152.67 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -2217.08 2241.77 2152.67 [1280, 720] CRNN with 
['text_recognition_CRNN_CN_2021nov.onnx'] -2217.15 2251.65 2152.67 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -2206.73 2219.60 2152.63 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -2208.84 2219.14 2152.63 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -2035.98 2185.05 1268.94 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -1927.93 2178.84 1268.94 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -1822.23 2213.30 1183.93 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### Khadas Edge2 (with RK3588) - -Board specs: [details](https://www.khadas.com/edge2) -SoC specs: [details](https://www.rock-chips.com/a/en/products/RK35_Series/2022/0926/1660.html) -- CPU: 2.25GHz Quad Core ARM Cortex-A76 + 1.8GHz Quad Core Cortex-A55 -- NPU (Not supported by OpenCV): Build-in 6 TOPS Performance NPU, triple core, support int4 / int8 / int16 / fp16 / bf16 / tf32 - -CPU: - -``` -$ python3 benchmark.py --all --cfg_exclude wechat -Benchmarking ... 
-backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -2.30 2.29 2.26 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -2.70 2.73 2.26 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -28.94 29.00 28.60 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -37.46 38.85 28.60 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -12.44 12.40 12.36 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -17.14 17.64 12.36 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -20.22 20.36 20.08 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -23.11 23.50 20.08 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -29.63 29.78 28.61 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -35.57 35.61 28.61 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -27.45 27.46 27.25 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -22.95 23.37 19.13 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -27.50 19.40 19.13 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -28.46 29.33 19.13 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -151.10 151.79 146.96 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -181.69 184.19 146.96 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -53.83 52.64 50.24 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -60.95 60.06 50.24 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -98.03 104.53 83.47 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -106.91 110.68 83.47 [416, 416] NanoDet with 
['object_detection_nanodet_2022nov_int8.onnx'] -554.30 550.32 538.99 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -591.95 599.62 538.99 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -14.02 13.89 13.56 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -45.03 44.65 43.28 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -50.87 52.24 43.28 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -42.90 42.68 42.40 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -148.01 146.42 139.56 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -159.16 155.98 139.56 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -37.06 37.43 36.39 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -103.42 104.24 101.26 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -103.41 104.41 100.08 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -126.21 103.90 100.08 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -142.53 147.66 100.08 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -69.49 69.52 69.17 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -70.63 70.69 69.17 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -67.15 72.03 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -67.74 66.72 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -66.26 61.46 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -67.36 65.65 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -68.52 69.93 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -68.36 65.65 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### Horizon Sunrise X3 PI - -Specs: [details_cn](https://developer.horizon.ai/sunrise) -- CPU: ARM 
Cortex-A53,4xCore, 1.2G -- BPU (aka NPU, not supported by OpenCV): (Bernoulli Arch) 2×Core,up to 1.0G, ~5Tops - -CPU: - -``` -$ python3 benchmark.py --all -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -10.56 10.69 10.46 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -12.45 12.60 10.46 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -124.80 127.36 124.45 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -168.67 174.03 124.45 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -55.12 55.38 54.91 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -76.31 79.00 54.91 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -77.44 77.53 77.07 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -89.22 90.40 77.07 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -132.95 133.21 132.35 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -147.40 149.99 132.35 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -119.71 120.69 119.32 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -102.57 104.40 88.49 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -114.56 88.81 88.49 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -117.12 116.07 88.49 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -653.39 653.85 651.99 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -706.43 712.61 651.99 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -252.05 252.16 250.98 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -273.03 274.27 250.98 [320, 240] LPD_YuNet with 
['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -399.35 405.40 390.82 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -413.37 410.75 390.82 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -2516.91 2516.82 2506.54 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -2544.65 2551.55 2506.54 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -84.15 85.18 77.31 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -168.54 169.05 168.15 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -196.46 199.81 168.15 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -172.55 172.83 171.85 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -678.74 678.04 677.44 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -653.71 655.74 631.68 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -162.87 165.82 160.04 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -9.93 9.97 9.82 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] -475.98 475.34 472.72 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -475.90 477.57 472.44 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -585.72 475.98 472.44 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -663.34 687.10 472.44 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -446.82 445.92 444.32 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -453.60 456.07 444.32 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -427.47 463.88 381.10 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -432.15 421.18 381.10 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -420.61 386.28 380.35 [1280, 720] CRNN with 
['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -425.24 426.69 380.35 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -431.14 447.85 380.35 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -424.77 417.01 380.35 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### MAIX-III AX-PI - -Specs: [details_en](https://wiki.sipeed.com/hardware/en/maixIII/ax-pi/axpi.html#Hardware), [details_cn](https://wiki.sipeed.com/hardware/zh/maixIII/ax-pi/axpi.html#%E7%A1%AC%E4%BB%B6%E5%8F%82%E6%95%B0) -SoC specs: [details_cn](https://axera-tech.com/product/T7297367876123493768) -- CPU: Quad cores ARM Cortex-A7 -- NPU (Not supported by OpenCV): 14.4Tops@int4,3.6Tops@int8 - -CPU: - -``` -$ python3 benchmark.py --all --cfg_exclude wechat -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -83.95 83.76 83.62 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -79.35 79.92 75.47 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -2326.96 2326.49 2326.08 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -1950.83 1988.86 1648.47 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -823.42 823.35 822.50 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -750.31 757.91 691.41 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -664.73 664.61 663.84 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -596.29 603.96 540.72 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -2175.34 2173.62 2172.91 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -1655.11 1705.43 1236.22 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -2123.08 2122.92 2122.18 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] 
-1619.08 1672.32 1215.05 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -1470.74 1216.86 1215.05 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -1287.09 1242.01 873.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -15841.89 15841.20 15828.32 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -11652.03 12079.50 8299.15 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -4371.75 4396.81 4370.29 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -3428.89 3521.87 2670.46 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -3421.19 3412.22 3411.20 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -2990.22 3034.11 2645.09 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -50633.38 50617.44 50614.78 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -36260.23 37731.28 24683.40 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -548.36 551.97 537.90 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -1285.54 1285.40 1284.43 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -1204.04 1211.89 1137.65 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -1849.87 1848.78 1847.80 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -14895.99 14894.27 14884.17 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -10496.44 10931.97 6976.60 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -1045.98 1052.05 1040.56 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -5899.23 5900.08 5896.73 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -5889.39 5890.58 5878.81 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -5436.61 5884.03 4665.77 [640, 480] PPOCRDet with 
['text_detection_cn_ppocrv3_2023may_int8.onnx'] -5185.53 5273.76 4539.47 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -3230.95 3226.14 3225.53 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -3281.31 3295.46 3225.53 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -3247.56 3337.52 3196.25 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -3243.20 3276.35 3196.25 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -3230.49 3196.80 3195.02 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -3065.33 3217.99 2348.42 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -2976.24 3244.75 2348.42 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -2864.72 3219.46 2208.44 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### StarFive VisionFive 2 - -Specs: [details_cn](https://doc.rvspace.org/VisionFive2/PB/VisionFive_2/specification_pb.html), [details_en](https://doc-en.rvspace.org/VisionFive2/Product_Brief/VisionFive_2/specification_pb.html) -- CPU: StarFive JH7110 with RISC-V quad-core CPU with 2 MB L2 cache and a monitor core, supporting RV64GC ISA, working up to 1.5 GHz -- GPU: IMG BXE-4-32 MC1 with work frequency up to 600 MHz - -CPU: - -``` -$ python3 benchmark.py --all --cfg_exclude wechat -Benchmarking ... 
-backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -41.13 41.07 41.06 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -37.43 37.83 34.35 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -1169.96 1169.72 1168.74 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -887.13 987.00 659.71 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -423.91 423.98 423.62 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -350.89 358.26 292.27 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -319.69 319.26 318.76 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -278.74 282.75 245.22 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -1127.61 1127.36 1127.17 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -785.44 819.07 510.77 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -1079.69 1079.66 1079.31 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -820.15 845.54 611.26 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -698.13 612.64 516.41 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -600.12 564.13 382.59 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -8116.21 8127.96 8113.70 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -5408.02 5677.71 3240.16 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -2267.96 2268.26 2266.59 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -1605.80 1671.91 1073.50 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -1731.61 1733.17 1730.54 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] 
-1435.43 1477.52 1196.01 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -26185.41 26190.85 26168.68 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -17019.14 17923.20 9673.68 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -288.95 290.28 260.40 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -628.64 628.47 628.27 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -562.90 569.91 509.93 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -910.38 910.94 909.64 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -7613.64 7626.26 7606.07 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -4895.28 5166.85 2716.65 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -524.52 526.33 522.71 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -2988.22 2996.51 2980.17 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -2981.84 2979.74 2975.80 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -2610.78 2979.14 1979.37 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -2425.29 2478.92 1979.37 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -1404.01 1415.46 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -1425.42 1426.51 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -1432.21 1450.47 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -1425.24 1448.27 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -1428.84 1446.76 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -1313.68 1427.46 808.70 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -1242.07 1408.93 808.70 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -1174.32 1426.07 774.78 [1280, 720] CRNN with 
['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### Khadas VIM4 - -Board specs: https://www.khadas.com/vim4, https://dl.khadas.com/products/vim4/specs/vim4-specs.pdf - -SoC specs: -- CPU: Amlogic A311D2, 2.2GHz Quad core ARM Cortex-A73 and 2.0GHz Quad core Cortex-A53 CPU, with 32-bit STM32G031K6 microprocessor. -- GPU: Mali G52MP8(8EE) 800Mhz GPU. -- NPU: 3.2 TOPS Build-in NPU (Not supported by dnn yet) - -CPU: - -``` -$ python3 benchmark.py --all --cfg_exclude wechat -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -4.27 4.33 4.17 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -4.58 4.58 4.17 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -39.94 39.98 39.42 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -49.33 50.59 39.42 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -17.28 17.63 16.93 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -22.78 23.27 16.93 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -25.83 25.46 25.30 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -28.23 28.87 25.30 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -47.68 47.72 45.65 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -49.25 49.45 45.65 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -38.73 38.18 37.89 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -33.68 33.99 29.16 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -36.22 29.50 29.16 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -36.12 35.69 29.16 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -219.81 220.21 215.97 [224, 224] PPResNet with 
['image_classification_ppresnet50_2022jan.onnx'] -224.03 222.27 215.97 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -81.46 84.07 77.95 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -81.46 83.07 77.95 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -136.14 136.12 128.61 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -136.57 136.30 128.61 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -805.54 805.23 795.82 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -768.87 766.00 727.12 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -29.47 29.39 28.49 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -54.45 54.76 53.45 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -60.84 61.07 53.45 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -57.22 57.22 56.14 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -218.22 224.50 215.54 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -199.53 203.24 179.85 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -53.06 54.61 51.82 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -148.82 149.62 146.73 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -148.91 148.99 146.59 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -175.33 150.60 146.59 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -194.12 201.48 146.59 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -133.27 132.90 132.54 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -135.27 135.12 132.54 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -127.49 137.43 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -129.18 125.95 113.82 [1280, 720] CRNN with 
['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -125.82 114.44 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -127.63 124.81 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -129.24 134.50 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -126.64 125.09 110.45 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -### Jetson Nano Orin - -Specs: https://www.nvidia.com/en-us/autonomous-machines/embedded-systems/jetson-orin/ -- CPU: 6-core Arm® Cortex®-A78AE v8.2 64-bit CPU, 1.5MB L2 + 4MB L3 -- GPU: 1024-core NVIDIA Ampere architecture GPU with 32 Tensor Cores, max freq 625MHz - -CPU: - -``` -$ python3 benchmark.py --all -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -2.59 2.62 2.50 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -2.98 2.97 2.50 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -20.05 24.76 19.75 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -31.84 32.72 19.75 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -9.15 9.22 9.04 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -14.33 15.35 9.04 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -15.00 15.17 14.80 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -18.37 18.63 14.80 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -24.86 25.09 24.12 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -30.17 34.51 24.12 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -18.47 18.55 18.23 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -17.08 17.30 15.80 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -21.26 15.89 
15.80 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -23.19 24.15 15.80 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -102.30 101.90 101.44 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -142.33 146.24 101.44 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -39.91 39.01 38.46 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -51.35 50.70 38.46 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -125.31 126.50 121.92 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -132.95 133.67 121.92 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -400.91 430.48 384.87 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -476.63 509.48 384.87 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -19.16 19.91 18.04 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -27.73 26.93 26.72 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -35.16 41.14 26.72 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -33.05 33.18 32.67 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -93.58 94.02 92.36 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -119.80 153.20 92.36 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -31.51 32.19 30.69 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -3.53 3.53 3.51 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] -78.10 77.77 77.17 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -78.03 78.38 77.17 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -99.09 79.42 77.17 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -112.82 116.06 77.17 [640, 480] 
PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -142.97 142.84 135.56 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -144.53 148.52 135.56 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -134.47 146.62 112.91 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -136.37 131.39 112.91 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -132.08 117.15 109.24 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -135.17 130.23 109.24 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -138.38 143.25 109.24 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -137.08 134.22 109.24 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` - -GPU (CUDA-FP32): - -``` -$ python3 benchmark.py --all --fp32 --cfg_exclude wechat --cfg_overwrite_backend_target 1 -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_CUDA -target=cv.dnn.DNN_TARGET_CUDA -mean median min input size model -5.23 5.27 5.17 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -7.59 7.62 7.55 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -8.48 8.46 8.37 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -12.29 13.04 11.11 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -12.91 13.28 12.79 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -8.41 8.42 8.35 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -9.36 9.43 8.35 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -32.58 32.71 31.11 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -16.33 16.08 16.04 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -24.46 24.35 24.01 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -103.28 103.41 102.37 [640, 640] YoloX with 
['object_detection_yolox_2022nov.onnx'] -19.75 19.78 19.10 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -10.84 10.76 10.75 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -14.50 14.50 14.36 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -23.53 23.36 23.16 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -26.54 27.22 25.99 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -27.49 27.80 26.97 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -27.53 27.75 26.95 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -15.66 16.30 15.41 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -15.91 15.80 15.41 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -13.58 16.70 9.48 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -``` - -GPU (CUDA-FP16): - -``` -$ python3 benchmark.py --all --fp32 --cfg_exclude wechat --cfg_overwrite_backend_target 2 -Benchmarking ... 
-backend=cv.dnn.DNN_BACKEND_CUDA -target=cv.dnn.DNN_TARGET_CUDA_FP16 -mean median min input size model -5.00 5.04 4.92 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -5.09 5.08 5.05 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -6.81 6.86 6.66 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -9.19 10.18 9.06 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -16.20 16.62 15.93 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -6.84 6.82 6.80 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -7.46 7.87 6.80 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -14.18 14.16 14.03 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -13.35 13.10 13.04 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -19.94 19.95 19.50 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -72.25 72.91 70.99 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -22.37 22.44 21.60 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -8.92 8.92 8.84 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -11.11 11.13 10.98 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -13.22 13.23 13.12 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -26.79 27.04 26.24 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -19.71 19.75 19.47 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -19.76 19.93 19.47 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -16.30 15.88 15.80 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -16.36 16.51 15.80 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -13.64 16.27 8.90 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -``` - -### Atlas 200I DK A2 - -Specs: 
https://www.hiascend.com/hardware/developer-kit-a2 (cn) -- CPU: 4 core * 1.0 GHz -- NPU: Ascend 310B, 8 TOPS INT8, 4 TFLOPS FP16 (Benchmark results are coming later) - -CPU: - -``` -$ python3 benchmark.py --all --cfg_exclude wechat -Benchmarking ... -backend=cv.dnn.DNN_BACKEND_OPENCV -target=cv.dnn.DNN_TARGET_CPU -mean median min input size model -6.67 6.80 5.17 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] -8.70 9.22 5.17 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] -78.90 81.48 74.18 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] -113.79 115.49 74.18 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] -36.94 38.64 33.23 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] -55.14 60.34 33.23 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] -56.00 55.56 51.99 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] -71.09 72.20 51.99 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] -78.01 80.36 73.97 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] -111.56 113.84 73.97 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] -70.20 68.69 65.12 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] -61.72 63.39 48.28 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] -80.12 54.37 48.28 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] -87.42 96.71 48.28 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] -417.31 417.30 406.17 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] -597.15 619.24 406.17 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] -155.73 153.40 145.10 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] -200.41 
200.24 145.10 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] -253.05 252.73 245.91 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] -274.44 269.76 245.91 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] -1407.75 1416.44 1357.23 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] -1716.25 1709.35 1357.23 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] -37.02 37.66 32.50 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] -92.56 97.78 87.87 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] -119.29 123.56 87.87 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] -90.13 90.75 87.78 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] -285.75 284.54 278.06 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] -389.02 405.12 278.06 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] -83.16 85.91 77.83 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] -219.28 220.74 214.53 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] -217.18 227.44 207.15 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] -319.73 210.22 207.15 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] -396.47 399.45 207.15 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] -165.34 172.10 156.36 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] -169.22 174.21 156.36 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] -158.82 172.23 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] -159.39 156.42 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] -155.87 146.82 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] -163.43 152.16 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] -173.46 162.85 135.52 
[1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] -175.28 145.22 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] -``` diff --git a/benchmark/benchmark.py b/benchmark/benchmark.py deleted file mode 100644 index cbe67e55..00000000 --- a/benchmark/benchmark.py +++ /dev/null @@ -1,232 +0,0 @@ -import os -import argparse - -import yaml -import numpy as np -import cv2 as cv - -from models import MODELS -from utils import METRICS, DATALOADERS - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] -backend_target_str_pairs = [ - ["cv.dnn.DNN_BACKEND_OPENCV", "cv.dnn.DNN_TARGET_CPU"], - ["cv.dnn.DNN_BACKEND_CUDA", "cv.dnn.DNN_TARGET_CUDA"], - ["cv.dnn.DNN_BACKEND_CUDA", "cv.dnn.DNN_TARGET_CUDA_FP16"], - ["cv.dnn.DNN_BACKEND_TIMVX", "cv.dnn.DNN_TARGET_NPU"], - ["cv.dnn.DNN_BACKEND_CANN", "cv.dnn.DNN_TARGET_NPU"] -] - -parser = argparse.ArgumentParser("Benchmarks for OpenCV Zoo.") -parser.add_argument('--cfg', '-c', type=str, - help='Benchmarking on the given config.') -parser.add_argument('--cfg_overwrite_backend_target', type=int, default=-1, - help='''Choose one of the backend-target pair to run this demo: - others: (default) use the one from config, - {:d}: OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) 
-parser.add_argument("--cfg_exclude", type=str, help="Configs to be excluded when using --all. Split keywords with colons (:). Not sensitive to upper/lower case.") -parser.add_argument("--model_exclude", type=str, help="Models to be excluded. Split model names with colons (:). Sensitive to upper/lower case.") -parser.add_argument("--fp32", action="store_true", help="Benchmark models of float32 precision only.") -parser.add_argument("--fp16", action="store_true", help="Benchmark models of float16 precision only.") -parser.add_argument("--int8", action="store_true", help="Benchmark models of int8 precision only.") -parser.add_argument("--int8bq", action="store_true", help="Benchmark models of blocked int8 precision only.") -parser.add_argument("--all", action="store_true", help="Benchmark all models") -args = parser.parse_args() - -def build_from_cfg(cfg, registery, key=None, name=None): - if key is not None: - obj_name = cfg.pop(key) - obj = registery.get(obj_name) - return obj(**cfg) - elif name is not None: - obj = registery.get(name) - return obj(**cfg) - else: - raise NotImplementedError() - -class Benchmark: - def __init__(self, **kwargs): - self._type = kwargs.pop('type', None) - if self._type is None: - self._type = 'Base' - print('Benchmark[\'type\'] is omitted, set to \'Base\' by default.') - - self._data_dict = kwargs.pop('data', None) - assert self._data_dict, 'Benchmark[\'data\'] cannot be empty and must have path and files.' - if 'type' in self._data_dict: - self._dataloader = build_from_cfg(self._data_dict, registery=DATALOADERS, key='type') - else: - self._dataloader = build_from_cfg(self._data_dict, registery=DATALOADERS, name=self._type) - - self._metric_dict = kwargs.pop('metric', None) - assert self._metric_dict, 'Benchmark[\'metric\'] cannot be empty.' 
- if 'type' in self._metric_dict: - self._metric = build_from_cfg(self._metric_dict, registery=METRICS, key='type') - else: - self._metric = build_from_cfg(self._metric_dict, registery=METRICS, name=self._type) - - backend_id = kwargs.pop('backend', 'default') - available_backends = dict( - default=cv.dnn.DNN_BACKEND_DEFAULT, - # halide=cv.dnn.DNN_BACKEND_HALIDE, - # inference_engine=cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, - opencv=cv.dnn.DNN_BACKEND_OPENCV, - # vkcom=cv.dnn.DNN_BACKEND_VKCOM, - cuda=cv.dnn.DNN_BACKEND_CUDA, - timvx=cv.dnn.DNN_BACKEND_TIMVX, - cann=cv.dnn.DNN_BACKEND_CANN, - ) - - target_id = kwargs.pop('target', 'cpu') - available_targets = dict( - cpu=cv.dnn.DNN_TARGET_CPU, - # opencl=cv.dnn.DNN_TARGET_OPENCL, - # opencl_fp16=cv.dnn.DNN_TARGET_OPENCL_FP16, - # myriad=cv.dnn.DNN_TARGET_MYRIAD, - # vulkan=cv.dnn.DNN_TARGET_VULKAN, - # fpga=cv.dnn.DNN_TARGET_FPGA, - cuda=cv.dnn.DNN_TARGET_CUDA, - cuda_fp16=cv.dnn.DNN_TARGET_CUDA_FP16, - # hddl=cv.dnn.DNN_TARGET_HDDL, - npu=cv.dnn.DNN_TARGET_NPU, - ) - - self._backend = available_backends[backend_id] - self._target = available_targets[target_id] - - self._benchmark_results = dict() - self._benchmark_results_brief = dict() - - def setBackendAndTarget(self, backend_id, target_id): - self._backend = backend_id - self._target = target_id - - def run(self, model): - model.setBackendAndTarget(self._backend, self._target) - - for idx, data in enumerate(self._dataloader): - filename, input_data = data[:2] - - if isinstance(input_data, np.ndarray): - size = [input_data.shape[1], input_data.shape[0]] - else: - size = input_data.getFrameSize() - - if str(size) not in self._benchmark_results: - self._benchmark_results[str(size)] = dict() - self._benchmark_results[str(size)][filename] = self._metric.forward(model, *data[1:]) - - if str(size) not in self._benchmark_results_brief: - self._benchmark_results_brief[str(size)] = [] - self._benchmark_results_brief[str(size)] += self._benchmark_results[str(size)][filename] 
- - def printResults(self, model_name, model_path): - for imgSize, res in self._benchmark_results_brief.items(): - mean, median, minimum = self._metric.getPerfStats(res) - print("{:<10.2f} {:<10.2f} {:<10.2f} {:<12} {} with {}".format( - mean, median, minimum, imgSize, model_name, model_path - )) - -if __name__ == '__main__': - cfgs = [] - if args.cfg is not None: - assert args.cfg.endswith('yaml'), 'Currently support configs of yaml format only.' - with open(args.cfg, 'r') as f: - cfg = yaml.safe_load(f) - cfgs.append(cfg) - elif args.all: - excludes = [] - if args.cfg_exclude is not None: - excludes = args.cfg_exclude.split(":") - - for cfg_fname in sorted(os.listdir("config")): - skip_flag = False - for exc in excludes: - if exc.lower() in cfg_fname.lower(): - skip_flag = True - if skip_flag: - # print("{} is skipped.".format(cfg_fname)) - continue - - assert cfg_fname.endswith("yaml"), "Currently support yaml configs only." - with open(os.path.join("config", cfg_fname), "r") as f: - cfg = yaml.safe_load(f) - cfgs.append(cfg) - else: - raise NotImplementedError("Specify either one config or use flag --all for benchmark.") - - print("Benchmarking ...") - if args.all: - backend_target_id = args.cfg_overwrite_backend_target if args.cfg_overwrite_backend_target >= 0 else 0 - backend_str = backend_target_str_pairs[backend_target_id][0] - target_str = backend_target_str_pairs[backend_target_id][1] - print("backend={}".format(backend_str)) - print("target={}".format(target_str)) - print("{:<10} {:<10} {:<10} {:<12} {}".format("mean", "median", "min", "input size", "model")) - for cfg in cfgs: - # Instantiate benchmark - benchmark = Benchmark(**cfg['Benchmark']) - - # Set backend and target - if args.cfg_overwrite_backend_target >= 0: - backend_id = backend_target_pairs[args.cfg_overwrite_backend_target][0] - target_id = backend_target_pairs[args.cfg_overwrite_backend_target][1] - benchmark.setBackendAndTarget(backend_id, target_id) - - # Instantiate model - 
model_config = cfg['Model'] - model_handler, model_paths = MODELS.get(model_config.pop('name')) - - _model_paths = [] - if args.fp32 or args.fp16 or args.int8 or args.int8bq: - if args.fp32: - _model_paths += model_paths['fp32'] - if args.fp16: - _model_paths += model_paths['fp16'] - if args.int8: - _model_paths += model_paths['int8'] - if args.int8bq: - _model_paths += model_paths['int8bq'] - else: - _model_paths = model_paths['fp32'] + model_paths['fp16'] + model_paths['int8'] + model_paths["int8bq"] - # filter out excluded models - excludes = [] - if args.model_exclude is not None: - excludes = args.model_exclude.split(":") - _model_paths_excluded = [] - for model_path in _model_paths: - skip_flag = False - for mp in model_path: - for exc in excludes: - if exc in mp: - skip_flag = True - if skip_flag: - continue - _model_paths_excluded.append(model_path) - _model_paths = _model_paths_excluded - - for model_path in _model_paths: - model = model_handler(*model_path, **model_config) - # Format model_path - for i in range(len(model_path)): - model_path[i] = model_path[i].split('/')[-1] - # Run benchmark - benchmark.run(model) - benchmark.printResults(model.name, model_path) diff --git a/benchmark/color_table.svg b/benchmark/color_table.svg deleted file mode 100644 index 480584c0..00000000 --- a/benchmark/color_table.svg +++ /dev/null @@ -1,5161 +0,0 @@ - - - - - - - - image/svg+xml - - - - - - - - - - - - - - - - Faster - - - Slower - - - - - - - - - - - - Model - - - - - - - - - - Task - - - - - - - - - - Input Size - - - - - - - - - - Intel - - - 12700K - - - CPU - - - - - - - - - - Atlas 200I DK A2 - - - Ascend 310B - - - CPU - - - - - - - - - - Atlas 200 DK - - - Ascend 310 - - - CPU - - - - - - - - - - Khadas VIM3 - - - A311D - - - CPU - - - - - - - - - - Khadas VIM4 - - - A311D2 - - - CPU - - - - - - - - - - Khadas Edge2 - - - RK3588S - - - CPU - - - - - - - - - - Jetson Nano - - - B01 - - - CPU - - - - - - - - - - Jetson Nano - - - Orin - - - CPU - - - - - - 
- - - - Raspberry Pi 4B - - - BCM2711 - - - CPU - - - - - - - - - - Horizon Sunrise Pi - - - X3 - - - CPU - - - - - - - - - - MAIX-III AX-Pi - - - AX620A - - - CPU - - - - - - - - - - Toybrick - - - RV1126 - - - CPU - - - - - - - - - - StarFive VisionFive 2 - - - StarFive JH7110 - - - CPU - - - - - - - - - - Jetson Nano - - - B01 - - - GPU - - - - - - - - - - Jetson Nano - - - Orin - - - GPU - - - - - - - - - - Khadas VIM3 - - - A311D - - - NPU - - - - - - - - - - Atlas 200 DK - - - Ascend 310 - - - NPU - - - - - - - - - - YuNet - - - - - - - Face Detection - - - - - - 160x120 - - - - - - 0.69 - - - - - - 6.67 - - - - - - 7.82 - - - - - - 4.62 - - - - - - 4.27 - - - - - - 2.30 - - - - - - 5.62 - - - - - - 2.59 - - - - - - 6.23 - - - - - - 10.56 - - - - - - 83.95 - - - - - - 56.78 - - - - - - 41.13 - - - - - - 10.99 - - - - - - 5.23 - - - - - - 5.24 - - - - - - 2.24 - - - - - - - - - SFace - - - - - - - Face Recognition - - - - - - 112x112 - - - - - - 5.09 - - - - - - 78.90 - - - - - - 92.21 - - - - - - 55.04 - - - - - - 39.94 - - - - - - 28.94 - - - - - - 64.80 - - - - - - 20.05 - - - - - - 68.82 - - - - - - 124.80 - - - - - - 2326.96 - - - - - - 1737.74 - - - - - - 1169.96 - - - - - - 25.25 - - - - - - 7.59 - - - - - - 45.96 - - - - - - 2.66 - - - - - - - - - FER - - - - - - - Face Expression Recognition - - - - - - 112x112 - - - - - - 1.79 - - - - - - 36.94 - - - - - - 42.93 - - - - - - 29.50 - - - - - - 17.28 - - - - - - 12.44 - - - - - - 26.54 - - - - - - 9.15 - - - - - - 27.81 - - - - - - 55.12 - - - - - - 823.42 - - - - - - 609.51 - - - - - - 423.91 - - - - - - 13.97 - - - - - - 8.48 - - - - - - 30.25 - - - - - - 2.19 - - - - - - - - - LPD_YuNet - - - - - - - License Plate Detection - - - - - - 320x240 - - - - - - 5.68 - - - - - - 155.73 - - - - - - 201.99 - - - - - - 128.58 - - - - - - 81.46 - - - - - - 53.83 - - - - - - 134.36 - - - - - - 39.91 - - - - - - 153.87 - - - - - - 252.05 - - - - - - 4371.75 - - - - - - 3260.22 - - - - - - 2267.96 - - - - - - 
54.88 - - - - - - 16.33 - - - - - - 32.11 - - - - - - 7.63 - - - - - - - - - YOLOX - - - - - - - Object Detection - - - - - - 640x640 - - - - - - 78.77 - - - - - - 1407.75 - - - - - - 1875.33 - - - - - - 1108.12 - - - - - - 805.54 - - - - - - 554.30 - - - - - - 1209.12 - - - - - - 400.91 - - - - - - 1614.13 - - - - - - 2516.91 - - - - - - 50633.38 - - - - - - 37600.81 - - - - - - 26185.41 - - - - - - 371.32 - - - - - - 103.28 - - - - - - 408.18 - - - - - - 28.59 - - - - - - - - - NanoDet - - - - - - - Object Detection - - - - - - 416x416 - - - - - - 41.02 - - - - - - 253.05 - - - - - - 313.66 - - - - - - 179.93 - - - - - - 136.14 - - - - - - 98.03 - - - - - - 215.67 - - - - - - 125.31 - - - - - - 214.59 - - - - - - 399.35 - - - - - - 3421.19 - - - - - - 2335.89 - - - - - - 1731.61 - - - - - - 63.86 - - - - - - 24.46 - - - - - - 116.32 - - - - - - 20.62 - - - - - - - - - PPOCRDet-CN - - - - - - - Text Detection - - - - - - 640x480 - - - - - - 18.76 - - - - - - 219.28 - - - - - - 360.26 - - - - - - 211.02 - - - - - - 148.82 - - - - - - 103.42 - - - - - - 209.80 - - - - - - 78.10 - - - - - - 325.02 - - - - - - 475.98 - - - - - - 5899.23 - - - - - - 4267.03 - - - - - - 2988.22 - - - - - - 112.68 - - - - - - 27.49 - - - - - - 160.70 - - - - - - --- - - - - - - - - - PPOCRDet-EN - - - - - - - Text Detection - - - - - - 640x480 - - - - - - 18.59 - - - - - - 217.18 - - - - - - 361.22 - - - - - - 210.19 - - - - - - 148.91 - - - - - - 103.41 - - - - - - 209.60 - - - - - - 78.03 - - - - - - 323.54 - - - - - - 475.90 - - - - - - 5889.39 - - - - - - 4265.58 - - - - - - 2981.84 - - - - - - 112.48 - - - - - - 27.53 - - - - - - 160.47 - - - - - - --- - - - - - - - - - CRNN-EN - - - - - - - Text Recognition - - - - - - 100x32 - - - - - - 9.85 - - - - - - 158.82 - - - - - - 289.82 - - - - - - 185.45 - - - - - - 127.49 - - - - - - 67.15 - - - - - - 245.18 - - - - - - 134.47 - - - - - - 226.09 - - - - - - 427.47 - - - - - - 3247.56 - - - - - - 2217.15 - - - - - - 1432.21 - - - - - - 
36.77 - - - - - - 13.58 - - - - - - 196.69 - - - - - - --- - - - - - - - - - CRNN-CN - - - - - - - Text Recognition - - - - - - 100x32 - - - - - - 11.03 - - - - - - 169.22 - - - - - - 318.96 - - - - - - 197.16 - - - - - - 135.27 - - - - - - 70.63 - - - - - - 259.28 - - - - - - 144.53 - - - - - - 240.95 - - - - - - 453.60 - - - - - - 3281.31 - - - - - - 2217.08 - - - - - - 1425.42 - - - - - - 44.97 - - - - - - 15.91 - - - - - - 197.61 - - - - - - --- - - - - - - - - - PP-ResNet - - - - - - - Image Classification - - - - - - 224x224 - - - - - - 19.47 - - - - - - 417.31 - - - - - - 499.55 - - - - - - 335.75 - - - - - - 219.81 - - - - - - 151.10 - - - - - - 346.78 - - - - - - 102.30 - - - - - - 420.93 - - - - - - 653.39 - - - - - - 15841.89 - - - - - - 11855.64 - - - - - - 8116.21 - - - - - - 98.80 - - - - - - 32.58 - - - - - - 73.19 - - - - - - 6.99 - - - - - - - - - MobileNet-V1 - - - - - - - Image Classification - - - - - - 224x224 - - - - - - 3.13 - - - - - - 70.20 - - - - - - 92.66 - - - - - - 59.27 - - - - - - 38.73 - - - - - - 27.45 - - - - - - 65.72 - - - - - - 18.47 - - - - - - 64.20 - - - - - - 119.71 - - - - - - 2123.08 - - - - - - 1567.09 - - - - - - 1079.69 - - - - - - 28.96 - - - - - - 8.41 - - - - - - 148.80\* - - - - - - 5.15 - - - - - - - - - MobileNet-V2 - - - - - - - Image Classification - - - - - - 224x224 - - - - - - 3.04 - - - - - - 61.72 - - - - - - 79.39 - - - - - - 52.17 - - - - - - 33.68 - - - - - - 22.95 - - - - - - 56.66 - - - - - - 17.08 - - - - - - 57.91 - - - - - - 102.57 - - - - - - 1619.08 - - - - - - 1188.83 - - - - - - 820.15 - - - - - - 28.61 - - - - - - 9.36 - - - - - - 143.17\* - - - - - - 5.41 - - - - - - - - - PP-HumanSeg - - - - - - - Human Segmentation - - - - - - 192x192 - - - - - - 5.59 - - - - - - 78.01 - - - - - - 102.49 - - - - - - 71.92 - - - - - - 47.68 - - - - - - 29.63 - - - - - - 64.95 - - - - - - 24.86 - - - - - - 71.35 - - - - - - 132.95 - - - - - - 2175.34 - - - - - - 1109.61 - - - - - - 1127.61 - - - - - - 67.25 - 
- - - - - 12.91 - - - - - - 28.75 - - - - - - 6.94 - - - - - - - - - WeChatQRCode - - - - - - - QR Code Detection and Parsing - - - - - - 100x100 - - - - - - 1.19 - - - - - - --- - - - - - - --- - - - - - - --- - - - - - - --- - - - - - - --- - - - - - - 5.56 - - - - - - 3.53 - - - - - - 5.90 - - - - - - 9.93 - - - - - - --- - - - - - - --- - - - - - - --- - - - - - - --- - - - - - - --- - - - - - - --- - - - - - - --- - - - - - - - - - YoutuReID - - - - - - - Person Re-Identification - - - - - - 128x256 - - - - - - 15.56 - - - - - - 285.75 - - - - - - 521.46 - - - - - - 327.07 - - - - - - 218.22 - - - - - - 148.01 - - - - - - 356.78 - - - - - - 93.58 - - - - - - 478.89 - - - - - - 678.74 - - - - - - 14895.99 - - - - - - 11143.23 - - - - - - 7613.64 - - - - - - 90.84 - - - - - - 23.53 - - - - - - 41.82 - - - - - - 5.58 - - - - - - - - - MP-PalmDet - - - - - - - Palm Detection - - - - - - 192x192 - - - - - - 5.35 - - - - - - 92.56 - - - - - - 159.80 - - - - - - 78.26 - - - - - - 54.45 - - - - - - 45.03 - - - - - - 75.30 - - - - - - 27.73 - - - - - - 97.05 - - - - - - 168.54 - - - - - - 1285.54 - - - - - - 905.77 - - - - - - 628.64 - - - - - - 37.61 - - - - - - 10.84 - - - - - - 37.34 - - - - - - 5.17 - - - - - - - - - MP-HandPose - - - - - - - Hand Pose Estimation - - - - - - 224x224 - - - - - - 2.40 - - - - - - 56.00 - - - - - - 67.85 - - - - - - 35.80 - - - - - - 25.83 - - - - - - 20.22 - - - - - - 38.45 - - - - - - 15.00 - - - - - - 42.58 - - - - - - 77.44 - - - - - - 664.73 - - - - - - 465.12 - - - - - - 319.69 - - - - - - 24.47 - - - - - - 12.29 - - - - - - 19.75 - - - - - - 6.27 - - - - - - - - - MP-PersonDet - - - - - - - Person Detection - - - - - - 224x224 - - - - - - 7.65 - - - - - - 90.13 - - - - - - 145.83 - - - - - - 83.22 - - - - - - 57.22 - - - - - - 42.90 - - - - - - 87.65 - - - - - - 33.05 - - - - - - 105.60 - - - - - - 172.55 - - - - - - 1849.87 - - - - - - 1315.48 - - - - - - 910.38 - - - - - - 37.39 - - - - - - 14.50 - - - - - - --- - - - - - - 
16.45 - - - - - - - - - MP-Pose - - - - - - - Pose Estimation - - - - - - 256x256 - - - - - - 6.33 - - - - - - 83.16 - - - - - - 134.02 - - - - - - 75.38 - - - - - - 53.06 - - - - - - 37.06 - - - - - - 75.20 - - - - - - 31.51 - - - - - - 116.15 - - - - - - 162.87 - - - - - - 1045.98 - - - - - - 736.02 - - - - - - 524.52 - - - - - - 76.44 - - - - - - 26.54 - - - - - - --- - - - - - - --- - - - - - - - - - VitTrack - - - - - - - Object Tracking - - - - - - 1280x720 - - - - - - 4.01 - - - - - - 37.02 - - - - - - 143.62 - - - - - - 32.20 - - - - - - 29.47 - - - - - - 14.02 - - - - - - 48.39 - - - - - - 19.16 - - - - - - 48.55 - - - - - - 84.15 - - - - - - 548.36 - - - - - - 411.41 - - - - - - 288.95 - - - - - - 47.26 - - - - - - 19.75 - - - - - - --- - - - - - - --- - - - - - - Units: All data in milliseconds (ms). - - - \*: Models are quantized in per-channel mode, which run slower than per-tensor quantized models on NPU. - - - - - - - - - diff --git a/benchmark/config/face_detection_yunet.yaml b/benchmark/config/face_detection_yunet.yaml deleted file mode 100644 index 3a903126..00000000 --- a/benchmark/config/face_detection_yunet.yaml +++ /dev/null @@ -1,19 +0,0 @@ -Benchmark: - name: "Face Detection Benchmark" - type: "Detection" - data: - path: "data/face_detection" - files: ["group.jpg", "concerts.jpg", "dance.jpg"] - sizes: # [[w1, h1], ...], Omit to run at original scale - - [160, 120] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "YuNet" - confThreshold: 0.6 - nmsThreshold: 0.3 - topK: 5000 diff --git a/benchmark/config/face_recognition_sface.yaml b/benchmark/config/face_recognition_sface.yaml deleted file mode 100644 index ad66287b..00000000 --- a/benchmark/config/face_recognition_sface.yaml +++ /dev/null @@ -1,14 +0,0 @@ -Benchmark: - name: "Face Recognition Benchmark" - type: "Recognition" - data: - path: "data/face_recognition" - files: ["Aaron_Tippin_0001.jpg", "Alvaro_Uribe_0028.jpg", "Alvaro_Uribe_0029.jpg", 
"Jose_Luis_Rodriguez_Zapatero_0001.jpg"] - metric: # 'sizes' is omitted since this model requires input of fixed size - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "SFace" diff --git a/benchmark/config/facial_expression_recognition.yaml b/benchmark/config/facial_expression_recognition.yaml deleted file mode 100644 index 05f4af7f..00000000 --- a/benchmark/config/facial_expression_recognition.yaml +++ /dev/null @@ -1,14 +0,0 @@ -Benchmark: - name: "Facial Expression Recognition Benchmark" - type: "Recognition" - data: - path: "data/facial_expression_recognition/fer_evaluation" - files: ["RAF_test_0_61.jpg", "RAF_test_0_30.jpg", "RAF_test_6_25.jpg"] - metric: # 'sizes' is omitted since this model requires input of fixed size - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "FacialExpressionRecog" diff --git a/benchmark/config/handpose_estimation_mediapipe.yaml b/benchmark/config/handpose_estimation_mediapipe.yaml deleted file mode 100644 index 53170421..00000000 --- a/benchmark/config/handpose_estimation_mediapipe.yaml +++ /dev/null @@ -1,17 +0,0 @@ -Benchmark: - name: "Hand Pose Estimation Benchmark" - type: "Recognition" - data: - path: "data/palm_detection_20230125" - files: ["palm1.jpg", "palm2.jpg", "palm3.jpg"] - sizes: # [[w1, h1], ...], Omit to run at original scale - - [224, 224] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "MPHandPose" - confThreshold: 0.9 diff --git a/benchmark/config/human_segmentation_pphumanseg.yaml b/benchmark/config/human_segmentation_pphumanseg.yaml deleted file mode 100644 index 1849391c..00000000 --- a/benchmark/config/human_segmentation_pphumanseg.yaml +++ /dev/null @@ -1,16 +0,0 @@ -Benchmark: - name: "Human Segmentation Benchmark" - type: "Base" - data: - path: "data/human_segmentation" - files: ["messi5.jpg", "100040721_1.jpg", "detect.jpg"] - sizes: [[192, 192]] - toRGB: True - metric: - warmup: 30 - repeat: 10 - 
backend: "default" - target: "cpu" - -Model: - name: "PPHumanSeg" diff --git a/benchmark/config/image_classification_mobilenet.yaml b/benchmark/config/image_classification_mobilenet.yaml deleted file mode 100644 index 54d1dabb..00000000 --- a/benchmark/config/image_classification_mobilenet.yaml +++ /dev/null @@ -1,17 +0,0 @@ -Benchmark: - name: "Image Classification Benchmark" - type: "Classification" - data: - path: "data/image_classification" - files: ["coffee_mug.jpg", "umbrella.jpg", "wall_clock.jpg"] - sizes: [[256, 256]] - toRGB: True - centerCrop: 224 - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "MobileNet" diff --git a/benchmark/config/image_classification_ppresnet.yaml b/benchmark/config/image_classification_ppresnet.yaml deleted file mode 100644 index e5403804..00000000 --- a/benchmark/config/image_classification_ppresnet.yaml +++ /dev/null @@ -1,17 +0,0 @@ -Benchmark: - name: "Image Classification Benchmark" - type: "Classification" - data: - path: "data/image_classification" - files: ["coffee_mug.jpg", "umbrella.jpg", "wall_clock.jpg"] - sizes: [[256, 256]] - toRGB: True - centerCrop: 224 - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "PPResNet" diff --git a/benchmark/config/license_plate_detection_yunet.yaml b/benchmark/config/license_plate_detection_yunet.yaml deleted file mode 100644 index bd3872d6..00000000 --- a/benchmark/config/license_plate_detection_yunet.yaml +++ /dev/null @@ -1,20 +0,0 @@ -Benchmark: - name: "License Plate Detection Benchmark" - type: "Detection" - data: - path: "data/license_plate_detection" - files: ["1.jpg", "2.jpg", "3.jpg", "4.jpg"] - sizes: # [[w1, h1], ...], Omit to run at original scale - - [320, 240] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "LPD_YuNet" - confThreshold: 0.8 - nmsThreshold: 0.3 - topK: 5000 - keepTopK: 750 diff --git a/benchmark/config/object_detection_nanodet.yaml 
b/benchmark/config/object_detection_nanodet.yaml deleted file mode 100644 index 77e16914..00000000 --- a/benchmark/config/object_detection_nanodet.yaml +++ /dev/null @@ -1,18 +0,0 @@ -Benchmark: - name: "Object Detection Benchmark" - type: "Detection" - data: - path: "data/object_detection" - files: ["1.png", "2.png", "3.png"] - sizes: - - [416, 416] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "NanoDet" - prob_threshold: 0.35 - iou_threshold: 0.6 diff --git a/benchmark/config/object_detection_yolox.yaml b/benchmark/config/object_detection_yolox.yaml deleted file mode 100644 index bfda9200..00000000 --- a/benchmark/config/object_detection_yolox.yaml +++ /dev/null @@ -1,19 +0,0 @@ -Benchmark: - name: "Object Detection Benchmark" - type: "Detection" - data: - path: "data/object_detection" - files: ["1.png", "2.png", "3.png"] - sizes: - - [640, 640] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "YoloX" - confThreshold: 0.35 - nmsThreshold: 0.5 - objThreshold: 0.5 diff --git a/benchmark/config/object_tracking_vittrack.yaml b/benchmark/config/object_tracking_vittrack.yaml deleted file mode 100644 index 6ed4cdf0..00000000 --- a/benchmark/config/object_tracking_vittrack.yaml +++ /dev/null @@ -1,14 +0,0 @@ -Benchmark: - name: "Object Tracking Benchmark" - type: "Tracking" - data: - type: "TrackingVideoLoader" - path: "data/object_tracking" - files: ["throw_cup.mp4"] - metric: - type: "Tracking" - backend: "default" - target: "cpu" - -Model: - name: "VitTrack" diff --git a/benchmark/config/palm_detection_mediapipe.yaml b/benchmark/config/palm_detection_mediapipe.yaml deleted file mode 100644 index e00bdb4a..00000000 --- a/benchmark/config/palm_detection_mediapipe.yaml +++ /dev/null @@ -1,19 +0,0 @@ -Benchmark: - name: "Palm Detection Benchmark" - type: "Detection" - data: - path: "data/palm_detection_20230125" - files: ["palm1.jpg", "palm2.jpg", "palm3.jpg"] - sizes: # [[w1, h1], 
...], Omit to run at original scale - - [192, 192] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "MPPalmDet" - scoreThreshold: 0.5 - nmsThreshold: 0.3 - topK: 1 diff --git a/benchmark/config/person_detection_mediapipe.yaml b/benchmark/config/person_detection_mediapipe.yaml deleted file mode 100644 index ba95446c..00000000 --- a/benchmark/config/person_detection_mediapipe.yaml +++ /dev/null @@ -1,19 +0,0 @@ -Benchmark: - name: "Person Detection Benchmark" - type: "Detection" - data: - path: "data/person_detection" - files: ["person1.jpg", "person2.jpg", "person3.jpg"] - sizes: # [[w1, h1], ...], Omit to run at original scale - - [224, 224] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "MPPersonDet" - scoreThreshold: 0.5 - nmsThreshold: 0.3 - topK: 1 diff --git a/benchmark/config/person_reid_youtureid.yaml b/benchmark/config/person_reid_youtureid.yaml deleted file mode 100644 index 07ccf31a..00000000 --- a/benchmark/config/person_reid_youtureid.yaml +++ /dev/null @@ -1,15 +0,0 @@ -Benchmark: - name: "Person ReID Benchmark" - type: "Base" - data: - path: "data/person_reid" - files: ["0030_c1_f0056923.jpg", "0042_c5_f0068994.jpg", "0056_c8_f0017063.jpg"] - sizes: [[128, 256]] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "YoutuReID" diff --git a/benchmark/config/pose_estimation_mediapipe.yaml b/benchmark/config/pose_estimation_mediapipe.yaml deleted file mode 100644 index 7cfb26ac..00000000 --- a/benchmark/config/pose_estimation_mediapipe.yaml +++ /dev/null @@ -1,17 +0,0 @@ -Benchmark: - name: "Pose Estimation Benchmark" - type: "Recognition" - data: - path: "data/person_detection" - files: ["person1.jpg", "person2.jpg", "person3.jpg"] - sizes: # [[w1, h1], ...], Omit to run at original scale - - [256, 256] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "MPPose" - confThreshold: 0.9 diff --git 
a/benchmark/config/qrcode_wechatqrcode.yaml b/benchmark/config/qrcode_wechatqrcode.yaml deleted file mode 100644 index 267554c5..00000000 --- a/benchmark/config/qrcode_wechatqrcode.yaml +++ /dev/null @@ -1,16 +0,0 @@ -Benchmark: - name: "QRCode Detection and Decoding Benchmark" - type: "Detection" - data: - path: "data/qrcode" - files: ["opencv.png", "opencv_zoo.png"] - sizes: - - [100, 100] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "WeChatQRCode" diff --git a/benchmark/config/text_detection_ppocr.yaml b/benchmark/config/text_detection_ppocr.yaml deleted file mode 100644 index 80158536..00000000 --- a/benchmark/config/text_detection_ppocr.yaml +++ /dev/null @@ -1,20 +0,0 @@ -Benchmark: - name: "Text Detection Benchmark" - type: "Detection" - data: - path: "data/text" - files: ["1.jpg", "2.jpg", "3.jpg"] - sizes: # [[w1, h1], ...], Omit to run at original scale - - [640, 480] - metric: - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "PPOCRDet" - binaryThreshold: 0.3 - polygonThreshold: 0.5 - maxCandidates: 200 - unclipRatio: 2.0 diff --git a/benchmark/config/text_recognition_crnn.yaml b/benchmark/config/text_recognition_crnn.yaml deleted file mode 100644 index e4cdc9d0..00000000 --- a/benchmark/config/text_recognition_crnn.yaml +++ /dev/null @@ -1,14 +0,0 @@ -Benchmark: - name: "Text Recognition Benchmark" - type: "Recognition" - data: - path: "data/text" - files: ["1.jpg", "2.jpg", "3.jpg"] - metric: # 'sizes' is omitted since this model requires input of fixed size - warmup: 30 - repeat: 10 - backend: "default" - target: "cpu" - -Model: - name: "CRNN" diff --git a/benchmark/data/.gitignore b/benchmark/data/.gitignore deleted file mode 100644 index c96a04f0..00000000 --- a/benchmark/data/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -* -!.gitignore \ No newline at end of file diff --git a/benchmark/download_data.py b/benchmark/download_data.py deleted file mode 100644 index 
68033302..00000000 --- a/benchmark/download_data.py +++ /dev/null @@ -1,237 +0,0 @@ -import hashlib -import os -import sys -import tarfile -import zipfile -import requests -import os.path as osp - -from urllib.request import urlopen -from urllib.parse import urlparse - - -class Downloader: - MB = 1024*1024 - BUFSIZE = 10*MB - - def __init__(self, **kwargs): - self._name = kwargs.pop('name') - self._url = kwargs.pop('url', None) - self._filename = kwargs.pop('filename') - self._sha = kwargs.pop('sha', None) - self._saveTo = kwargs.pop('saveTo', './data') - self._extractTo = kwargs.pop('extractTo', './data') - - def __str__(self): - return 'Downloader for <{}>'.format(self._name) - - def printRequest(self, r): - def getMB(r): - d = dict(r.info()) - for c in ['content-length', 'Content-Length']: - if c in d: - return int(d[c]) / self.MB - return '' - print(' {} {} [{} Mb]'.format(r.getcode(), r.msg, getMB(r))) - - def verifyHash(self): - if not self._sha: - return False - sha = hashlib.sha1() - try: - with open(osp.join(self._saveTo, self._filename), 'rb') as f: - while True: - buf = f.read(self.BUFSIZE) - if not buf: - break - sha.update(buf) - if self._sha != sha.hexdigest(): - print(' actual {}'.format(sha.hexdigest())) - print(' expect {}'.format(self._sha)) - return self._sha == sha.hexdigest() - except Exception as e: - print(' catch {}'.format(e)) - - def get(self): - print(' {}: {}'.format(self._name, self._filename)) - if self.verifyHash(): - print(' hash match - skipping download') - else: - basedir = os.path.dirname(self._saveTo) - if basedir and not os.path.exists(basedir): - print(' creating directory: ' + basedir) - os.makedirs(basedir, exist_ok=True) - - print(' hash check failed - downloading') - if 'drive.google.com' in self._url: - urlquery = urlparse(self._url).query.split('&') - for q in urlquery: - if 'id=' in q: - gid = q[3:] - sz = GDrive(gid)(osp.join(self._saveTo, self._filename)) - print(' size = %.2f Mb' % (sz / (1024.0 * 1024))) - else: - 
print(' get {}'.format(self._url)) - self.download() - - # Verify hash after download - print(' done') - print(' file {}'.format(self._filename)) - if self.verifyHash(): - print(' hash match - extracting') - else: - print(' hash check failed - exiting') - - # Extract - if '.zip' in self._filename: - print(' extracting - ', end='') - self.extract() - print('done') - - return True - - def download(self): - try: - r = urlopen(self._url, timeout=60) - self.printRequest(r) - self.save(r) - except Exception as e: - print(' catch {}'.format(e)) - - def extract(self): - fileLocation = os.path.join(self._saveTo, self._filename) - try: - if self._filename.endswith('.zip'): - with zipfile.ZipFile(fileLocation) as f: - for member in f.namelist(): - path = osp.join(self._extractTo, member) - if osp.exists(path) or osp.isfile(path): - continue - else: - f.extract(member, self._extractTo) - except Exception as e: - print((' catch {}'.format(e))) - - def save(self, r): - with open(self._filename, 'wb') as f: - print(' progress ', end='') - sys.stdout.flush() - while True: - buf = r.read(self.BUFSIZE) - if not buf: - break - f.write(buf) - print('>', end='') - sys.stdout.flush() - - -def GDrive(gid): - def download_gdrive(dst): - session = requests.Session() # re-use cookies - - URL = "https://docs.google.com/uc?export=download" - response = session.get(URL, params = { 'id' : gid }, stream = True) - - def get_confirm_token(response): # in case of large files - for key, value in response.cookies.items(): - if key.startswith('download_warning'): - return value - return None - token = get_confirm_token(response) - - if token: - params = { 'id' : gid, 'confirm' : token } - response = session.get(URL, params = params, stream = True) - - BUFSIZE = 1024 * 1024 - PROGRESS_SIZE = 10 * 1024 * 1024 - - sz = 0 - progress_sz = PROGRESS_SIZE - with open(dst, "wb") as f: - for chunk in response.iter_content(BUFSIZE): - if not chunk: - continue # keep-alive - - f.write(chunk) - sz += len(chunk) - 
if sz >= progress_sz: - progress_sz += PROGRESS_SIZE - print('>', end='') - sys.stdout.flush() - print('') - return sz - return download_gdrive - -# Data will be downloaded and extracted to ./data by default -data_downloaders = dict( - face_detection=Downloader(name='face_detection', - url='https://drive.google.com/u/0/uc?id=1lOAliAIeOv4olM65YDzE55kn6XjiX2l6&export=download', - sha='0ba67a9cfd60f7fdb65cdb7c55a1ce76c1193df1', - filename='face_detection.zip'), - face_recognition=Downloader(name='face_recognition', - url='https://drive.google.com/u/0/uc?id=1BRIozREIzqkm_aMQ581j93oWoS-6TLST&export=download', - sha='03892b9036c58d9400255ff73858caeec1f46609', - filename='face_recognition.zip'), - facial_expression_recognition=Downloader(name='facial_expression_recognition', - url='https://drive.google.com/u/0/uc?id=13ZE0Pz302z1AQmBmYGuowkTiEXVLyFFZ&export=download', - sha='8f757559820c8eaa1b1e0065f9c3bbbd4f49efe2', - filename='facial_expression_recognition.zip'), - text=Downloader(name='text', - url='https://drive.google.com/u/0/uc?id=1lTQdZUau7ujHBqp0P6M1kccnnJgO-dRj&export=download', - sha='a40cf095ceb77159ddd2a5902f3b4329696dd866', - filename='text.zip'), - image_classification=Downloader(name='image_classification', - url='https://drive.google.com/u/0/uc?id=1qcsrX3CIAGTooB-9fLKYwcvoCuMgjzGU&export=download', - sha='987546f567f9f11d150eea78951024b55b015401', - filename='image_classification.zip'), - human_segmentation=Downloader(name='human_segmentation', - url='https://drive.google.com/u/0/uc?id=1Kh0qXcAZCEaqwavbUZubhRwrn_8zY7IL&export=download', - sha='ac0eedfd8568570cad135acccd08a134257314d0', - filename='human_segmentation.zip'), - qrcode=Downloader(name='qrcode', - url='https://drive.google.com/u/0/uc?id=1_OXB7eiCIYO335ewkT6EdAeXyriFlq_H&export=download', - sha='ac01c098934a353ca1545b5266de8bb4f176d1b3', - filename='qrcode.zip'), - object_tracking=Downloader(name='object_tracking', - 
url='https://drive.google.com/u/0/uc?id=1_cw5pUmTF-XmQVcQAI8fIp-Ewi2oMYIn&export=download', - sha='0bdb042632a245270013713bc48ad35e9221f3bb', - filename='object_tracking.zip'), - person_reid=Downloader(name='person_reid', - url='https://drive.google.com/u/0/uc?id=1G8FkfVo5qcuyMkjSs4EA6J5e16SWDGI2&export=download', - sha='5b741fbf34c1fbcf59cad8f2a65327a5899e66f1', - filename='person_reid.zip'), - palm_detection=Downloader(name='palm_detection', - url='https://drive.google.com/u/0/uc?id=1Z4KvccTZPeZ0qFLZ6saBt_TvcKYyo9JE&export=download', - sha='4b5bb24a51daab8913957e60245a4eb766c8cf2e', - filename='palm_detection_20230125.zip'), - license_plate_detection=Downloader(name='license_plate_detection', - url='https://drive.google.com/u/0/uc?id=1cf9MEyUqMMy8lLeDGd1any6tM_SsSmny&export=download', - sha='997acb143ddc4531e6e41365fb7ad4722064564c', - filename='license_plate_detection.zip'), - object_detection=Downloader(name='object_detection', - url='https://drive.google.com/u/0/uc?id=1LUUrQIWYYtiGoNAL_twZvdw5NkC39Swe&export=download', - sha='4161a5cd3b0be1f51484abacf19dc9a2231e9894', - filename='object_detection.zip'), - person_detection=Downloader(name='person_detection', - url='https://drive.google.com/u/0/uc?id=1RbLyetgqFUTt0IHaVmu6c_b7KeXJgKbc&export=download', - sha='fbae2fb0a47fe65e316bbd0ec57ba21461967550', - filename='person_detection.zip'), -) - -if __name__ == '__main__': - selected_data_names = [] - for i in range(1, len(sys.argv)): - selected_data_names.append(sys.argv[i]) - if not selected_data_names: - selected_data_names = list(data_downloaders.keys()) - print('Data will be downloaded: {}'.format(str(selected_data_names))) - - download_failed = [] - for selected_data_name in selected_data_names: - downloader = data_downloaders[selected_data_name] - if not downloader.get(): - download_failed.append(downloader._name) - - if download_failed: - print('Data have not been downloaded: {}'.format(str(download_failed))) diff --git a/benchmark/generate_table.py 
b/benchmark/generate_table.py deleted file mode 100644 index daf258d6..00000000 --- a/benchmark/generate_table.py +++ /dev/null @@ -1,277 +0,0 @@ -import re -import matplotlib.pyplot as plt -import matplotlib as mpl -import numpy as np -import yaml - - -# parse a '.md' file and find a table. return table information -def parse_table(filepath, cfg): - # parse benchmark data - def _parse_benchmark_data(lines): - raw_data = [] - for l in lines: - l = l.strip() - # parse each line - m = re.match(r"(\d+\.?\d*)\s+(\d+\.?\d*)\s+(\d+\.?\d*)\s+\[([^]]*)]\s+(.*)", l) - if m: - raw_data.append(m.groups()) - return raw_data - - # find each cpu, gpu, npu block - def _find_all_platform_block(lines): - cur_start = None - cur_platform = None - platform_block = dict() - for i in range(len(lines)): - l = lines[i].strip() - # found start and end of a platform - if l.startswith("CPU") or l.startswith("GPU") or l.startswith("NPU"): - if cur_platform is not None: - platform_block[cur_platform] = (cur_start, i) - cur_platform = l[:-1] - cur_start = i + 1 - continue - if cur_platform is not None and i == len(lines) - 1: - platform_block[cur_platform] = (cur_start, i) - for key in platform_block: - r = platform_block[key] - platform_block[key] = _parse_benchmark_data(lines[r[0]:r[1]]) - - return platform_block - - # find device block - def _find_all_device_block(lines, level): - cur_start = None - cur_device_name = None - device_block = dict() - for i in range(len(lines)): - l = lines[i].strip() - m = re.match(r"^(#+)\s+(.*)", l) - # found start and end of a device - if m and len(m.group(1)) == level: - if cur_device_name is not None: - device_block[cur_device_name] = (cur_start, i) - cur_device_name = m.group(2) - cur_start = i + 1 - continue - if cur_device_name is not None and i == len(lines) - 1: - device_block[cur_device_name] = (cur_start, i) - - for key in device_block: - r = device_block[key] - device_block[key] = _find_all_platform_block(lines[r[0]:r[1]]) - - return device_block - 
- # find detail block - def _find_detail_block(lines, title, level): - start = None - end = len(lines) - for i in range(len(lines)): - l = lines[i].strip() - m = re.match(r"^(#+)\s+(.*)", l) - # found start of detailed results block - if m and len(m.group(1)) == level and m.group(2) == title: - start = i + 1 - continue - # found end of detailed results block - if start is not None and m and len(m.group(1)) <= level: - end = i - break - - return _find_all_device_block(lines[start:end], level + 1) - - with open(filepath, "r", encoding="utf-8") as f: - content = f.read() - lines = content.split("\n") - - devices = cfg["Devices"] - models = cfg["Models"] - # display information of all devices - devices_display = [x['display_info'] for x in cfg["Devices"]] - header = ["Model", "Task", "Input Size"] + devices_display - body = [[x["name"], x["task"], x["input_size"]] + ["---"] * len(devices) for x in models] - table_raw_data = _find_detail_block(lines, title="Detailed Results", level=2) - - device_name_header = [f"{x['name']}-{x['platform']}" for x in devices] - device_name_header = [""] * (len(header) - len(device_name_header)) + device_name_header - # device name map to model col idx - device_name_to_col_idx = {k: v for v, k in enumerate(device_name_header)} - # model name map to model row idx - model_name_to_row_idx = {k[0]: v for v, k in enumerate(body)} - # convert raw data to usage data - for device in devices: - raw_data = table_raw_data[device["name"]][device["platform"]] - col_idx = device_name_to_col_idx[f"{device['name']}-{device['platform']}"] - for model in models: - # find which row idx of this model - row_idx = model_name_to_row_idx[model["name"]] - model_idxs = [i for i in range(len(raw_data)) if model["keyword"] in raw_data[i][-1]] - if len(model_idxs) > 0: - # only choose the first one - model_idx = model_idxs[0] - # choose mean as value - body[row_idx][col_idx] = raw_data[model_idx][0] - # remove used data - for idx in sorted(model_idxs, reverse=True): 
- raw_data.pop(idx) - - # handle suffix - for suffix in cfg["Suffixes"]: - row_idx = model_name_to_row_idx[suffix["model"]] - col_idx = device_name_to_col_idx[f"{suffix['device']}-{suffix['platform']}"] - body[row_idx][col_idx] += suffix["str"] - - return header, body - - -# render table and save -def render_table(header, body, save_path, cfg, cmap_type): - # parse models information and return some data - def _parse_data(models_info, cmap, cfg): - min_list = [] - max_list = [] - colors = [] - # model name map to idx - model_name_to_idx = {k["name"]: v for v, k in enumerate(cfg["Models"])} - for model in models_info: - # remove \* - data = [x.replace("\\*", "") for x in model] - # get max data - max_idx = -1 - min_data = 9999999 - min_idx = -1 - - for i in range(len(data)): - try: - d = float(data[i]) - if d < min_data: - min_data = d - min_idx = i - except: - pass - # set all bigger than acceptable time to red color - idx = model_name_to_idx[model[0]] - acc_time = cfg["Models"][idx]["acceptable_time"] - - min_list.append(min_idx) - max_list.append(max_idx) - - # calculate colors - color = [] - for t in data: - try: - t = float(t) - if t > acc_time: - # all bigger time will be set to red - color.append(cmap(1.)) - else: - # sqrt to make the result non-linear - t = np.sqrt((t - min_data) / (acc_time - min_data)) - color.append(cmap(t)) - except: - color.append('white') - colors.append(color) - return colors, min_list, max_list - - cmap = mpl.colormaps.get_cmap(cmap_type) - table_colors, min_list, max_list = _parse_data(body, cmap, cfg) - table_texts = [header] + body - table_colors = [['white'] * len(header)] + table_colors - - # create a figure, base width set to 1000, height set to 80 - fig, axs = plt.subplots(nrows=3, figsize=(10, 0.8)) - # turn off labels and axis - for ax in axs: - ax.set_axis_off() - ax.set_xticks([]) - ax.set_yticks([]) - - # create and add a color map - gradient = np.linspace(0, 1, 256) - gradient = np.vstack((gradient, gradient)) - 
axs[0].imshow(gradient, aspect='auto', cmap=cmap) - axs[0].text(-0.01, 0.5, "Faster", va='center', ha='right', fontsize=11, transform=axs[0].transAxes) - axs[0].text(1.01, 0.5, "Slower", va='center', ha='left', fontsize=11, transform=axs[0].transAxes) - - # initialize a table - table = axs[1].table(cellText=table_texts, - cellColours=table_colors, - cellLoc="left", - loc="upper left") - # set style of header, each url of hardware - ori_height = table[0, 0].get_height() - url_base = 'https://github.com/opencv/opencv_zoo/tree/main/benchmark#' - hw_urls = [f"{url_base}{x['name'].lower().replace(' ', '-')}" for x in cfg["Devices"]] - hw_urls = [""] * 3 + hw_urls - for col in range(len(header)): - cell = table[0, col] - cell.set_text_props(ha='center', weight='bold', linespacing=1.5, url=hw_urls[col]) - cell.set_url(hw_urls[col]) - cell.set_height(ori_height * 2.2) - - url_base = 'https://github.com/opencv/opencv_zoo/tree/main/models/' - model_urls = [f"{url_base}{x['folder']}" for x in cfg["Models"]] - model_urls = [""] + model_urls - for row in range(len(body) + 1): - cell = table[row, 0] - cell.set_text_props(url=model_urls[row]) - cell.set_url(model_urls[row]) - - # adjust table position - table_pos = axs[1].get_position() - axs[1].set_position([ - table_pos.x0, - table_pos.y0 - table_pos.height, - table_pos.width, - table_pos.height - ]) - - table.set_fontsize(11) - table.auto_set_font_size(False) - table.scale(1, 2) - table.auto_set_column_width(list(range(len(table_texts[0])))) - table.AXESPAD = 0 # cancel padding - - # highlight the best number - for i in range(len(min_list)): - cell = table.get_celld()[(i + 1, min_list[i])] - cell.set_text_props(weight='bold', color='white') - - # draw table and trigger changing the column width value - fig.canvas.draw() - # calculate table height and width - table_height = 0 - table_width = 0 - for i in range(len(table_texts)): - cell = table.get_celld()[(i, 0)] - table_height += cell.get_height() - for i in 
range(len(table_texts[0])): - cell = table.get_celld()[(0, i)] - table_width += cell.get_width() - - # add notes for table - axs[2].text(0, -table_height - 1, "Units: All data in milliseconds (ms).", va='bottom', ha='left', fontsize=11, transform=axs[1].transAxes) - axs[2].text(0, -table_height - 2, "\\*: Models are quantized in per-channel mode, which run slower than per-tensor quantized models on NPU.", va='bottom', ha='left', fontsize=11, transform=axs[1].transAxes) - - # adjust color map position to center - cm_pos = axs[0].get_position() - axs[0].set_position([ - (table_width - 1) / 2, - cm_pos.y0, - cm_pos.width, - cm_pos.height - ]) - - plt.rcParams['svg.fonttype'] = 'none' - plt.rcParams['svg.hashsalt'] = '11' # fix hash salt for avoiding id change - plt.savefig(save_path, format='svg', bbox_inches="tight", pad_inches=0, metadata={'Date': None, 'Creator': None}) - - -if __name__ == '__main__': - with open("table_config.yaml", 'r') as f: - cfg = yaml.safe_load(f) - - hw_info, model_info = parse_table("README.md", cfg) - render_table(hw_info, model_info, "color_table.svg", cfg, "RdYlGn_r") diff --git a/benchmark/requirements.txt b/benchmark/requirements.txt deleted file mode 100644 index 917b1045..00000000 --- a/benchmark/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -numpy -opencv-python<5.0 -pyyaml -requests -matplotlib>=3.7.1 \ No newline at end of file diff --git a/benchmark/table_config.yaml b/benchmark/table_config.yaml deleted file mode 100644 index c84f62f9..00000000 --- a/benchmark/table_config.yaml +++ /dev/null @@ -1,246 +0,0 @@ -# model information -# - name: model name, used for display -# task: model task, used for display -# input_size: input size, used for display -# folder: which folder the model located in, used for jumping to model detail -# acceptable_time: maximum acceptable inference time, large ones will be marked red -# keyword: used to specify this model from all benchmark results -# -# device information -# - name: full device name 
used to identify the device block, and jump to device detail -# display_info: device information for display -# platform: used to identify benchmark result of specific platform -# -# suffix information -# - model: which model -# device: which device -# suffix: this suffix will be appended to end of this text - -Models: - - name: "YuNet" - task: "Face Detection" - input_size: "160x120" - folder: "face_detection_yunet" - acceptable_time: 50 - keyword: "face_detection_yunet" - - - name: "SFace" - task: "Face Recognition" - input_size: "112x112" - folder: "face_recognition_sface" - acceptable_time: 200 - keyword: "face_recognition_sface" - - - name: "FER" - task: "Face Expression Recognition" - input_size: "112x112" - folder: "facial_expression_recognition" - acceptable_time: 200 - keyword: "facial_expression_recognition_mobilefacenet" - - - name: "LPD_YuNet" - task: "License Plate Detection" - input_size: "320x240" - folder: "license_plate_detection_yunet" - acceptable_time: 700 - keyword: "license_plate_detection_lpd_yunet" - - - name: "YOLOX" - task: "Object Detection" - input_size: "640x640" - folder: "object_detection_yolox" - acceptable_time: 2800 - keyword: "object_detection_yolox" - - - name: "NanoDet" - task: "Object Detection" - input_size: "416x416" - folder: "object_detection_nanodet" - acceptable_time: 2000 - keyword: "object_detection_nanodet" - - - name: "PPOCRDet-CN" - task: "Text Detection" - input_size: "640x480" - folder: "text_detection_ppocr" - acceptable_time: 2000 - keyword: "text_detection_cn_ppocrv3_2023may" - - - name: "PPOCRDet-EN" - task: "Text Detection" - input_size: "640x480" - folder: "text_detection_ppocr" - acceptable_time: 2000 - keyword: "text_detection_en_ppocrv3_2023may" - - - name: "CRNN-EN" - task: "Text Recognition" - input_size: "100x32" - folder: "text_recognition_crnn" - acceptable_time: 2000 - keyword: "text_recognition_CRNN_EN" - - - name: "CRNN-CN" - task: "Text Recognition" - input_size: "100x32" - folder: 
"text_recognition_crnn" - acceptable_time: 2000 - keyword: "text_recognition_CRNN_CN" - - - name: "PP-ResNet" - task: "Image Classification" - input_size: "224x224" - folder: "image_classification_ppresnet" - acceptable_time: 1000 - keyword: "image_classification_ppresnet50" - - - name: "MobileNet-V1" - task: "Image Classification" - input_size: "224x224" - folder: "image_classification_mobilenet" - acceptable_time: 500 - keyword: "image_classification_mobilenetv1" - - - name: "MobileNet-V2" - task: "Image Classification" - input_size: "224x224" - folder: "image_classification_mobilenet" - acceptable_time: 500 - keyword: "image_classification_mobilenetv2" - - - name: "PP-HumanSeg" - task: "Human Segmentation" - input_size: "192x192" - folder: "human_segmentation_pphumanseg" - acceptable_time: 700 - keyword: "human_segmentation_pphumanseg" - - - name: "WeChatQRCode" - task: "QR Code Detection and Parsing" - input_size: "100x100" - folder: "qrcode_wechatqrcode" - acceptable_time: 100 - keyword: "WeChatQRCode" - - - name: "YoutuReID" - task: "Person Re-Identification" - input_size: "128x256" - folder: "person_reid_youtureid" - acceptable_time: 800 - keyword: "person_reid_youtu" - - - name: "MP-PalmDet" - task: "Palm Detection" - input_size: "192x192" - folder: "palm_detection_mediapipe" - acceptable_time: 500 - keyword: "palm_detection_mediapipe" - - - name: "MP-HandPose" - task: "Hand Pose Estimation" - input_size: "224x224" - folder: "handpose_estimation_mediapipe" - acceptable_time: 500 - keyword: "handpose_estimation_mediapipe" - - - name: "MP-PersonDet" - task: "Person Detection" - input_size: "224x224" - folder: "person_detection_mediapipe" - acceptable_time: 1300 - keyword: "person_detection_mediapipe" - - - name: "MP-Pose" - task: "Pose Estimation" - input_size: "256x256" - folder: "pose_estimation_mediapipe" - acceptable_time: 700 - keyword: "pose_estimation_mediapipe" - - - name: "VitTrack" - task: "Object Tracking" - input_size: "1280x720" - folder: 
"object_tracking_vittrack" - acceptable_time: 1000 - keyword: "object_tracking_vittrack" - - -Devices: - - name: "Intel 12700K" - display_info: "Intel\n12700K\nCPU" - platform: "CPU" - - - name: "Atlas 200I DK A2" - display_info: "Atlas 200I DK A2\nAscend 310B\nCPU" - platform: "CPU" - - - name: "Atlas 200 DK" - display_info: "Atlas 200 DK\nAscend 310\nCPU" - platform: "CPU" - - - name: "Khadas VIM3" - display_info: "Khadas VIM3\nA311D\nCPU" - platform: "CPU" - - - name: "Khadas VIM4" - display_info: "Khadas VIM4\nA311D2\nCPU" - platform: "CPU" - - - name: "Khadas Edge2 (with RK3588)" - display_info: "Khadas Edge2\nRK3588S\nCPU" - platform: "CPU" - - - name: "Jetson Nano B01" - display_info: "Jetson Nano\nB01\nCPU" - platform: "CPU" - - - name: "Jetson Nano Orin" - display_info: "Jetson Nano\nOrin\nCPU" - platform: "CPU" - - - name: "Raspberry Pi 4B" - display_info: "Raspberry Pi 4B\nBCM2711\nCPU" - platform: "CPU" - - - name: "Horizon Sunrise X3 PI" - display_info: "Horizon Sunrise Pi\nX3\nCPU" - platform: "CPU" - - - name: "MAIX-III AX-PI" - display_info: "MAIX-III AX-Pi\nAX620A\nCPU" - platform: "CPU" - - - name: "Toybrick RV1126" - display_info: "Toybrick\nRV1126\nCPU" - platform: "CPU" - - - name: "StarFive VisionFive 2" - display_info: "StarFive VisionFive 2\nStarFive JH7110\nCPU" - platform: "CPU" - - - name: "Jetson Nano B01" - display_info: "Jetson Nano\nB01\nGPU" - platform: "GPU (CUDA-FP32)" - - - name: "Jetson Nano Orin" - display_info: "Jetson Nano\nOrin\nGPU" - platform: "GPU (CUDA-FP32)" - - - name: "Khadas VIM3" - display_info: "Khadas VIM3\nA311D\nNPU" - platform: "NPU (TIMVX)" - - - name: "Atlas 200 DK" - display_info: "Atlas 200 DK\nAscend 310\nNPU" - platform: "NPU (CANN)" - -Suffixes: - - model: "MobileNet-V1" - device: "Khadas VIM3" - platform: "NPU (TIMVX)" - str: "\\*" - - - model: "MobileNet-V2" - device: "Khadas VIM3" - platform: "NPU (TIMVX)" - str: "\\*" diff --git a/benchmark/utils/__init__.py b/benchmark/utils/__init__.py deleted file 
mode 100644 index fb908474..00000000 --- a/benchmark/utils/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .factory import (METRICS, DATALOADERS) -from .metrics import * -from .dataloaders import * - -__all__ = ['METRICS', 'DATALOADERS'] \ No newline at end of file diff --git a/benchmark/utils/dataloaders/__init__.py b/benchmark/utils/dataloaders/__init__.py deleted file mode 100644 index 5d0e4aed..00000000 --- a/benchmark/utils/dataloaders/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .base import BaseImageLoader, BaseVideoLoader -from .classification import ClassificationImageLoader -from .recognition import RecognitionImageLoader -from .tracking import TrackingVideoLoader - -__all__ = ['BaseImageLoader', 'BaseVideoLoader', 'ClassificationImageLoader', 'RecognitionImageLoader', 'TrackingVideoLoader'] \ No newline at end of file diff --git a/benchmark/utils/dataloaders/base.py b/benchmark/utils/dataloaders/base.py deleted file mode 100644 index 229b1b21..00000000 --- a/benchmark/utils/dataloaders/base.py +++ /dev/null @@ -1,12 +0,0 @@ -from .base_dataloader import _BaseImageLoader, _BaseVideoLoader -from ..factory import DATALOADERS - -@DATALOADERS.register -class BaseImageLoader(_BaseImageLoader): - def __init__(self, **kwargs): - super().__init__(**kwargs) - -@DATALOADERS.register -class BaseVideoLoader(_BaseVideoLoader): - def __init__(self, **kwargs): - super().__init__(**kwargs) \ No newline at end of file diff --git a/benchmark/utils/dataloaders/base_dataloader.py b/benchmark/utils/dataloaders/base_dataloader.py deleted file mode 100644 index 89416f19..00000000 --- a/benchmark/utils/dataloaders/base_dataloader.py +++ /dev/null @@ -1,84 +0,0 @@ -import os - -import cv2 as cv - -class _BaseImageLoader: - def __init__(self, **kwargs): - self._path = kwargs.pop('path', None) - assert self._path, 'Benchmark[\'data\'][\'path\'] cannot be empty.' 
- - self._files = kwargs.pop('files', None) - assert self._files, 'Benchmark[\'data\'][\'files\'] cannot be empty' - self._len_files = len(self._files) - - self._sizes = kwargs.pop('sizes', [[0, 0]]) - self._len_sizes = len(self._sizes) - - @property - def name(self): - return self.__class__.__name__ - - def __len__(self): - return self._len_files * self._len_sizes - - def __iter__(self): - for filename in self._files: - image = cv.imread(os.path.join(self._path, filename)) - if [0, 0] in self._sizes: - yield filename, image - else: - for size in self._sizes: - image_r = cv.resize(image, size) - yield filename, image_r - -class _VideoStream: - def __init__(self, filepath): - self._filepath = filepath - self._video = cv.VideoCapture(self._filepath) - - def __iter__(self): - while True: - has_frame, frame = self._video.read() - if has_frame: - yield frame - else: - break - - def __next__(self): - while True: - has_frame, frame = self._video.read() - if has_frame: - return frame - else: - break - - def reload(self): - self._video = cv.VideoCapture(self._filepath) - - def getFrameSize(self): - w = int(self._video.get(cv.CAP_PROP_FRAME_WIDTH)) - h = int(self._video.get(cv.CAP_PROP_FRAME_HEIGHT)) - return [w, h] - - -class _BaseVideoLoader: - def __init__(self, **kwargs): - self._path = kwargs.pop('path', None) - assert self._path, 'Benchmark[\'data\'][\'path\'] cannot be empty.' - - self._files = kwargs.pop('files', None) - assert self._files,'Benchmark[\'data\'][\'files\'] cannot be empty.' 
- - self._streams = dict() - for filename in self._files: - self._streams[filename] = _VideoStream(os.path.join(self._path, filename)) - - @property - def name(self): - return self.__class__.__name__ - - def __len__(self): - return len(self._files) - - def __getitem__(self, idx): - return self._files[idx], self._streams[idx] \ No newline at end of file diff --git a/benchmark/utils/dataloaders/classification.py b/benchmark/utils/dataloaders/classification.py deleted file mode 100644 index 72c50b89..00000000 --- a/benchmark/utils/dataloaders/classification.py +++ /dev/null @@ -1,42 +0,0 @@ -import os - -import numpy as np -import cv2 as cv - -from .base_dataloader import _BaseImageLoader -from ..factory import DATALOADERS - -@DATALOADERS.register -class ClassificationImageLoader(_BaseImageLoader): - def __init__(self, **kwargs): - super().__init__(**kwargs) - - self._to_rgb = kwargs.pop('toRGB', False) - self._center_crop = kwargs.pop('centerCrop', None) - - def _toRGB(self, image): - return cv.cvtColor(image, cv.COLOR_BGR2RGB) - - def _centerCrop(self, image): - h, w, _ = image.shape - w_crop = int((w - self._center_crop) / 2.) - assert w_crop >= 0 - h_crop = int((h - self._center_crop) / 2.) 
- assert h_crop >= 0 - return image[w_crop:w-w_crop, h_crop:h-h_crop, :] - - def __iter__(self): - for filename in self._files: - image = cv.imread(os.path.join(self._path, filename)) - - if self._to_rgb: - image = self._toRGB(image) - - if [0, 0] in self._sizes: - yield filename, image - else: - for size in self._sizes: - image = cv.resize(image, size) - if self._center_crop: - image = self._centerCrop(image) - yield filename, image \ No newline at end of file diff --git a/benchmark/utils/dataloaders/recognition.py b/benchmark/utils/dataloaders/recognition.py deleted file mode 100644 index 62c77f23..00000000 --- a/benchmark/utils/dataloaders/recognition.py +++ /dev/null @@ -1,33 +0,0 @@ -import os - -import numpy as np -import cv2 as cv - -from .base_dataloader import _BaseImageLoader -from ..factory import DATALOADERS - -@DATALOADERS.register -class RecognitionImageLoader(_BaseImageLoader): - def __init__(self, **kwargs): - super().__init__(**kwargs) - - self._labels = self._load_label() - - def _load_label(self): - labels = dict.fromkeys(self._files, None) - for filename in self._files: - if os.path.exists(os.path.join(self._path, '{}.txt'.format(filename[:-4]))): - labels[filename] = np.loadtxt(os.path.join(self._path, '{}.txt'.format(filename[:-4])), ndmin=2) - else: - labels[filename] = None - return labels - - def __iter__(self): - for filename in self._files: - image = cv.imread(os.path.join(self._path, filename)) - if [0, 0] in self._sizes: - yield filename, image, self._labels[filename] - else: - for size in self._sizes: - image_r = cv.resize(image, size) - yield filename, image_r, self._labels[filename] \ No newline at end of file diff --git a/benchmark/utils/dataloaders/tracking.py b/benchmark/utils/dataloaders/tracking.py deleted file mode 100644 index 1797fd65..00000000 --- a/benchmark/utils/dataloaders/tracking.py +++ /dev/null @@ -1,27 +0,0 @@ -import os -import numpy as np - -from .base_dataloader import _BaseVideoLoader -from ..factory import 
DATALOADERS - -@DATALOADERS.register -class TrackingVideoLoader(_BaseVideoLoader): - def __init__(self, **kwargs): - super().__init__(**kwargs) - - self._first_frames = dict() - for filename in self._files: - stream = self._streams[filename] - self._first_frames[filename] = next(stream) - - self._rois = self._load_roi() - - def _load_roi(self): - rois = dict.fromkeys(self._files, None) - for filename in self._files: - rois[filename] = np.loadtxt(os.path.join(self._path, '{}.txt'.format(filename[:-4])), dtype=np.int32, ndmin=2) - return rois - - def __getitem__(self, idx): - filename = self._files[idx] - return filename, self._streams[filename], self._first_frames[filename], self._rois[filename] \ No newline at end of file diff --git a/benchmark/utils/factory.py b/benchmark/utils/factory.py deleted file mode 100644 index 6325e7a0..00000000 --- a/benchmark/utils/factory.py +++ /dev/null @@ -1,20 +0,0 @@ -class Registery: - def __init__(self, name): - self._name = name - self._dict = dict() - - def get(self, key): - if key in self._dict: - return self._dict[key] - else: - return self._dict['Base'] - - def register(self, item): - self._dict[item.__name__] = item - # renaming *ImageLoader/*VideoLoader - if 'ImageLoader' in item.__name__: - name = item.__name__.replace('ImageLoader', '') - self._dict[name] = item - -METRICS = Registery('Metrics') -DATALOADERS = Registery('DataLoaders') \ No newline at end of file diff --git a/benchmark/utils/metrics/__init__.py b/benchmark/utils/metrics/__init__.py deleted file mode 100644 index 9f524870..00000000 --- a/benchmark/utils/metrics/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .base import Base -from .detection import Detection -from .recognition import Recognition -from .tracking import Tracking - -__all__ = ['Base', 'Detection', 'Recognition', 'Tracking'] \ No newline at end of file diff --git a/benchmark/utils/metrics/base.py b/benchmark/utils/metrics/base.py deleted file mode 100644 index 15eeafb7..00000000 --- 
a/benchmark/utils/metrics/base.py +++ /dev/null @@ -1,24 +0,0 @@ -import cv2 as cv - -from .base_metric import BaseMetric -from ..factory import METRICS - -@METRICS.register -class Base(BaseMetric): - def __init__(self, **kwargs): - super().__init__(**kwargs) - - def forward(self, model, *args, **kwargs): - img = args[0] - - # warmup - for _ in range(self._warmup): - model.infer(img) - # repeat - self._timer.reset() - for _ in range(self._repeat): - self._timer.start() - model.infer(img) - self._timer.stop() - - return self._timer.getRecords() diff --git a/benchmark/utils/metrics/base_metric.py b/benchmark/utils/metrics/base_metric.py deleted file mode 100644 index 8cfb6c87..00000000 --- a/benchmark/utils/metrics/base_metric.py +++ /dev/null @@ -1,41 +0,0 @@ -import cv2 as cv - -from ..timer import Timer - -class BaseMetric: - def __init__(self, **kwargs): - self._warmup = kwargs.pop('warmup', 3) - self._repeat = kwargs.pop('repeat', 10) - - self._timer = Timer() - - def _calcMedian(self, records): - ''' Return the median of records - ''' - l = len(records) - mid = int(l / 2) - if l % 2 == 0: - return (records[mid] + records[mid - 1]) / 2 - else: - return records[mid] - - def _calcMean(self, records, drop_largest=1): - ''' Return the mean of records after dropping drop_largest - ''' - l = len(records) - if l <= drop_largest: - print('len(records)({}) <= drop_largest({}), stop dropping.'.format(l, drop_largest)) - records_sorted = sorted(records, reverse=True) - return sum(records_sorted[drop_largest:]) / (l - drop_largest) - - def _calcMin(self, records): - return min(records) - - def getPerfStats(self, records): - mean = self._calcMean(records, int(len(records) / 10)) - median = self._calcMedian(records) - minimum = self._calcMin(records) - return [mean, median, minimum] - - def forward(self, model, *args, **kwargs): - raise NotImplementedError('Not implemented') diff --git a/benchmark/utils/metrics/detection.py b/benchmark/utils/metrics/detection.py deleted file 
mode 100644 index ce1a6532..00000000 --- a/benchmark/utils/metrics/detection.py +++ /dev/null @@ -1,29 +0,0 @@ -import cv2 as cv - -from .base_metric import BaseMetric -from ..factory import METRICS - -@METRICS.register -class Detection(BaseMetric): - def __init__(self, **kwargs): - super().__init__(**kwargs) - - def forward(self, model, *args, **kwargs): - img = args[0] - size = [img.shape[1], img.shape[0]] - try: - model.setInputSize(size) - except: - pass - - # warmup - for _ in range(self._warmup): - model.infer(img) - # repeat - self._timer.reset() - for _ in range(self._repeat): - self._timer.start() - model.infer(img) - self._timer.stop() - - return self._timer.getRecords() diff --git a/benchmark/utils/metrics/recognition.py b/benchmark/utils/metrics/recognition.py deleted file mode 100644 index b34b44bd..00000000 --- a/benchmark/utils/metrics/recognition.py +++ /dev/null @@ -1,31 +0,0 @@ -import cv2 as cv - -from .base_metric import BaseMetric -from ..factory import METRICS - -@METRICS.register -class Recognition(BaseMetric): - def __init__(self, **kwargs): - super().__init__(**kwargs) - - def forward(self, model, *args, **kwargs): - img, bboxes = args - - self._timer.reset() - if bboxes is not None: - for idx, bbox in enumerate(bboxes): - for _ in range(self._warmup): - model.infer(img, bbox) - for _ in range(self._repeat): - self._timer.start() - model.infer(img, bbox) - self._timer.stop() - else: - for _ in range(self._warmup): - model.infer(img, None) - for _ in range(self._repeat): - self._timer.start() - model.infer(img, None) - self._timer.stop() - - return self._timer.getRecords() diff --git a/benchmark/utils/metrics/tracking.py b/benchmark/utils/metrics/tracking.py deleted file mode 100644 index 29cede13..00000000 --- a/benchmark/utils/metrics/tracking.py +++ /dev/null @@ -1,26 +0,0 @@ -import cv2 as cv - -from .base_metric import BaseMetric -from ..factory import METRICS - -@METRICS.register -class Tracking(BaseMetric): - def __init__(self, 
**kwargs): - super().__init__(**kwargs) - - # if self._warmup or self._repeat: - # print('warmup and repeat in metric for tracking do not function.') - - def forward(self, model, *args, **kwargs): - stream, first_frame, rois = args - - for roi in rois: - stream.reload() - model.init(first_frame, tuple(roi)) - self._timer.reset() - for frame in stream: - self._timer.start() - model.infer(frame) - self._timer.stop() - - return self._timer.getRecords() diff --git a/benchmark/utils/timer.py b/benchmark/utils/timer.py deleted file mode 100644 index dcff0cbe..00000000 --- a/benchmark/utils/timer.py +++ /dev/null @@ -1,20 +0,0 @@ -import cv2 as cv - -class Timer: - def __init__(self): - self._tm = cv.TickMeter() - self._record = [] - - def start(self): - self._tm.start() - - def stop(self): - self._tm.stop() - self._record.append(self._tm.getTimeMilli()) - self._tm.reset() - - def reset(self): - self._record = [] - - def getRecords(self): - return self._record \ No newline at end of file diff --git a/models/__init__.py b/models/__init__.py deleted file mode 100644 index d51c45b7..00000000 --- a/models/__init__.py +++ /dev/null @@ -1,102 +0,0 @@ -from pathlib import Path -import glob -import os - -from .face_detection_yunet.yunet import YuNet -from .text_recognition_crnn.crnn import CRNN -from .face_recognition_sface.sface import SFace -from .image_classification_ppresnet.ppresnet import PPResNet -from .human_segmentation_pphumanseg.pphumanseg import PPHumanSeg -from .person_detection_mediapipe.mp_persondet import MPPersonDet -from .pose_estimation_mediapipe.mp_pose import MPPose -from .qrcode_wechatqrcode.wechatqrcode import WeChatQRCode -from .person_reid_youtureid.youtureid import YoutuReID -from .image_classification_mobilenet.mobilenet import MobileNet -from .palm_detection_mediapipe.mp_palmdet import MPPalmDet -from .handpose_estimation_mediapipe.mp_handpose import MPHandPose -from .license_plate_detection_yunet.lpd_yunet import LPD_YuNet -from 
.object_detection_nanodet.nanodet import NanoDet -from .object_detection_yolox.yolox import YoloX -from .facial_expression_recognition.facial_fer_model import FacialExpressionRecog -from .object_tracking_vittrack.vittrack import VitTrack -from .text_detection_ppocr.ppocr_det import PPOCRDet -from .image_segmentation_efficientsam.efficientSAM import EfficientSAM - -class ModuleRegistery: - def __init__(self, name): - self._name = name - self._dict = dict() - - self._base_path = Path(__file__).parent - - def get(self, key): - ''' - Returns a tuple with: - - a module handler, - - a list of model file paths - ''' - return self._dict[key] - - def register(self, item): - ''' - Registers given module handler along with paths of model files - ''' - # search for model files - model_dir = str(self._base_path / item.__module__.split(".")[1]) - fp32_model_paths = [] - fp16_model_paths = [] - int8_model_paths = [] - int8bq_model_paths = [] - # onnx - ret_onnx = sorted(glob.glob(os.path.join(model_dir, "*.onnx"))) - if "object_tracking" in item.__module__: - # object tracking models usually have multiple parts - fp32_model_paths = [ret_onnx] - else: - for r in ret_onnx: - if "int8" in r: - int8_model_paths.append([r]) - elif "fp16" in r: # exclude fp16 for now - fp16_model_paths.append([r]) - elif "blocked" in r: - int8bq_model_paths.append([r]) - else: - fp32_model_paths.append([r]) - # caffe - ret_caffemodel = sorted(glob.glob(os.path.join(model_dir, "*.caffemodel"))) - ret_prototxt = sorted(glob.glob(os.path.join(model_dir, "*.prototxt"))) - caffe_models = [] - for caffemodel, prototxt in zip(ret_caffemodel, ret_prototxt): - caffe_models += [prototxt, caffemodel] - if caffe_models: - fp32_model_paths.append(caffe_models) - - all_model_paths = dict( - fp32=fp32_model_paths, - fp16=fp16_model_paths, - int8=int8_model_paths, - int8bq=int8bq_model_paths - ) - - self._dict[item.__name__] = (item, all_model_paths) - -MODELS = ModuleRegistery('Models') -MODELS.register(YuNet) 
-MODELS.register(CRNN) -MODELS.register(SFace) -MODELS.register(PPResNet) -MODELS.register(PPHumanSeg) -MODELS.register(MPPersonDet) -MODELS.register(MPPose) -MODELS.register(WeChatQRCode) -MODELS.register(YoutuReID) -MODELS.register(MobileNet) -MODELS.register(MPPalmDet) -MODELS.register(MPHandPose) -MODELS.register(LPD_YuNet) -MODELS.register(NanoDet) -MODELS.register(YoloX) -MODELS.register(FacialExpressionRecog) -MODELS.register(VitTrack) -MODELS.register(PPOCRDet) -MODELS.register(EfficientSAM) \ No newline at end of file diff --git a/models/deblurring_nafnet/CMakeLists.txt b/models/deblurring_nafnet/CMakeLists.txt deleted file mode 100644 index 63d95a06..00000000 --- a/models/deblurring_nafnet/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.22.2) -project(opencv_zoo_deblurring_nafnet) - -set(OPENCV_VERSION "5.0.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") - -# Find OpenCV -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) - -add_executable(opencv_zoo_deblurring_nafnet demo.cpp) -target_link_libraries(opencv_zoo_deblurring_nafnet ${OpenCV_LIBS}) diff --git a/models/deblurring_nafnet/LICENSE b/models/deblurring_nafnet/LICENSE deleted file mode 100644 index 50927b3b..00000000 --- a/models/deblurring_nafnet/LICENSE +++ /dev/null @@ -1,228 +0,0 @@ -MIT License - -Copyright (c) 2022 megvii-model - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - - -BasicSR -Copyright 2018-2020 BasicSR Authors - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright 2018-2020 BasicSR Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/models/deblurring_nafnet/README.md b/models/deblurring_nafnet/README.md deleted file mode 100644 index 56562d62..00000000 --- a/models/deblurring_nafnet/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# NAFNet - -NAFNet is a lightweight image deblurring model that eliminates nonlinear activations to achieve state-of-the-art performance with minimal computational cost. - -Notes: - -- Model source: [.pth](https://drive.google.com/file/d/14D4V4raNYIOhETfcuuLI3bGLB-OYIv6X/view). -- ONNX Model link: [ONNX](https://drive.google.com/uc?export=dowload&id=1ZLRhkpCekNruJZggVpBgSoCx3k7bJ-5v) - -## Requirements -Install latest OpenCV >=5.0.0 and CMake >= 3.22.2 to get started with. - -## Demo - -### Python - -Run the following command to try the demo: - -```shell -# deblur the default input image -python demo.py -# deblur the user input image -python demo.py --input /path/to/image - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . 
-cmake --build build - -# deblur the default input image -./build/demo -# deblur the user input image -./build/demo --input=/path/to/image -# get help messages -./build/demo -h -``` - -### Example outputs - -![licenseplate_motion](./example_outputs/licenseplate_motion_output.jpg) - -## License - -All files in this directory are licensed under [MIT License](./LICENSE). - -## Reference - -- https://github.com/megvii-research/NAFNet diff --git a/models/deblurring_nafnet/deblurring_nafnet_2025may.onnx b/models/deblurring_nafnet/deblurring_nafnet_2025may.onnx deleted file mode 100644 index 59f8bbfb..00000000 --- a/models/deblurring_nafnet/deblurring_nafnet_2025may.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:07263f416febecce10193dd648e950b22e397cf521eedab1a114ef77b2bc9587 -size 91736251 diff --git a/models/deblurring_nafnet/demo.cpp b/models/deblurring_nafnet/demo.cpp deleted file mode 100644 index 189920fd..00000000 --- a/models/deblurring_nafnet/demo.cpp +++ /dev/null @@ -1,89 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -using namespace cv; -using namespace cv::dnn; -using namespace std; - -class Nafnet { -public: - Nafnet(const string& modelPath) { - loadModel(modelPath); - } - - // Function to set up the input image and process it - void process(const Mat& image, Mat& result) { - Mat blob = blobFromImage(image, 0.00392, Size(image.cols, image.rows), Scalar(0, 0, 0), true, false, CV_32F); - net.setInput(blob); - Mat output = net.forward(); - postProcess(output, result); - } - -private: - Net net; - - // Load Model - void loadModel(const string modelPath) { - net = readNetFromONNX(modelPath); - net.setPreferableBackend(DNN_BACKEND_DEFAULT); - net.setPreferableTarget(DNN_TARGET_CPU); - } - - void postProcess(const Mat& output, Mat& result) { - Mat output_transposed(3, &output.size[1], CV_32F, const_cast(reinterpret_cast(output.ptr()))); - - vector channels; - for (int i = 0; i < 3; ++i) { 
- channels.push_back(Mat(output_transposed.size[1], output_transposed.size[2], CV_32F, - output_transposed.ptr(i))); - } - merge(channels, result); - result.convertTo(result, CV_8UC3, 255.0); - cvtColor(result, result, COLOR_RGB2BGR); - } -}; - -int main(int argc, char** argv) { - const string about = - "This sample demonstrates deblurring with nafnet deblurring model.\n\n"; - const string keys = - "{ help h | | Print help message. }" - "{ input i | example_outputs/licenseplate_motion.jpg | Path to input image.}" - "{ model | deblurring_nafnet_2025may.onnx | Path to the nafnet deblurring onnx model file }"; - - CommandLineParser parser(argc, argv, keys); - if (parser.has("help")) - { - cout << about << endl; - parser.printMessage(); - return -1; - } - - parser = CommandLineParser(argc, argv, keys); - string model = parser.get("model"); - parser.about(about); - - Mat image = imread(parser.get("input")); - if (image.empty()) { - cerr << "Error: Input image could not be loaded." << endl; - return -1; - } - - // Create an instance of Dexined - Nafnet nafnet(model); - - Mat result; - nafnet.process(image, result); - - imshow("Input", image); - imshow("Output", result); - waitKey(0); - - destroyAllWindows(); - return 0; -} diff --git a/models/deblurring_nafnet/demo.py b/models/deblurring_nafnet/demo.py deleted file mode 100644 index 61b010ab..00000000 --- a/models/deblurring_nafnet/demo.py +++ /dev/null @@ -1,41 +0,0 @@ -import cv2 as cv -import argparse - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, [p.split('-')[0] for p in str_version.split('.')])) -assert opencv_python_version(cv.__version__) >= opencv_python_version("5.0.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from nafnet import Nafnet - -def get_args_parser(func_args): - parser = argparse.ArgumentParser(add_help=False) - parser.add_argument('--input', help='Path to input image.', 
default='example_outputs/licenseplate_motion.jpg', required=False) - parser.add_argument('--model', help='Path to nafnet deblurring onnx model', default='deblurring_nafnet_2025may.onnx', required=False) - - args, _ = parser.parse_known_args() - parser = argparse.ArgumentParser(parents=[parser], - description='', formatter_class=argparse.RawTextHelpFormatter) - return parser.parse_args(func_args) - -def main(func_args=None): - args = get_args_parser(func_args) - - nafnet = Nafnet(modelPath=args.model) - - input_image = cv.imread(args.input) - - tm = cv.TickMeter() - tm.start() - result = nafnet.infer(input_image) - tm.stop() - label = 'Inference time: {:.2f} ms'.format(tm.getTimeMilli()) - cv.putText(result, label, (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 0)) - - cv.imshow("Input image", input_image) - cv.imshow("Output image", result) - cv.waitKey(0) - cv.destroyAllWindows() - -if __name__ == '__main__': - main() diff --git a/models/deblurring_nafnet/example_outputs/licenseplate_motion.jpg b/models/deblurring_nafnet/example_outputs/licenseplate_motion.jpg deleted file mode 100644 index e55efcc2..00000000 --- a/models/deblurring_nafnet/example_outputs/licenseplate_motion.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:571b74dde1f171fc3a502c4f412c1b88faaf393f5530673bde67c4e76ec27273 -size 57018 diff --git a/models/deblurring_nafnet/example_outputs/licenseplate_motion_output.jpg b/models/deblurring_nafnet/example_outputs/licenseplate_motion_output.jpg deleted file mode 100644 index 35ee6b84..00000000 --- a/models/deblurring_nafnet/example_outputs/licenseplate_motion_output.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a5d12b85e2394313d1bb16939927df5b904521ef866727b7a447c266e9377dcc -size 67677 diff --git a/models/deblurring_nafnet/nafnet.py b/models/deblurring_nafnet/nafnet.py deleted file mode 100644 index 245a9b97..00000000 --- a/models/deblurring_nafnet/nafnet.py +++ 
/dev/null @@ -1,36 +0,0 @@ -import cv2 as cv -import numpy as np - -class Nafnet: - def __init__(self, modelPath='deblurring_nafnet_2025may.onnx', backendId=0, targetId=0): - self._modelPath = modelPath - self._backendId = backendId - self._targetId = targetId - - # Load the model - self._model = cv.dnn.readNetFromONNX(self._modelPath) - self.setBackendAndTarget(self._backendId, self._targetId) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def infer(self, image): - image_blob = cv.dnn.blobFromImage(image, 0.00392, (image.shape[1], image.shape[0]), (0,0,0), True, False) - - self._model.setInput(image_blob) - output = self._model.forward() - - # Postprocessing - result = output[0] - result = np.transpose(result, (1, 2, 0)) - result = np.clip(result * 255.0, 0, 255).astype(np.uint8) - result = cv.cvtColor(result, cv.COLOR_RGB2BGR) - - return result diff --git a/models/edge_detection_dexined/CMakeLists.txt b/models/edge_detection_dexined/CMakeLists.txt deleted file mode 100644 index 64c483b4..00000000 --- a/models/edge_detection_dexined/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.22.2) -project(opencv_zoo_edge_detection_dexined) - -set(OPENCV_VERSION "5.0.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") - -# Find OpenCV -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) - -add_executable(edge_detection edge_detection.cpp) -target_link_libraries(edge_detection ${OpenCV_LIBS}) diff --git a/models/edge_detection_dexined/LICENSE b/models/edge_detection_dexined/LICENSE deleted file mode 100644 index 1caaa72f..00000000 --- a/models/edge_detection_dexined/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Xavier 
Soria Poma - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file diff --git a/models/edge_detection_dexined/README.md b/models/edge_detection_dexined/README.md deleted file mode 100644 index fadf8914..00000000 --- a/models/edge_detection_dexined/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# DexiNed - -DexiNed is a Convolutional Neural Network (CNN) architecture for edge detection. - -Notes: - -- Model source: [ONNX](https://drive.google.com/file/d/1u_qXqXqaIP_SqdGaq4CbZyjzkZb02XTs/view). -- Model source: [.pth](https://drive.google.com/file/d/1V56vGTsu7GYiQouCIKvTWl5UKCZ6yCNu/view). -- This ONNX model has fixed input shape, but OpenCV DNN infers on the exact shape of input image. See https://github.com/opencv/opencv_zoo/issues/44 for more information. - -## Requirements -Install latest OpenCV >=5.0.0 and CMake >= 3.22.2 to get started with. 
- -## Demo - -### Python - -Run the following command to try the demo: - -```shell -# detect on camera input -python demo.py -# detect on an image -python demo.py --input /path/to/image - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/demo -# detect on an image -./build/demo --input=/path/to/image -# get help messages -./build/demo -h -``` - -### Example outputs - -![chicky](./example_outputs/chicky_output.jpg) - -## License - -All files in this directory are licensed under [MIT License](./LICENSE). - -## Reference - -- https://github.com/xavysp/DexiNed \ No newline at end of file diff --git a/models/edge_detection_dexined/demo.cpp b/models/edge_detection_dexined/demo.cpp deleted file mode 100644 index 66dfc4f5..00000000 --- a/models/edge_detection_dexined/demo.cpp +++ /dev/null @@ -1,138 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -using namespace cv; -using namespace cv::dnn; -using namespace std; - -class Dexined { -public: - Dexined(const string& modelPath) { - loadModel(modelPath); - } - - // Function to set up the input image and process it - void processFrame(const Mat& image, Mat& result) { - Mat blob = blobFromImage(image, 1.0, Size(512, 512), Scalar(103.5, 116.2, 123.6), false, false, CV_32F); - net.setInput(blob); - applyDexined(image, result); - } - -private: - Net net; - - // Load Model - void loadModel(const string modelPath) { - net = readNetFromONNX(modelPath); - net.setPreferableBackend(DNN_BACKEND_DEFAULT); - net.setPreferableTarget(DNN_TARGET_CPU); - } - - // Function to apply sigmoid activation - static void sigmoid(Mat& input) { - exp(-input, input); // e^-input - input = 1.0 / (1.0 + input); // 1 / (1 + e^-input) - } - - // Function to process the neural network 
output to generate edge maps - static pair postProcess(const vector& output, int height, int width) { - vector preds; - preds.reserve(output.size()); - for (const Mat &p : output) { - Mat img; - Mat processed; - if (p.dims == 4 && p.size[0] == 1 && p.size[1] == 1) { - processed = p.reshape(0, {p.size[2], p.size[3]}); - } else { - processed = p.clone(); - } - sigmoid(processed); - normalize(processed, img, 0, 255, NORM_MINMAX, CV_8U); - resize(img, img, Size(width, height)); - preds.push_back(img); - } - Mat fuse = preds.back(); - Mat ave = Mat::zeros(height, width, CV_32F); - for (Mat &pred : preds) { - Mat temp; - pred.convertTo(temp, CV_32F); - ave += temp; - } - ave /= static_cast(preds.size()); - ave.convertTo(ave, CV_8U); - return {fuse, ave}; - } - - // Function to apply the Dexined model - void applyDexined(const Mat& image, Mat& result) { - int originalWidth = image.cols; - int originalHeight = image.rows; - vector outputs; - net.forward(outputs); - pair res = postProcess(outputs, originalHeight, originalWidth); - result = res.first; // or res.second for average edge map - } -}; - -int main(int argc, char** argv) { - const string about = - "This sample demonstrates edge detection with dexined edge detection techniques.\n\n"; - const string keys = - "{ help h | | Print help message. }" - "{ input i | | Path to input image or video file. 
Skip this argument to capture frames from a camera.}" - "{ model | edge_detection_dexined_2024sep.onnx | Path to the dexined.onnx model file }"; - - CommandLineParser parser(argc, argv, keys); - if (parser.has("help")) - { - cout << about << endl; - parser.printMessage(); - return -1; - } - - parser = CommandLineParser(argc, argv, keys); - string model = parser.get("model"); - parser.about(about); - - VideoCapture cap; - if (parser.has("input")) - cap.open(samples::findFile(parser.get("input"))); - else - cap.open(0); - - namedWindow("Input", WINDOW_AUTOSIZE); - namedWindow("Output", WINDOW_AUTOSIZE); - moveWindow("Output", 200, 0); - - // Create an instance of Dexined - Dexined dexined(model); - Mat image; - - for (;;){ - cap >> image; - if (image.empty()) - { - cout << "Press any key to exit" << endl; - waitKey(); - break; - } - - Mat result; - dexined.processFrame(image, result); - - imshow("Input", image); - imshow("Output", result); - int key = waitKey(1); - if (key == 27 || key == 'q') - { - break; - } - } - destroyAllWindows(); - return 0; -} diff --git a/models/edge_detection_dexined/demo.py b/models/edge_detection_dexined/demo.py deleted file mode 100644 index ffc6992f..00000000 --- a/models/edge_detection_dexined/demo.py +++ /dev/null @@ -1,51 +0,0 @@ -import cv2 as cv -import argparse -from dexined import Dexined - -def get_args_parser(func_args): - parser = argparse.ArgumentParser(add_help=False) - parser.add_argument('--input', help='Path to input image or video file. 
Skip this argument to capture frames from a camera.', default=0, required=False) - parser.add_argument('--model', help='Path to dexined.onnx', default='edge_detection_dexined_2024sep.onnx', required=False) - - args, _ = parser.parse_known_args() - parser = argparse.ArgumentParser(parents=[parser], - description='', formatter_class=argparse.RawTextHelpFormatter) - return parser.parse_args(func_args) - -def main(func_args=None): - args = get_args_parser(func_args) - - dexined = Dexined(modelPath=args.model) - - # Open video or capture from camera - cap = cv.VideoCapture(cv.samples.findFile(args.input) if args.input else 0) - if not cap.isOpened(): - print("Failed to open the input video") - exit(-1) - - cv.namedWindow('Input', cv.WINDOW_AUTOSIZE) - cv.namedWindow('Output', cv.WINDOW_AUTOSIZE) - cv.moveWindow('Output', 200, 50) - - # Process frames - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, image = cap.read() - if not hasFrame: - print("Press any key to exit") - cv.waitKey(0) - break - - tm.start() - result = dexined.infer(image) - tm.stop() - label = 'Inference time: {:.2f} ms, FPS: {:.2f}'.format(tm.getTimeMilli(), tm.getFPS()) - - cv.imshow("Input", image) - cv.putText(result, label, (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255)) - cv.imshow("Output", result) - - cv.destroyAllWindows() - -if __name__ == '__main__': - main() diff --git a/models/edge_detection_dexined/dexined.py b/models/edge_detection_dexined/dexined.py deleted file mode 100644 index 9e4e7668..00000000 --- a/models/edge_detection_dexined/dexined.py +++ /dev/null @@ -1,50 +0,0 @@ -import cv2 as cv -import numpy as np - -class Dexined: - def __init__(self, modelPath='edge_detection_dexined_2024sep.onnx', backendId=0, targetId=0): - self._modelPath = modelPath - self._backendId = backendId - self._targetId = targetId - - # Load the model - self._model = cv.dnn.readNetFromONNX(self._modelPath) - self.setBackendAndTarget(self._backendId, self._targetId) - - @property - def 
name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - @staticmethod - def sigmoid(x): - return 1.0 / (1.0 + np.exp(-x)) - - def postProcessing(self, output, shape): - h, w = shape - preds = [] - for p in output: - img = self.sigmoid(p) - img = np.squeeze(img) - img = cv.normalize(img, None, 0, 255, cv.NORM_MINMAX, cv.CV_8U) - img = cv.resize(img, (w, h)) - preds.append(img) - fuse = preds[-1] - ave = np.array(preds, dtype=np.float32) - ave = np.uint8(np.mean(ave, axis=0)) - return fuse, ave - - def infer(self, image): - inp = cv.dnn.blobFromImage(image, 1.0, (512, 512), (103.5, 116.2, 123.6), swapRB=False, crop=False) - self._model.setInput(inp) - - # Forward pass through the model - out = self._model.forward() - result, _ = self.postProcessing(out, image.shape[:2]) - - return result diff --git a/models/edge_detection_dexined/edge_detection_dexined_2024sep.onnx b/models/edge_detection_dexined/edge_detection_dexined_2024sep.onnx deleted file mode 100644 index f573283e..00000000 --- a/models/edge_detection_dexined/edge_detection_dexined_2024sep.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a50d01dc8481549c7dedb9eb3e0123b810a016520df75e4669a504609982cdd0 -size 47235563 diff --git a/models/edge_detection_dexined/example_outputs/chicky.jpg b/models/edge_detection_dexined/example_outputs/chicky.jpg deleted file mode 100644 index 46170923..00000000 --- a/models/edge_detection_dexined/example_outputs/chicky.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:37ed3af84d13bd6cffe0ae282692a382021f21df15f8bd5cf5308c14e49bd754 -size 125551 diff --git a/models/edge_detection_dexined/example_outputs/chicky_output.jpg b/models/edge_detection_dexined/example_outputs/chicky_output.jpg deleted 
file mode 100644 index b0904f03..00000000 --- a/models/edge_detection_dexined/example_outputs/chicky_output.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b229c89f5b0517922795b9f34ef4d654dd8bbc5d5f4fdfb12874bf63f383bcda -size 77596 diff --git a/models/face_detection_yunet/CMakeLists.txt b/models/face_detection_yunet/CMakeLists.txt deleted file mode 100644 index 68ebadcd..00000000 --- a/models/face_detection_yunet/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.24.0) -project(opencv_zoo_face_detection_yunet) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") - -# Find OpenCV -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) - -add_executable(demo demo.cpp) -target_link_libraries(demo ${OpenCV_LIBS}) diff --git a/models/face_detection_yunet/LICENSE b/models/face_detection_yunet/LICENSE deleted file mode 100644 index 4cdf89a4..00000000 --- a/models/face_detection_yunet/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Shiqi Yu - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file diff --git a/models/face_detection_yunet/README.md b/models/face_detection_yunet/README.md index fda37c8d..ae47da50 100644 --- a/models/face_detection_yunet/README.md +++ b/models/face_detection_yunet/README.md @@ -11,6 +11,9 @@ Notes: - `face_detection_yunet_2023mar_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - Paper source: [Yunet: A tiny millisecond-level face detector](https://link.springer.com/article/10.1007/s11633-023-1423-y). +### Download +Model file: [face_detection_yunet_2023mar.onnx](https://huggingface.co/opencv/opencv_zoo/resolve/main/models/face_detection_yunet/face_detection_yunet_2023mar.onnx) + Results of accuracy evaluation with [tools/eval](../../tools/eval). 
| Models | Easy AP | Medium AP | Hard AP | diff --git a/models/face_detection_yunet/demo.cpp b/models/face_detection_yunet/demo.cpp deleted file mode 100644 index 45ebb747..00000000 --- a/models/face_detection_yunet/demo.cpp +++ /dev/null @@ -1,213 +0,0 @@ -#include "opencv2/opencv.hpp" - -#include -#include -#include -#include - -const std::map str2backend{ - {"opencv", cv::dnn::DNN_BACKEND_OPENCV}, {"cuda", cv::dnn::DNN_BACKEND_CUDA}, - {"timvx", cv::dnn::DNN_BACKEND_TIMVX}, {"cann", cv::dnn::DNN_BACKEND_CANN} -}; -const std::map str2target{ - {"cpu", cv::dnn::DNN_TARGET_CPU}, {"cuda", cv::dnn::DNN_TARGET_CUDA}, - {"npu", cv::dnn::DNN_TARGET_NPU}, {"cuda_fp16", cv::dnn::DNN_TARGET_CUDA_FP16} -}; - -class YuNet -{ -public: - YuNet(const std::string& model_path, - const cv::Size& input_size = cv::Size(320, 320), - float conf_threshold = 0.6f, - float nms_threshold = 0.3f, - int top_k = 5000, - int backend_id = 0, - int target_id = 0) - : model_path_(model_path), input_size_(input_size), - conf_threshold_(conf_threshold), nms_threshold_(nms_threshold), - top_k_(top_k), backend_id_(backend_id), target_id_(target_id) - { - model = cv::FaceDetectorYN::create(model_path_, "", input_size_, conf_threshold_, nms_threshold_, top_k_, backend_id_, target_id_); - } - - /* Overwrite the input size when creating the model. Size format: [Width, Height]. 
- */ - void setInputSize(const cv::Size& input_size) - { - input_size_ = input_size; - model->setInputSize(input_size_); - } - - cv::Mat infer(const cv::Mat image) - { - cv::Mat res; - model->detect(image, res); - return res; - } - -private: - cv::Ptr model; - - std::string model_path_; - cv::Size input_size_; - float conf_threshold_; - float nms_threshold_; - int top_k_; - int backend_id_; - int target_id_; -}; - -cv::Mat visualize(const cv::Mat& image, const cv::Mat& faces, float fps = -1.f) -{ - static cv::Scalar box_color{0, 255, 0}; - static std::vector landmark_color{ - cv::Scalar(255, 0, 0), // right eye - cv::Scalar( 0, 0, 255), // left eye - cv::Scalar( 0, 255, 0), // nose tip - cv::Scalar(255, 0, 255), // right mouth corner - cv::Scalar( 0, 255, 255) // left mouth corner - }; - static cv::Scalar text_color{0, 255, 0}; - - auto output_image = image.clone(); - - if (fps >= 0) - { - cv::putText(output_image, cv::format("FPS: %.2f", fps), cv::Point(0, 15), cv::FONT_HERSHEY_SIMPLEX, 0.5, text_color, 2); - } - - for (int i = 0; i < faces.rows; ++i) - { - // Draw bounding boxes - int x1 = static_cast(faces.at(i, 0)); - int y1 = static_cast(faces.at(i, 1)); - int w = static_cast(faces.at(i, 2)); - int h = static_cast(faces.at(i, 3)); - cv::rectangle(output_image, cv::Rect(x1, y1, w, h), box_color, 2); - - // Confidence as text - float conf = faces.at(i, 14); - cv::putText(output_image, cv::format("%.4f", conf), cv::Point(x1, y1+12), cv::FONT_HERSHEY_DUPLEX, 0.5, text_color); - - // Draw landmarks - for (int j = 0; j < landmark_color.size(); ++j) - { - int x = static_cast(faces.at(i, 2*j+4)), y = static_cast(faces.at(i, 2*j+5)); - cv::circle(output_image, cv::Point(x, y), 2, landmark_color[j], 2); - } - } - return output_image; -} - -int main(int argc, char** argv) -{ - cv::CommandLineParser parser(argc, argv, - "{help h | | Print this message}" - "{input i | | Set input to a certain image, omit if using camera}" - "{model m | face_detection_yunet_2023mar.onnx | 
Set path to the model}" - "{backend b | opencv | Set DNN backend}" - "{target t | cpu | Set DNN target}" - "{save s | false | Whether to save result image or not}" - "{vis v | false | Whether to visualize result image or not}" - /* model params below*/ - "{conf_threshold | 0.9 | Set the minimum confidence for the model to identify a face. Filter out faces of conf < conf_threshold}" - "{nms_threshold | 0.3 | Set the threshold to suppress overlapped boxes. Suppress boxes if IoU(box1, box2) >= nms_threshold, the one of higher score is kept.}" - "{top_k | 5000 | Keep top_k bounding boxes before NMS. Set a lower value may help speed up postprocessing.}" - ); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - std::string input_path = parser.get("input"); - std::string model_path = parser.get("model"); - std::string backend = parser.get("backend"); - std::string target = parser.get("target"); - bool save_flag = parser.get("save"); - bool vis_flag = parser.get("vis"); - - // model params - float conf_threshold = parser.get("conf_threshold"); - float nms_threshold = parser.get("nms_threshold"); - int top_k = parser.get("top_k"); - const int backend_id = str2backend.at(backend); - const int target_id = str2target.at(target); - - // Instantiate YuNet - YuNet model(model_path, cv::Size(320, 320), conf_threshold, nms_threshold, top_k, backend_id, target_id); - - // If input is an image - if (!input_path.empty()) - { - auto image = cv::imread(input_path); - - // Inference - model.setInputSize(image.size()); - auto faces = model.infer(image); - - // Print faces - std::cout << cv::format("%d faces detected:\n", faces.rows); - for (int i = 0; i < faces.rows; ++i) - { - int x1 = static_cast(faces.at(i, 0)); - int y1 = static_cast(faces.at(i, 1)); - int w = static_cast(faces.at(i, 2)); - int h = static_cast(faces.at(i, 3)); - float conf = faces.at(i, 14); - std::cout << cv::format("%d: x1=%d, y1=%d, w=%d, h=%d, conf=%.4f\n", i, x1, y1, w, h, conf); - } - - // 
Draw reults on the input image - if (save_flag || vis_flag) - { - auto res_image = visualize(image, faces); - if (save_flag) - { - std::cout << "Results are saved to result.jpg\n"; - cv::imwrite("result.jpg", res_image); - } - if (vis_flag) - { - cv::namedWindow(input_path, cv::WINDOW_AUTOSIZE); - cv::imshow(input_path, res_image); - cv::waitKey(0); - } - } - } - else // Call default camera - { - int device_id = 0; - auto cap = cv::VideoCapture(device_id); - int w = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); - int h = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); - model.setInputSize(cv::Size(w, h)); - - auto tick_meter = cv::TickMeter(); - cv::Mat frame; - while (cv::waitKey(1) < 0) - { - bool has_frame = cap.read(frame); - if (!has_frame) - { - std::cout << "No frames grabbed! Exiting ...\n"; - break; - } - - // Inference - tick_meter.start(); - cv::Mat faces = model.infer(frame); - tick_meter.stop(); - - // Draw results on the input image - auto res_image = visualize(frame, faces, (float)tick_meter.getFPS()); - // Visualize in a new window - cv::imshow("YuNet Demo", res_image); - - tick_meter.reset(); - } - } - - return 0; -} diff --git a/models/face_detection_yunet/demo.py b/models/face_detection_yunet/demo.py deleted file mode 100644 index d33a9db5..00000000 --- a/models/face_detection_yunet/demo.py +++ /dev/null @@ -1,146 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from yunet import YuNet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='YuNet: A Fast and Accurate CNN-based Face Detector (https://github.com/ShiqiYu/libfacedetection).') -parser.add_argument('--input', '-i', type=str, - help='Usage: Set input to a certain image, omit if using camera.') -parser.add_argument('--model', '-m', type=str, default='face_detection_yunet_2023mar.onnx', - help="Usage: Set model type, defaults to 'face_detection_yunet_2023mar.onnx'.") -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--conf_threshold', type=float, default=0.9, - help='Usage: Set the minimum needed confidence for the model to identify a face, defauts to 0.9. Smaller values may result in faster detection, but will limit accuracy. Filter out faces of confidence < conf_threshold.') -parser.add_argument('--nms_threshold', type=float, default=0.3, - help='Usage: Suppress bounding boxes of iou >= nms_threshold. 
Default = 0.3.') -parser.add_argument('--top_k', type=int, default=5000, - help='Usage: Keep top_k bounding boxes before NMS.') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. Invalid in case of camera input.') -args = parser.parse_args() - -def visualize(image, results, box_color=(0, 255, 0), text_color=(0, 0, 255), fps=None): - output = image.copy() - landmark_color = [ - (255, 0, 0), # right eye - ( 0, 0, 255), # left eye - ( 0, 255, 0), # nose tip - (255, 0, 255), # right mouth corner - ( 0, 255, 255) # left mouth corner - ] - - if fps is not None: - cv.putText(output, 'FPS: {:.2f}'.format(fps), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, text_color) - - for det in results: - bbox = det[0:4].astype(np.int32) - cv.rectangle(output, (bbox[0], bbox[1]), (bbox[0]+bbox[2], bbox[1]+bbox[3]), box_color, 2) - - conf = det[-1] - cv.putText(output, '{:.4f}'.format(conf), (bbox[0], bbox[1]+12), cv.FONT_HERSHEY_DUPLEX, 0.5, text_color) - - landmarks = det[4:14].astype(np.int32).reshape((5,2)) - for idx, landmark in enumerate(landmarks): - cv.circle(output, landmark, 2, landmark_color[idx], 2) - - return output - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # Instantiate YuNet - model = YuNet(modelPath=args.model, - inputSize=[320, 320], - confThreshold=args.conf_threshold, - nmsThreshold=args.nms_threshold, - topK=args.top_k, - backendId=backend_id, - targetId=target_id) - - # If input is an image - if args.input is not None: - image = cv.imread(args.input) - h, w, _ = image.shape - - # Inference - model.setInputSize([w, h]) - results = model.infer(image) - - # Print results - print('{} faces 
detected.'.format(results.shape[0])) - for idx, det in enumerate(results): - print('{}: {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f}'.format( - idx, *det[:-1]) - ) - - # Draw results on the input image - image = visualize(image, results) - - # Save results if save is true - if args.save: - print('Resutls saved to result.jpg\n') - cv.imwrite('result.jpg', image) - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - w = int(cap.get(cv.CAP_PROP_FRAME_WIDTH)) - h = int(cap.get(cv.CAP_PROP_FRAME_HEIGHT)) - model.setInputSize([w, h]) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - # Inference - tm.start() - results = model.infer(frame) # results is a tuple - tm.stop() - - # Draw results on the input image - frame = visualize(frame, results, fps=tm.getFPS()) - - # Visualize results in a new Window - cv.imshow('YuNet Demo', frame) - - tm.reset() diff --git a/models/face_detection_yunet/example_outputs/largest_selfie.jpg b/models/face_detection_yunet/example_outputs/largest_selfie.jpg deleted file mode 100644 index fe494914..00000000 --- a/models/face_detection_yunet/example_outputs/largest_selfie.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ab8413ad9bb4f53068f4fb63c6747e5989991dd02241c923d5595b614ecf2bf6 -size 1147146 diff --git a/models/face_detection_yunet/example_outputs/yunet_demo.gif b/models/face_detection_yunet/example_outputs/yunet_demo.gif deleted file mode 100644 index 099beab6..00000000 --- a/models/face_detection_yunet/example_outputs/yunet_demo.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:db90459c308b14dd423014eabf3253f5f6147fbe7906e81429a7a88c8dbe7b8c -size 661072 diff --git a/models/face_detection_yunet/face_detection_yunet_2023mar.onnx b/models/face_detection_yunet/face_detection_yunet_2023mar.onnx deleted file mode 100644 index 2d8804a5..00000000 --- a/models/face_detection_yunet/face_detection_yunet_2023mar.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8f2383e4dd3cfbb4553ea8718107fc0423210dc964f9f4280604804ed2552fa4 -size 232589 diff --git a/models/face_detection_yunet/face_detection_yunet_2023mar_int8.onnx b/models/face_detection_yunet/face_detection_yunet_2023mar_int8.onnx deleted file mode 100644 index c10540eb..00000000 --- a/models/face_detection_yunet/face_detection_yunet_2023mar_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:321aa5a6afabf7ecc46a3d06bfab2b579dc96eb5c3be7edd365fa04502ad9294 -size 100416 diff --git a/models/face_detection_yunet/face_detection_yunet_2023mar_int8bq.onnx b/models/face_detection_yunet/face_detection_yunet_2023mar_int8bq.onnx deleted file mode 100644 index 5778d83e..00000000 --- a/models/face_detection_yunet/face_detection_yunet_2023mar_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:49f000ec501fef24739071fc7e68267d32209045b6822c0c72dce1da25726f10 -size 122489 diff --git a/models/face_detection_yunet/yunet.py b/models/face_detection_yunet/yunet.py deleted file mode 100644 index 710d24b8..00000000 --- a/models/face_detection_yunet/yunet.py +++ /dev/null @@ -1,55 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -from itertools import product - -import numpy as np -import cv2 as cv - -class YuNet: - def __init__(self, modelPath, inputSize=[320, 320], confThreshold=0.6, nmsThreshold=0.3, topK=5000, backendId=0, targetId=0): - self._modelPath = modelPath - self._inputSize = tuple(inputSize) # [w, h] - self._confThreshold = confThreshold - self._nmsThreshold = nmsThreshold - self._topK = topK - self._backendId = backendId - self._targetId = targetId - - self._model = cv.FaceDetectorYN.create( - model=self._modelPath, - config="", - input_size=self._inputSize, - score_threshold=self._confThreshold, - nms_threshold=self._nmsThreshold, - top_k=self._topK, - backend_id=self._backendId, - target_id=self._targetId) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model = cv.FaceDetectorYN.create( - model=self._modelPath, - config="", - input_size=self._inputSize, - score_threshold=self._confThreshold, - nms_threshold=self._nmsThreshold, - top_k=self._topK, - backend_id=self._backendId, - target_id=self._targetId) - - def setInputSize(self, input_size): - self._model.setInputSize(tuple(input_size)) - - def infer(self, image): - # Forward - faces = self._model.detect(image) - return np.empty(shape=(0, 5)) if faces[1] is None else faces[1] diff --git a/models/face_image_quality_assessment_ediffiqa/LICENSE b/models/face_image_quality_assessment_ediffiqa/LICENSE deleted file mode 100644 index 4ea99c21..00000000 --- a/models/face_image_quality_assessment_ediffiqa/LICENSE +++ /dev/null @@ -1,395 +0,0 @@ -Attribution 4.0 International - -======================================================================= - -Creative Commons Corporation ("Creative Commons") is not a law firm and -does not provide legal services or legal advice. Distribution of -Creative Commons public licenses does not create a lawyer-client or -other relationship. 
Creative Commons makes its licenses and related -information available on an "as-is" basis. Creative Commons gives no -warranties regarding its licenses, any material licensed under their -terms and conditions, or any related information. Creative Commons -disclaims all liability for damages resulting from their use to the -fullest extent possible. - -Using Creative Commons Public Licenses - -Creative Commons public licenses provide a standard set of terms and -conditions that creators and other rights holders may use to share -original works of authorship and other material subject to copyright -and certain other rights specified in the public license below. The -following considerations are for informational purposes only, are not -exhaustive, and do not form part of our licenses. - - Considerations for licensors: Our public licenses are - intended for use by those authorized to give the public - permission to use material in ways otherwise restricted by - copyright and certain other rights. Our licenses are - irrevocable. Licensors should read and understand the terms - and conditions of the license they choose before applying it. - Licensors should also secure all rights necessary before - applying our licenses so that the public can reuse the - material as expected. Licensors should clearly mark any - material not subject to the license. This includes other CC- - licensed material, or material used under an exception or - limitation to copyright. More considerations for licensors: - wiki.creativecommons.org/Considerations_for_licensors - - Considerations for the public: By using one of our public - licenses, a licensor grants the public permission to use the - licensed material under specified terms and conditions. If - the licensor's permission is not necessary for any reason--for - example, because of any applicable exception or limitation to - copyright--then that use is not regulated by the license. 
Our - licenses grant only permissions under copyright and certain - other rights that a licensor has authority to grant. Use of - the licensed material may still be restricted for other - reasons, including because others have copyright or other - rights in the material. A licensor may make special requests, - such as asking that all changes be marked or described. - Although not required by our licenses, you are encouraged to - respect those requests where reasonable. More considerations - for the public: - wiki.creativecommons.org/Considerations_for_licensees - -======================================================================= - -Creative Commons Attribution 4.0 International Public License - -By exercising the Licensed Rights (defined below), You accept and agree -to be bound by the terms and conditions of this Creative Commons -Attribution 4.0 International Public License ("Public License"). To the -extent this Public License may be interpreted as a contract, You are -granted the Licensed Rights in consideration of Your acceptance of -these terms and conditions, and the Licensor grants You such rights in -consideration of benefits the Licensor receives from making the -Licensed Material available under these terms and conditions. - - -Section 1 -- Definitions. - - a. Adapted Material means material subject to Copyright and Similar - Rights that is derived from or based upon the Licensed Material - and in which the Licensed Material is translated, altered, - arranged, transformed, or otherwise modified in a manner requiring - permission under the Copyright and Similar Rights held by the - Licensor. For purposes of this Public License, where the Licensed - Material is a musical work, performance, or sound recording, - Adapted Material is always produced where the Licensed Material is - synched in timed relation with a moving image. - - b. 
Adapter's License means the license You apply to Your Copyright - and Similar Rights in Your contributions to Adapted Material in - accordance with the terms and conditions of this Public License. - - c. Copyright and Similar Rights means copyright and/or similar rights - closely related to copyright including, without limitation, - performance, broadcast, sound recording, and Sui Generis Database - Rights, without regard to how the rights are labeled or - categorized. For purposes of this Public License, the rights - specified in Section 2(b)(1)-(2) are not Copyright and Similar - Rights. - - d. Effective Technological Measures means those measures that, in the - absence of proper authority, may not be circumvented under laws - fulfilling obligations under Article 11 of the WIPO Copyright - Treaty adopted on December 20, 1996, and/or similar international - agreements. - - e. Exceptions and Limitations means fair use, fair dealing, and/or - any other exception or limitation to Copyright and Similar Rights - that applies to Your use of the Licensed Material. - - f. Licensed Material means the artistic or literary work, database, - or other material to which the Licensor applied this Public - License. - - g. Licensed Rights means the rights granted to You subject to the - terms and conditions of this Public License, which are limited to - all Copyright and Similar Rights that apply to Your use of the - Licensed Material and that the Licensor has authority to license. - - h. Licensor means the individual(s) or entity(ies) granting rights - under this Public License. - - i. 
Share means to provide material to the public by any means or - process that requires permission under the Licensed Rights, such - as reproduction, public display, public performance, distribution, - dissemination, communication, or importation, and to make material - available to the public including in ways that members of the - public may access the material from a place and at a time - individually chosen by them. - - j. Sui Generis Database Rights means rights other than copyright - resulting from Directive 96/9/EC of the European Parliament and of - the Council of 11 March 1996 on the legal protection of databases, - as amended and/or succeeded, as well as other essentially - equivalent rights anywhere in the world. - - k. You means the individual or entity exercising the Licensed Rights - under this Public License. Your has a corresponding meaning. - - -Section 2 -- Scope. - - a. License grant. - - 1. Subject to the terms and conditions of this Public License, - the Licensor hereby grants You a worldwide, royalty-free, - non-sublicensable, non-exclusive, irrevocable license to - exercise the Licensed Rights in the Licensed Material to: - - a. reproduce and Share the Licensed Material, in whole or - in part; and - - b. produce, reproduce, and Share Adapted Material. - - 2. Exceptions and Limitations. For the avoidance of doubt, where - Exceptions and Limitations apply to Your use, this Public - License does not apply, and You do not need to comply with - its terms and conditions. - - 3. Term. The term of this Public License is specified in Section - 6(a). - - 4. Media and formats; technical modifications allowed. The - Licensor authorizes You to exercise the Licensed Rights in - all media and formats whether now known or hereafter created, - and to make technical modifications necessary to do so. 
The - Licensor waives and/or agrees not to assert any right or - authority to forbid You from making technical modifications - necessary to exercise the Licensed Rights, including - technical modifications necessary to circumvent Effective - Technological Measures. For purposes of this Public License, - simply making modifications authorized by this Section 2(a) - (4) never produces Adapted Material. - - 5. Downstream recipients. - - a. Offer from the Licensor -- Licensed Material. Every - recipient of the Licensed Material automatically - receives an offer from the Licensor to exercise the - Licensed Rights under the terms and conditions of this - Public License. - - b. No downstream restrictions. You may not offer or impose - any additional or different terms or conditions on, or - apply any Effective Technological Measures to, the - Licensed Material if doing so restricts exercise of the - Licensed Rights by any recipient of the Licensed - Material. - - 6. No endorsement. Nothing in this Public License constitutes or - may be construed as permission to assert or imply that You - are, or that Your use of the Licensed Material is, connected - with, or sponsored, endorsed, or granted official status by, - the Licensor or others designated to receive attribution as - provided in Section 3(a)(1)(A)(i). - - b. Other rights. - - 1. Moral rights, such as the right of integrity, are not - licensed under this Public License, nor are publicity, - privacy, and/or other similar personality rights; however, to - the extent possible, the Licensor waives and/or agrees not to - assert any such rights held by the Licensor to the limited - extent necessary to allow You to exercise the Licensed - Rights, but not otherwise. - - 2. Patent and trademark rights are not licensed under this - Public License. - - 3. 
To the extent possible, the Licensor waives any right to - collect royalties from You for the exercise of the Licensed - Rights, whether directly or through a collecting society - under any voluntary or waivable statutory or compulsory - licensing scheme. In all other cases the Licensor expressly - reserves any right to collect such royalties. - - -Section 3 -- License Conditions. - -Your exercise of the Licensed Rights is expressly made subject to the -following conditions. - - a. Attribution. - - 1. If You Share the Licensed Material (including in modified - form), You must: - - a. retain the following if it is supplied by the Licensor - with the Licensed Material: - - i. identification of the creator(s) of the Licensed - Material and any others designated to receive - attribution, in any reasonable manner requested by - the Licensor (including by pseudonym if - designated); - - ii. a copyright notice; - - iii. a notice that refers to this Public License; - - iv. a notice that refers to the disclaimer of - warranties; - - v. a URI or hyperlink to the Licensed Material to the - extent reasonably practicable; - - b. indicate if You modified the Licensed Material and - retain an indication of any previous modifications; and - - c. indicate the Licensed Material is licensed under this - Public License, and include the text of, or the URI or - hyperlink to, this Public License. - - 2. You may satisfy the conditions in Section 3(a)(1) in any - reasonable manner based on the medium, means, and context in - which You Share the Licensed Material. For example, it may be - reasonable to satisfy the conditions by providing a URI or - hyperlink to a resource that includes the required - information. - - 3. If requested by the Licensor, You must remove any of the - information required by Section 3(a)(1)(A) to the extent - reasonably practicable. - - 4. 
If You Share Adapted Material You produce, the Adapter's - License You apply must not prevent recipients of the Adapted - Material from complying with this Public License. - - -Section 4 -- Sui Generis Database Rights. - -Where the Licensed Rights include Sui Generis Database Rights that -apply to Your use of the Licensed Material: - - a. for the avoidance of doubt, Section 2(a)(1) grants You the right - to extract, reuse, reproduce, and Share all or a substantial - portion of the contents of the database; - - b. if You include all or a substantial portion of the database - contents in a database in which You have Sui Generis Database - Rights, then the database in which You have Sui Generis Database - Rights (but not its individual contents) is Adapted Material; and - - c. You must comply with the conditions in Section 3(a) if You Share - all or a substantial portion of the contents of the database. - -For the avoidance of doubt, this Section 4 supplements and does not -replace Your obligations under this Public License where the Licensed -Rights include other Copyright and Similar Rights. - - -Section 5 -- Disclaimer of Warranties and Limitation of Liability. - - a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE - EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS - AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF - ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, - IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, - WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR - PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, - ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT - KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT - ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. - - b. 
TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE - TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, - NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, - INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, - COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR - USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN - ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR - DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR - IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. - - c. The disclaimer of warranties and limitation of liability provided - above shall be interpreted in a manner that, to the extent - possible, most closely approximates an absolute disclaimer and - waiver of all liability. - - -Section 6 -- Term and Termination. - - a. This Public License applies for the term of the Copyright and - Similar Rights licensed here. However, if You fail to comply with - this Public License, then Your rights under this Public License - terminate automatically. - - b. Where Your right to use the Licensed Material has terminated under - Section 6(a), it reinstates: - - 1. automatically as of the date the violation is cured, provided - it is cured within 30 days of Your discovery of the - violation; or - - 2. upon express reinstatement by the Licensor. - - For the avoidance of doubt, this Section 6(b) does not affect any - right the Licensor may have to seek remedies for Your violations - of this Public License. - - c. For the avoidance of doubt, the Licensor may also offer the - Licensed Material under separate terms or conditions or stop - distributing the Licensed Material at any time; however, doing so - will not terminate this Public License. - - d. Sections 1, 5, 6, 7, and 8 survive termination of this Public - License. - - -Section 7 -- Other Terms and Conditions. - - a. 
The Licensor shall not be bound by any additional or different - terms or conditions communicated by You unless expressly agreed. - - b. Any arrangements, understandings, or agreements regarding the - Licensed Material not stated herein are separate from and - independent of the terms and conditions of this Public License. - - -Section 8 -- Interpretation. - - a. For the avoidance of doubt, this Public License does not, and - shall not be interpreted to, reduce, limit, restrict, or impose - conditions on any use of the Licensed Material that could lawfully - be made without permission under this Public License. - - b. To the extent possible, if any provision of this Public License is - deemed unenforceable, it shall be automatically reformed to the - minimum extent necessary to make it enforceable. If the provision - cannot be reformed, it shall be severed from this Public License - without affecting the enforceability of the remaining terms and - conditions. - - c. No term or condition of this Public License will be waived and no - failure to comply consented to unless expressly agreed to by the - Licensor. - - d. Nothing in this Public License constitutes or may be interpreted - as a limitation upon, or waiver of, any privileges and immunities - that apply to the Licensor or You, including from the legal - processes of any jurisdiction or authority. - - -======================================================================= - -Creative Commons is not a party to its public -licenses. Notwithstanding, Creative Commons may elect to apply one of -its public licenses to material it publishes and in those instances -will be considered the “Licensor.” The text of the Creative Commons -public licenses is dedicated to the public domain under the CC0 Public -Domain Dedication. 
Except for the limited purpose of indicating that -material is shared under a Creative Commons public license or as -otherwise permitted by the Creative Commons policies published at -creativecommons.org/policies, Creative Commons does not authorize the -use of the trademark "Creative Commons" or any other trademark or logo -of Creative Commons without its prior written consent including, -without limitation, in connection with any unauthorized modifications -to any of its public licenses or any other arrangements, -understandings, or agreements concerning use of licensed material. For -the avoidance of doubt, this paragraph does not form part of the -public licenses. - -Creative Commons may be contacted at creativecommons.org. diff --git a/models/face_image_quality_assessment_ediffiqa/README.md b/models/face_image_quality_assessment_ediffiqa/README.md deleted file mode 100644 index 83ea05ab..00000000 --- a/models/face_image_quality_assessment_ediffiqa/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# eDifFIQA(T) - -eDifFIQA(T) is a light-weight version of the models presented in the paper [eDifFIQA: Towards Efficient Face Image Quality Assessment based on Denoising Diffusion Probabilistic Models](https://ieeexplore.ieee.org/document/10468647), it achieves state-of-the-art results in the field of face image quality assessment. - -Notes: - -- The original implementation can be found [here](https://github.com/LSIbabnikz/eDifFIQA). -- The included model combines a pretrained MobileFaceNet backbone, with a quality regression head trained using the proceedure presented in the original paper. -- The model predicts quality scores of aligned face samples, where a higher predicted score corresponds to a higher quality of the input sample. - -- In the figure below we show the quality distribution on two distinct datasets: LFW[[1]](#1) and XQLFW[[2]](#2). The LFW dataset contains images of relatively high quality, whereas the XQLFW dataset contains images of variable quality. 
There is a clear difference between the two distributions, with high quality images from the LFW dataset receiving quality scores higher than 0.5, while the mixed images from XQLFW receive much lower quality scores on average. - - -![qualityDist](./quality_distribution.png) - - -[1] -B. Huang, M. Ramesh, T. Berg, and E. Learned-Miller -“Labeled Faces in the Wild: A Database for Studying Face Recognition in Unconstrained Environments” -University of Massachusetts, Amherst, Tech. Rep. 07-49, -October 2007. - -[2] -M. Knoche, S. Hormann, and G. Rigoll -“Cross-Quality LFW: A Database for Analyzing Cross-Resolution Image Face Recognition in Unconstrained Environments,” in Proceedings of the IEEE International Conference on Automatic Face and Gesture Recognition (FG), 2021, pp. 1–5. - - - -## Demo - -***NOTE***: The provided demo uses [../face_detection_yunet](../face_detection_yunet) for face detection, in order to properly align the face samples, while the original implementation uses a RetinaFace(ResNet50) model, which might cause some differences between the results of the two implementations. - -To try the demo run the following commands: - - -```shell -# Assess the quality of 'image1' -python demo.py -i /path/to/image1 - -# Output all the arguments of the demo -python demo.py --help -``` - - -### Example outputs - -![ediffiqaDemo](./example_outputs/demo.jpg) - -The demo outputs the quality of the sample via terminal (print) and via image in __results.jpg__. - -## License - -All files in this directory are licensed under [CC-BY-4.0](./LICENSE). - diff --git a/models/face_image_quality_assessment_ediffiqa/demo.py b/models/face_image_quality_assessment_ediffiqa/demo.py deleted file mode 100644 index f2de4da1..00000000 --- a/models/face_image_quality_assessment_ediffiqa/demo.py +++ /dev/null @@ -1,155 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. 
- - -import sys -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -sys.path.append('../face_detection_yunet') -from yunet import YuNet - -from ediffiqa import eDifFIQA - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -REFERENCE_FACIAL_POINTS = [ - [38.2946 , 51.6963 ], - [73.5318 , 51.5014 ], - [56.0252 , 71.7366 ], - [41.5493 , 92.3655 ], - [70.729904, 92.2041 ] -] - -parser = argparse.ArgumentParser(description='eDifFIQA: Towards Efficient Face Image Quality Assessment based on Denoising Diffusion Probabilistic Models (https://github.com/LSIbabnikz/eDifFIQA).') -parser.add_argument('--input', '-i', type=str, default='./sample_image.jpg', - help='Usage: Set input to a certain image, defaults to "./sample_image.jpg".') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) - -ediffiqa_parser = parser.add_argument_group("eDifFIQA", " Parameters of eDifFIQA - For face image quality assessment ") -ediffiqa_parser.add_argument('--model_q', '-mq', type=str, default='ediffiqa_tiny_jun2024.onnx', - help="Usage: Set model type, defaults to 'ediffiqa_tiny_jun2024.onnx'.") - -yunet_parser = 
parser.add_argument_group("YuNet", " Parameters of YuNet - For face detection ") -yunet_parser.add_argument('--model_d', '-md', type=str, default='../face_detection_yunet/face_detection_yunet_2023mar.onnx', - help="Usage: Set model type, defaults to '../face_detection_yunet/face_detection_yunet_2023mar.onnx'.") -yunet_parser.add_argument('--conf_threshold', type=float, default=0.9, - help='Usage: Set the minimum needed confidence for the model to identify a face, defauts to 0.9. Smaller values may result in faster detection, but will limit accuracy. Filter out faces of confidence < conf_threshold.') -yunet_parser.add_argument('--nms_threshold', type=float, default=0.3, - help='Usage: Suppress bounding boxes of iou >= nms_threshold. Default = 0.3.') -yunet_parser.add_argument('--top_k', type=int, default=5000, - help='Usage: Keep top_k bounding boxes before NMS.') -args = parser.parse_args() - - -def visualize(image, results): - output = image.copy() - cv.putText(output, f"{results:.3f}", (0, 20), cv.FONT_HERSHEY_DUPLEX, .8, (0, 0, 255)) - - return output - - -def align_image(image, detection_data): - """ Performs face alignment on given image using the provided face landmarks (keypoints) - - Args: - image (np.array): Unaligned face image - detection_data (np.array): Detection data provided by YuNet - - Returns: - np.array: Aligned image - """ - - reference_pts = REFERENCE_FACIAL_POINTS - - ref_pts = np.float32(reference_pts) - ref_pts_shp = ref_pts.shape - - if ref_pts_shp[0] == 2: - ref_pts = ref_pts.T - - # Get source keypoints from YuNet detection data - src_pts = np.float32(detection_data[0][4:-1]).reshape(5,2) - src_pts_shp = src_pts.shape - - if src_pts_shp[0] == 2: - src_pts = src_pts.T - - tfm, _ = cv.estimateAffinePartial2D(src_pts, ref_pts, method=cv.LMEDS) - - face_img = cv.warpAffine(image, tfm, (112, 112)) - - return face_img - - -if __name__ == '__main__': - - backend_id = backend_target_pairs[args.backend_target][0] - target_id = 
backend_target_pairs[args.backend_target][1] - - # Instantiate eDifFIQA(T) (quality assesment) - model_quality = eDifFIQA( - modelPath=args.model_q, - inputSize=[112, 112], - ) - model_quality.setBackendAndTarget( - backendId=backend_id, - targetId=target_id - ) - - # Instantiate YuNet (face detection) - model_detect = YuNet( - modelPath=args.model_d, - inputSize=[320, 320], - confThreshold=args.conf_threshold, - nmsThreshold=args.nms_threshold, - topK=args.top_k, - backendId=backend_id, - targetId=target_id - ) - - # If input is an image - image = cv.imread(args.input) - h, w, _ = image.shape - - # Face Detection - model_detect.setInputSize([w, h]) - results_detect = model_detect.infer(image) - - assert results_detect.size != 0, f" Face could not be detected in: {args.input}. " - - # Face Alignment - aligned_image = align_image(image, results_detect) - - # Quality Assesment - quality = model_quality.infer(aligned_image) - quality = np.squeeze(quality).item() - - viz_image = visualize(aligned_image, quality) - - print(f" Quality score of {args.input}: {quality:.3f} ") - - print(f" Saving visualization to results.jpg. ") - cv.imwrite('results.jpg', viz_image) - diff --git a/models/face_image_quality_assessment_ediffiqa/ediffiqa.py b/models/face_image_quality_assessment_ediffiqa/ediffiqa.py deleted file mode 100644 index 6f91d207..00000000 --- a/models/face_image_quality_assessment_ediffiqa/ediffiqa.py +++ /dev/null @@ -1,45 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. 
- -import numpy as np -import cv2 as cv - - -class eDifFIQA: - - def __init__(self, modelPath, inputSize=[112, 112]): - self.modelPath = modelPath - self.inputSize = tuple(inputSize) # [w, h] - - self.model = cv.dnn.readNetFromONNX(self.modelPath) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self.model.setPreferableBackend(self._backendId) - self.model.setPreferableTarget(self._targetId) - - def infer(self, image): - # Preprocess image - image = self._preprocess(image) - # Forward - self.model.setInput(image) - quality_score = self.model.forward() - - return quality_score - - def _preprocess(self, image: cv.Mat): - # Change image from BGR to RGB - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - # Resize to (112, 112) - image = cv.resize(image, self.inputSize) - # Scale to [0, 1] and normalize by mean=0.5, std=0.5 - image = ((image / 255) - 0.5) / 0.5 - # Move channel axis - image = np.moveaxis(image[None, ...], -1, 1) - - return image diff --git a/models/face_image_quality_assessment_ediffiqa/ediffiqa_tiny_jun2024.onnx b/models/face_image_quality_assessment_ediffiqa/ediffiqa_tiny_jun2024.onnx deleted file mode 100644 index 41e32607..00000000 --- a/models/face_image_quality_assessment_ediffiqa/ediffiqa_tiny_jun2024.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9426c899cc0f01665240cb7d9e7f98e18e24e456c178326c771a43da289bfc6a -size 7272678 diff --git a/models/face_image_quality_assessment_ediffiqa/example_outputs/demo.jpg b/models/face_image_quality_assessment_ediffiqa/example_outputs/demo.jpg deleted file mode 100644 index 8cf069b6..00000000 --- a/models/face_image_quality_assessment_ediffiqa/example_outputs/demo.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:73d32e2822fcdfd8ede6184d85092f0f59db4a1ed40ad31e4ba9741b1ac5b0d3 -size 7879 diff --git 
a/models/face_image_quality_assessment_ediffiqa/quality_distribution.png b/models/face_image_quality_assessment_ediffiqa/quality_distribution.png deleted file mode 100644 index cc95457f..00000000 --- a/models/face_image_quality_assessment_ediffiqa/quality_distribution.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1e1774951ee1d6008f669a57674033893fd3d3809a6aaffe8628c3cf5d3c98c1 -size 19900 diff --git a/models/face_recognition_sface/CMakeLists.txt b/models/face_recognition_sface/CMakeLists.txt deleted file mode 100644 index cb1bac44..00000000 --- a/models/face_recognition_sface/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.24.0) -project(opencv_zoo_face_recognition_sface) - -set(OPENCV_VERSION "4.9.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") - -# Find OpenCV -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) - -add_executable(demo demo.cpp) -target_link_libraries(demo ${OpenCV_LIBS}) diff --git a/models/face_recognition_sface/LICENSE b/models/face_recognition_sface/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/models/face_recognition_sface/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/face_recognition_sface/README.md b/models/face_recognition_sface/README.md deleted file mode 100644 index fed1076e..00000000 --- a/models/face_recognition_sface/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# SFace - -SFace: Sigmoid-Constrained Hypersphere Loss for Robust Face Recognition - -Note: - -- SFace is contributed by [Yaoyao Zhong](https://github.com/zhongyy). -- Model files encode MobileFaceNet instances trained on the SFace loss function, see the [SFace paper](https://arxiv.org/abs/2205.12010) for reference. -- ONNX file conversions from [original code base](https://github.com/zhongyy/SFace) thanks to [Chengrui Wang](https://github.com/crywang). -- (As of Sep 2021) Supporting 5-landmark warping for now, see below for details. -- `face_recognition_sface_2021dec_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -Results of accuracy evaluation with [tools/eval](../../tools/eval). - -| Models | Accuracy | -| ----------- | -------- | -| SFace | 0.9940 | -| SFace block | 0.9942 | -| SFace quant | 0.9932 | - -\*: 'quant' stands for 'quantized'. -\*\*: 'block' stands for 'blockwise quantized'. 
- -## Demo - -***NOTE***: This demo uses [../face_detection_yunet](../face_detection_yunet) as face detector, which supports 5-landmark detection for now (2021sep). - -Run the following command to try the demo: - -### Python -```shell -# recognize on images -python demo.py --target /path/to/image1 --query /path/to/image2 - -# get help regarding various parameters -python demo.py --help -``` - -### C++ -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/demo -t=/path/to/target_face -# detect on an image -./build/demo -t=/path/to/target_face -q=/path/to/query_face -v -# get help messages -./build/demo -h -``` - -### Example outputs - -![sface demo](./example_outputs/demo.jpg) - -Note: Left part of the image is the target identity, the right part is the query. Green boxes are the same identity, red boxes are different identities compared to the left. - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference - -- https://ieeexplore.ieee.org/document/9318547 -- https://github.com/zhongyy/SFace diff --git a/models/face_recognition_sface/demo.cpp b/models/face_recognition_sface/demo.cpp deleted file mode 100644 index 2bccbc3c..00000000 --- a/models/face_recognition_sface/demo.cpp +++ /dev/null @@ -1,322 +0,0 @@ -#include "opencv2/opencv.hpp" -#include "opencv2/core/types.hpp" - -#include -#include - -const std::vector> backend_target_pairs = { - {cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_CPU}, - {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA}, - {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA_FP16}, - {cv::dnn::DNN_BACKEND_TIMVX, cv::dnn::DNN_TARGET_NPU}, - {cv::dnn::DNN_BACKEND_CANN, cv::dnn::DNN_TARGET_NPU} -}; - -class YuNet -{ - public: - YuNet(const std::string& model_path, - const cv::Size& input_size, - const float conf_threshold, - const float nms_threshold, - const int top_k, - const int backend_id, - const int target_id) - { - _detector = cv::FaceDetectorYN::create( - model_path, "", input_size, conf_threshold, nms_threshold, top_k, backend_id, target_id); - } - - void setInputSize(const cv::Size& input_size) - { - _detector->setInputSize(input_size); - } - - void setTopK(const int top_k) - { - _detector->setTopK(top_k); - } - - cv::Mat infer(const cv::Mat& image) - { - cv::Mat result; - _detector->detect(image, result); - return result; - } - - private: - cv::Ptr _detector; -}; - -class SFace -{ - public: - SFace(const std::string& model_path, - const int backend_id, - const int target_id, - const int distance_type) - : _distance_type(static_cast(distance_type)) - { - _recognizer = cv::FaceRecognizerSF::create(model_path, "", backend_id, target_id); - } - - cv::Mat extractFeatures(const cv::Mat& orig_image, const cv::Mat& face_image) - { - // Align and crop detected face from original image - cv::Mat target_aligned; - _recognizer->alignCrop(orig_image, face_image, target_aligned); - // Extract features from cropped detected 
face - cv::Mat target_features; - _recognizer->feature(target_aligned, target_features); - return target_features.clone(); - } - - std::pair matchFeatures(const cv::Mat& target_features, const cv::Mat& query_features) - { - const double score = _recognizer->match(target_features, query_features, _distance_type); - if (_distance_type == cv::FaceRecognizerSF::DisType::FR_COSINE) - { - return {score, score >= _threshold_cosine}; - } - return {score, score <= _threshold_norml2}; - } - - private: - cv::Ptr _recognizer; - cv::FaceRecognizerSF::DisType _distance_type; - double _threshold_cosine = 0.363; - double _threshold_norml2 = 1.128; -}; - -cv::Mat visualize(const cv::Mat& image, - const cv::Mat& faces, - const std::vector>& matches, - const float fps = -0.1F, - const cv::Size& target_size = cv::Size(512, 512)) -{ - static const cv::Scalar matched_box_color{0, 255, 0}; - static const cv::Scalar mismatched_box_color{0, 0, 255}; - - if (fps >= 0) - { - cv::Mat output_image = image.clone(); - - const int x1 = static_cast(faces.at(0, 0)); - const int y1 = static_cast(faces.at(0, 1)); - const int w = static_cast(faces.at(0, 2)); - const int h = static_cast(faces.at(0, 3)); - const auto match = matches.at(0); - - cv::Scalar box_color = match.second ? 
matched_box_color : mismatched_box_color; - // Draw bounding box - cv::rectangle(output_image, cv::Rect(x1, y1, w, h), box_color, 2); - // Draw match score - cv::putText(output_image, cv::format("%.4f", match.first), cv::Point(x1, y1+12), cv::FONT_HERSHEY_DUPLEX, 0.30, box_color); - // Draw FPS - cv::putText(output_image, cv::format("FPS: %.2f", fps), cv::Point(0, 15), cv::FONT_HERSHEY_SIMPLEX, 0.5, box_color, 2); - - return output_image; - } - - cv::Mat output_image = cv::Mat::zeros(target_size, CV_8UC3); - - // Determine new height and width of image with aspect ratio of original image - const double ratio = std::min(static_cast(target_size.height) / image.rows, - static_cast(target_size.width) / image.cols); - const int new_height = static_cast(image.rows * ratio); - const int new_width = static_cast(image.cols * ratio); - - // Resize the original image, maintaining aspect ratio - cv::Mat resize_out; - cv::resize(image, resize_out, cv::Size(new_width, new_height), cv::INTER_LINEAR); - - // Determine top left corner in resized dimensions - const int top = std::max(0, target_size.height - new_height) / 2; - const int left = std::max(0, target_size.width - new_width) / 2; - - // Copy resized image into target output image - const cv::Rect roi = cv::Rect(cv::Point(left, top), cv::Size(new_width, new_height)); - cv::Mat out_sub_image = output_image(roi); - resize_out.copyTo(out_sub_image); - - for (int i = 0; i < faces.rows; ++i) - { - const int x1 = static_cast(faces.at(i, 0) * ratio) + left; - const int y1 = static_cast(faces.at(i, 1) * ratio) + top; - const int w = static_cast(faces.at(i, 2) * ratio); - const int h = static_cast(faces.at(i, 3) * ratio); - const auto match = matches.at(i); - - cv::Scalar box_color = match.second ? 
matched_box_color : mismatched_box_color; - // Draw bounding box - cv::rectangle(output_image, cv::Rect(x1, y1, w, h), box_color, 2); - // Draw match score - cv::putText(output_image, cv::format("%.4f", match.first), cv::Point(x1, y1+12), cv::FONT_HERSHEY_DUPLEX, 0.30, box_color); - } - return output_image; -} - -int main(int argc, char** argv) -{ - cv::CommandLineParser parser(argc, argv, - // General options - "{help h | | Print this message}" - "{backend_target b | 0 | Set DNN backend target pair:\n" - "0: (default) OpenCV implementation + CPU,\n" - "1: CUDA + GPU (CUDA),\n" - "2: CUDA + GPU (CUDA FP16),\n" - "3: TIM-VX + NPU,\n" - "4: CANN + NPU}" - "{save s | false | Whether to save result image or not}" - "{vis v | false | Whether to visualize result image or not}" - // SFace options - "{target_face t | | Set path to input image 1 (target face)}" - "{query_face q | | Set path to input image 2 (query face), omit if using camera}" - "{model m | face_recognition_sface_2021dec.onnx | Set path to the model}" - "{distance_type d | 0 | 0 = cosine, 1 = norm_l1}" - // YuNet options - "{yunet_model | ../face_detection_yunet/face_detection_yunet_2023mar.onnx | Set path to the YuNet model}" - "{detect_threshold | 0.9 | Set the minimum confidence for the model\n" - "to identify a face. 
Filter out faces of\n" - "conf < conf_threshold}" - "{nms_threshold | 0.3 | Set the threshold to suppress overlapped boxes.\n" - "Suppress boxes if IoU(box1, box2) >= nms_threshold\n" - ", the one of higher score is kept.}" - "{top_k | 5000 | Keep top_k bounding boxes before NMS}" - ); - - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - // General CLI options - const int backend = parser.get("backend_target"); - const bool save_flag = parser.get("save"); - const bool vis_flag = parser.get("vis"); - const int backend_id = backend_target_pairs.at(backend).first; - const int target_id = backend_target_pairs.at(backend).second; - - // YuNet CLI options - const std::string detector_model_path = parser.get("yunet_model"); - const float detect_threshold = parser.get("detect_threshold"); - const float nms_threshold = parser.get("nms_threshold"); - const int top_k = parser.get("top_k"); - - // Use YuNet as the detector backend - auto face_detector = YuNet( - detector_model_path, cv::Size(320, 320), detect_threshold, nms_threshold, top_k, backend_id, target_id); - - // SFace CLI options - const std::string target_path = parser.get("target_face"); - const std::string query_path = parser.get("query_face"); - const std::string model_path = parser.get("model"); - const int distance_type = parser.get("distance_type"); - - auto face_recognizer = SFace(model_path, backend_id, target_id, distance_type); - - if (target_path.empty()) - { - CV_Error(cv::Error::StsError, "Path to target image " + target_path + " not found"); - } - - cv::Mat target_image = cv::imread(target_path); - // Detect single face in target image - face_detector.setInputSize(target_image.size()); - face_detector.setTopK(1); - cv::Mat target_face = face_detector.infer(target_image); - // Extract features from target face - cv::Mat target_features = face_recognizer.extractFeatures(target_image, target_face.row(0)); - - if (!query_path.empty()) // use image - { - // Detect any faces in query 
image - cv::Mat query_image = cv::imread(query_path); - face_detector.setInputSize(query_image.size()); - face_detector.setTopK(5000); - cv::Mat query_faces = face_detector.infer(query_image); - - // Store match scores for visualization - std::vector> matches; - - for (int i = 0; i < query_faces.rows; ++i) - { - // Extract features from query face - cv::Mat query_features = face_recognizer.extractFeatures(query_image, query_faces.row(i)); - // Measure similarity of target face to query face - const auto match = face_recognizer.matchFeatures(target_features, query_features); - matches.push_back(match); - - const int x1 = static_cast(query_faces.at(i, 0)); - const int y1 = static_cast(query_faces.at(i, 1)); - const int w = static_cast(query_faces.at(i, 2)); - const int h = static_cast(query_faces.at(i, 3)); - const float conf = query_faces.at(i, 14); - - std::cout << cv::format("%d: x1=%d, y1=%d, w=%d, h=%d, conf=%.4f, match=%.4f\n", i, x1, y1, w, h, conf, match.first); - } - - if (save_flag || vis_flag) - { - auto vis_target = visualize(target_image, target_face, {{1.0, true}}); - auto vis_query = visualize(query_image, query_faces, matches); - cv::Mat output_image; - cv::hconcat(vis_target, vis_query, output_image); - - if (save_flag) - { - std::cout << "Results are saved to result.jpg\n"; - cv::imwrite("result.jpg", output_image); - } - if (vis_flag) - { - cv::namedWindow(query_path, cv::WINDOW_AUTOSIZE); - cv::imshow(query_path, output_image); - cv::waitKey(0); - } - } - } - else // use video capture - { - const int device_id = 0; - auto cap = cv::VideoCapture(device_id); - const int w = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); - const int h = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); - face_detector.setInputSize(cv::Size(w, h)); - - auto tick_meter = cv::TickMeter(); - cv::Mat query_frame; - - while (cv::waitKey(1) < 0) - { - bool has_frame = cap.read(query_frame); - if (!has_frame) - { - std::cout << "No frames grabbed! 
Exiting ...\n"; - break; - } - tick_meter.start(); - // Detect faces from webcam image - cv::Mat query_faces = face_detector.infer(query_frame); - tick_meter.stop(); - - // Extract features from query face - cv::Mat query_features = face_recognizer.extractFeatures(query_frame, query_faces.row(0)); - // Measure similarity of target face to query face - const auto match = face_recognizer.matchFeatures(target_features, query_features); - - const auto fps = static_cast(tick_meter.getFPS()); - - auto vis_target = visualize(target_image, target_face, {{1.0, true}}, -0.1F, cv::Size(w, h)); - auto vis_query = visualize(query_frame, query_faces, {match}, fps); - cv::Mat output_image; - cv::hconcat(vis_target, vis_query, output_image); - - // Visualize in a new window - cv::imshow("SFace Demo", output_image); - - tick_meter.reset(); - } - } - return 0; -} diff --git a/models/face_recognition_sface/demo.py b/models/face_recognition_sface/demo.py deleted file mode 100644 index c3054b14..00000000 --- a/models/face_recognition_sface/demo.py +++ /dev/null @@ -1,156 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -import sys -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from sface import SFace - -sys.path.append('../face_detection_yunet') -from yunet import YuNet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser( - description="SFace: Sigmoid-Constrained Hypersphere Loss for Robust Face Recognition (https://ieeexplore.ieee.org/document/9318547)") -parser.add_argument('--target', '-t', type=str, - help='Usage: Set path to the input image 1 (target face).') -parser.add_argument('--query', '-q', type=str, - help='Usage: Set path to the input image 2 (query).') -parser.add_argument('--model', '-m', type=str, default='face_recognition_sface_2021dec.onnx', - help='Usage: Set model path, defaults to face_recognition_sface_2021dec.onnx.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--dis_type', type=int, choices=[0, 1], default=0, - help='Usage: Distance type. \'0\': cosine, \'1\': norm_l1. Defaults to \'0\'') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save file with results (i.e. 
bounding box, confidence level). Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. Invalid in case of camera input.') -args = parser.parse_args() - -def visualize(img1, faces1, img2, faces2, matches, scores, target_size=[512, 512]): # target_size: (h, w) - out1 = img1.copy() - out2 = img2.copy() - matched_box_color = (0, 255, 0) # BGR - mismatched_box_color = (0, 0, 255) # BGR - - # Resize to 256x256 with the same aspect ratio - padded_out1 = np.zeros((target_size[0], target_size[1], 3)).astype(np.uint8) - h1, w1, _ = out1.shape - ratio1 = min(target_size[0] / out1.shape[0], target_size[1] / out1.shape[1]) - new_h1 = int(h1 * ratio1) - new_w1 = int(w1 * ratio1) - resized_out1 = cv.resize(out1, (new_w1, new_h1), interpolation=cv.INTER_LINEAR).astype(np.float32) - top = max(0, target_size[0] - new_h1) // 2 - bottom = top + new_h1 - left = max(0, target_size[1] - new_w1) // 2 - right = left + new_w1 - padded_out1[top : bottom, left : right] = resized_out1 - - # Draw bbox - bbox1 = faces1[0][:4] * ratio1 - x, y, w, h = bbox1.astype(np.int32) - cv.rectangle(padded_out1, (x + left, y + top), (x + left + w, y + top + h), matched_box_color, 2) - - # Resize to 256x256 with the same aspect ratio - padded_out2 = np.zeros((target_size[0], target_size[1], 3)).astype(np.uint8) - h2, w2, _ = out2.shape - ratio2 = min(target_size[0] / out2.shape[0], target_size[1] / out2.shape[1]) - new_h2 = int(h2 * ratio2) - new_w2 = int(w2 * ratio2) - resized_out2 = cv.resize(out2, (new_w2, new_h2), interpolation=cv.INTER_LINEAR).astype(np.float32) - top = max(0, target_size[0] - new_h2) // 2 - bottom = top + new_h2 - left = max(0, target_size[1] - new_w2) // 2 - right = left + new_w2 - padded_out2[top : bottom, left : right] = resized_out2 - - # Draw bbox - assert faces2.shape[0] == len(matches), "number of faces2 needs to match matches" - assert len(matches) == len(scores), "number of 
matches needs to match number of scores" - for index, match in enumerate(matches): - bbox2 = faces2[index][:4] * ratio2 - x, y, w, h = bbox2.astype(np.int32) - box_color = matched_box_color if match else mismatched_box_color - cv.rectangle(padded_out2, (x + left, y + top), (x + left + w, y + top + h), box_color, 2) - - score = scores[index] - text_color = matched_box_color if match else mismatched_box_color - cv.putText(padded_out2, "{:.2f}".format(score), (x + left, y + top - 5), cv.FONT_HERSHEY_DUPLEX, 0.4, text_color) - - return np.concatenate([padded_out1, padded_out2], axis=1) - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - # Instantiate SFace for face recognition - recognizer = SFace(modelPath=args.model, - disType=args.dis_type, - backendId=backend_id, - targetId=target_id) - # Instantiate YuNet for face detection - detector = YuNet(modelPath='../face_detection_yunet/face_detection_yunet_2023mar.onnx', - inputSize=[320, 320], - confThreshold=0.9, - nmsThreshold=0.3, - topK=5000, - backendId=backend_id, - targetId=target_id) - - img1 = cv.imread(args.target) - img2 = cv.imread(args.query) - - # Detect faces - detector.setInputSize([img1.shape[1], img1.shape[0]]) - faces1 = detector.infer(img1) - assert faces1.shape[0] > 0, 'Cannot find a face in {}'.format(args.target) - detector.setInputSize([img2.shape[1], img2.shape[0]]) - faces2 = detector.infer(img2) - assert faces2.shape[0] > 0, 'Cannot find a face in {}'.format(args.query) - - # Match - scores = [] - matches = [] - for face in faces2: - result = recognizer.match(img1, faces1[0][:-1], img2, face[:-1]) - scores.append(result[0]) - matches.append(result[1]) - - # Draw results - image = visualize(img1, faces1, img2, faces2, matches, scores) - - # Save results if save is true - if args.save: - print('Resutls saved to result.jpg\n') - cv.imwrite('result.jpg', image) - - # Visualize results in a new window - 
if args.vis: - cv.namedWindow("SFace Demo", cv.WINDOW_AUTOSIZE) - cv.imshow("SFace Demo", image) - cv.waitKey(0) diff --git a/models/face_recognition_sface/example_outputs/demo.jpg b/models/face_recognition_sface/example_outputs/demo.jpg deleted file mode 100644 index 2d49bbc6..00000000 --- a/models/face_recognition_sface/example_outputs/demo.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0f879881a598fea6fec74e047e6a1d00e36d81de63bf0ed392b628e6ab6c2fc4 -size 156282 diff --git a/models/face_recognition_sface/face_recognition_sface_2021dec.onnx b/models/face_recognition_sface/face_recognition_sface_2021dec.onnx deleted file mode 100644 index 5817e559..00000000 --- a/models/face_recognition_sface/face_recognition_sface_2021dec.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0ba9fbfa01b5270c96627c4ef784da859931e02f04419c829e83484087c34e79 -size 38696353 diff --git a/models/face_recognition_sface/face_recognition_sface_2021dec_int8.onnx b/models/face_recognition_sface/face_recognition_sface_2021dec_int8.onnx deleted file mode 100644 index 23086ad9..00000000 --- a/models/face_recognition_sface/face_recognition_sface_2021dec_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2b0e941e6f16cc048c20aee0c8e31f569118f65d702914540f7bfdc14048d78a -size 9896933 diff --git a/models/face_recognition_sface/face_recognition_sface_2021dec_int8bq.onnx b/models/face_recognition_sface/face_recognition_sface_2021dec_int8bq.onnx deleted file mode 100644 index c9acf218..00000000 --- a/models/face_recognition_sface/face_recognition_sface_2021dec_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fb143eea07838aa532d1c95df5f69899974ea0140e1fba05e94204be13ed74ee -size 10667852 diff --git a/models/face_recognition_sface/sface.py b/models/face_recognition_sface/sface.py deleted file mode 100644 index cb467071..00000000 --- 
a/models/face_recognition_sface/sface.py +++ /dev/null @@ -1,63 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. - -import numpy as np -import cv2 as cv - -class SFace: - def __init__(self, modelPath, disType=0, backendId=0, targetId=0): - self._modelPath = modelPath - self._backendId = backendId - self._targetId = targetId - self._model = cv.FaceRecognizerSF.create( - model=self._modelPath, - config="", - backend_id=self._backendId, - target_id=self._targetId) - - self._disType = disType # 0: cosine similarity, 1: Norm-L2 distance - assert self._disType in [0, 1], "0: Cosine similarity, 1: norm-L2 distance, others: invalid" - - self._threshold_cosine = 0.363 - self._threshold_norml2 = 1.128 - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model = cv.FaceRecognizerSF.create( - model=self._modelPath, - config="", - backend_id=self._backendId, - target_id=self._targetId) - - def _preprocess(self, image, bbox): - if bbox is None: - return image - else: - return self._model.alignCrop(image, bbox) - - def infer(self, image, bbox=None): - # Preprocess - inputBlob = self._preprocess(image, bbox) - - # Forward - features = self._model.feature(inputBlob) - return features - - def match(self, image1, face1, image2, face2): - feature1 = self.infer(image1, face1) - feature2 = self.infer(image2, face2) - - if self._disType == 0: # COSINE - cosine_score = self._model.match(feature1, feature2, self._disType) - return cosine_score, 1 if cosine_score >= self._threshold_cosine else 0 - else: # NORM_L2 - norml2_distance = self._model.match(feature1, feature2, self._disType) - 
return norml2_distance, 1 if norml2_distance <= self._threshold_norml2 else 0 diff --git a/models/facial_expression_recognition/CMakeLists.txt b/models/facial_expression_recognition/CMakeLists.txt deleted file mode 100644 index 5004f437..00000000 --- a/models/facial_expression_recognition/CMakeLists.txt +++ /dev/null @@ -1,30 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(CMAKE_CXX_STANDARD 11) -set(project_name "opencv_zoo_face_expression_recognition") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. 
-# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/facial_expression_recognition/README.md b/models/facial_expression_recognition/README.md deleted file mode 100644 index 7c1c9445..00000000 --- a/models/facial_expression_recognition/README.md +++ /dev/null @@ -1,59 +0,0 @@ - -# Progressive Teacher - -Progressive Teacher: [Boosting Facial Expression Recognition by A Semi-Supervised Progressive Teacher](https://scholar.google.com/citations?view_op=view_citation&hl=zh-CN&user=OCwcfAwAAAAJ&citation_for_view=OCwcfAwAAAAJ:u5HHmVD_uO8C) - -Note: -- Progressive Teacher is contributed by [Jing Jiang](https://scholar.google.com/citations?user=OCwcfAwAAAAJ&hl=zh-CN). -- [MobileFaceNet](https://link.springer.com/chapter/10.1007/978-3-319-97909-0_46) is used as the backbone and the model is able to classify seven basic facial expressions (angry, disgust, fearful, happy, neutral, sad, surprised). -- [facial_expression_recognition_mobilefacenet_2022july.onnx](https://github.com/opencv/opencv_zoo/raw/master/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july.onnx) is implemented thanks to [Chengrui Wang](https://github.com/crywang). -- `facial_expression_recognition_mobilefacenet_2022july_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -Results of accuracy evaluation on [RAF-DB](http://whdeng.cn/RAF/model1.html). 
- -| Models | Accuracy | -|-------------|----------| -| Progressive Teacher | 88.27% | - - -## Demo - -***NOTE***: This demo uses [../face_detection_yunet](../face_detection_yunet) as face detector, which supports 5-landmark detection for now (2021sep). - -### Python -Run the following command to try the demo: -```shell -# recognize the facial expression on images -python demo.py --input /path/to/image -v -``` - -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/opencv_zoo_face_expression_recognition -# detect on an image -./build/opencv_zoo_face_expression_recognition -i=/path/to/image -# get help messages -./build/opencv_zoo_face_expression_recognition -h -``` - -### Example outputs - -Note: Zoom in to to see the recognized facial expression in the top-left corner of each face boxes. - -![fer demo](./example_outputs/selfie.jpg) - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference - -- https://ieeexplore.ieee.org/abstract/document/9629313 diff --git a/models/facial_expression_recognition/demo.cpp b/models/facial_expression_recognition/demo.cpp deleted file mode 100644 index bba5cb3f..00000000 --- a/models/facial_expression_recognition/demo.cpp +++ /dev/null @@ -1,304 +0,0 @@ -#include "opencv2/opencv.hpp" - -#include -#include -#include -#include - -using namespace std; -using namespace cv; -using namespace dnn; - -std::vector> backend_target_pairs = { - {DNN_BACKEND_OPENCV, DNN_TARGET_CPU}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA_FP16}, - {DNN_BACKEND_TIMVX, DNN_TARGET_NPU}, - {DNN_BACKEND_CANN, DNN_TARGET_NPU} -}; - -class FER -{ -private: - Net model; - string modelPath; - float std[5][2] = { - {38.2946, 51.6963}, - {73.5318, 51.5014}, - {56.0252, 71.7366}, - {41.5493, 92.3655}, - {70.7299, 92.2041} - }; - vector expressionEnum = { - "angry", "disgust", "fearful", - "happy", "neutral", "sad", "surprised" - }; - Mat stdPoints = Mat(5, 2, CV_32F, this->std); - Size patchSize = Size(112,112); - Scalar imageMean = Scalar(0.5,0.5,0.5); - Scalar imageStd = Scalar(0.5,0.5,0.5); - - const String inputNames = "data"; - const String outputNames = "label"; - - int backend_id; - int target_id; - -public: - FER(const string& modelPath, - int backend_id = 0, - int target_id = 0) - : modelPath(modelPath), backend_id(backend_id), target_id(target_id) - { - this->model = readNet(modelPath); - this->model.setPreferableBackend(backend_id); - this->model.setPreferableTarget(target_id); - } - - Mat preprocess(const Mat image, const Mat points) - { - // image alignment - Mat transformation = estimateAffine2D(points, this->stdPoints); - Mat aligned = Mat::zeros(this->patchSize.height, this->patchSize.width, image.type()); - warpAffine(image, aligned, transformation, this->patchSize); - - // image normalization - aligned.convertTo(aligned, CV_32F, 1.0 / 255.0); - aligned -= imageMean; - aligned /= imageStd; - - 
return blobFromImage(aligned);; - } - - String infer(const Mat image, const Mat facePoints) - { - Mat points = facePoints(Rect(4, 0, facePoints.cols-5, facePoints.rows)).reshape(2, 5); - Mat inputBlob = preprocess(image, points); - - this->model.setInput(inputBlob, this->inputNames); - Mat outputBlob = this->model.forward(this->outputNames); - - Point maxLoc; - minMaxLoc(outputBlob, nullptr, nullptr, nullptr, &maxLoc); - - return getDesc(maxLoc.x); - } - - String getDesc(int ind) - { - - if (ind >= 0 && ind < this->expressionEnum.size()) - { - return this->expressionEnum[ind]; - } - else - { - cerr << "Error: Index out of bounds." << endl; - return ""; - } - } - -}; - -class YuNet -{ -public: - YuNet(const string& model_path, - const Size& input_size = Size(320, 320), - float conf_threshold = 0.6f, - float nms_threshold = 0.3f, - int top_k = 5000, - int backend_id = 0, - int target_id = 0) - : model_path_(model_path), input_size_(input_size), - conf_threshold_(conf_threshold), nms_threshold_(nms_threshold), - top_k_(top_k), backend_id_(backend_id), target_id_(target_id) - { - model = FaceDetectorYN::create(model_path_, "", input_size_, conf_threshold_, nms_threshold_, top_k_, backend_id_, target_id_); - } - - void setBackendAndTarget(int backend_id, int target_id) - { - backend_id_ = backend_id; - target_id_ = target_id; - model = FaceDetectorYN::create(model_path_, "", input_size_, conf_threshold_, nms_threshold_, top_k_, backend_id_, target_id_); - } - - /* Overwrite the input size when creating the model. Size format: [Width, Height]. 
- */ - void setInputSize(const Size& input_size) - { - input_size_ = input_size; - model->setInputSize(input_size_); - } - - Mat infer(const Mat image) - { - Mat res; - model->detect(image, res); - return res; - } - -private: - Ptr model; - - string model_path_; - Size input_size_; - float conf_threshold_; - float nms_threshold_; - int top_k_; - int backend_id_; - int target_id_; -}; - -cv::Mat visualize(const cv::Mat& image, const cv::Mat& faces, const vector expressions, float fps = -1.f) -{ - static cv::Scalar box_color{0, 255, 0}; - static std::vector landmark_color{ - cv::Scalar(255, 0, 0), // right eye - cv::Scalar( 0, 0, 255), // left eye - cv::Scalar( 0, 255, 0), // nose tip - cv::Scalar(255, 0, 255), // right mouth corner - cv::Scalar( 0, 255, 255) // left mouth corner - }; - static cv::Scalar text_color{0, 255, 0}; - - auto output_image = image.clone(); - - if (fps >= 0) - { - cv::putText(output_image, cv::format("FPS: %.2f", fps), cv::Point(0, 15), cv::FONT_HERSHEY_SIMPLEX, 0.5, text_color, 2); - } - - for (int i = 0; i < faces.rows; ++i) - { - // Draw bounding boxes - int x1 = static_cast(faces.at(i, 0)); - int y1 = static_cast(faces.at(i, 1)); - int w = static_cast(faces.at(i, 2)); - int h = static_cast(faces.at(i, 3)); - cv::rectangle(output_image, cv::Rect(x1, y1, w, h), box_color, 2); - - // Expression as text - String exp = expressions[i]; - cv::putText(output_image, exp, cv::Point(x1, y1+12), cv::FONT_HERSHEY_DUPLEX, 0.5, text_color); - - // Draw landmarks - for (int j = 0; j < landmark_color.size(); ++j) - { - int x = static_cast(faces.at(i, 2*j+4)), y = static_cast(faces.at(i, 2*j+5)); - cv::circle(output_image, cv::Point(x, y), 2, landmark_color[j], 2); - } - } - return output_image; -} - -string keys = -"{ help h | | Print help message. 
}" -"{ model m | facial_expression_recognition_mobilefacenet_2022july.onnx | Usage: Path to the model, defaults to facial_expression_recognition_mobilefacenet_2022july.onnx }" -"{ yunet_model ym | ../face_detection_yunet/face_detection_yunet_2023mar.onnx | Usage: Path to the face detection yunet model, defaults to face_detection_yunet_2023mar.onnx }" -"{ input i | | Path to input image or video file. Skip this argument to capture frames from a camera.}" -"{ backend_target t | 0 | Choose one of the backend-target pair to run this demo:\n" - "0: (default) OpenCV implementation + CPU,\n" - "1: CUDA + GPU (CUDA),\n" - "2: CUDA + GPU (CUDA FP16),\n" - "3: TIM-VX + NPU,\n" - "4: CANN + NPU}" -"{ save s | false | Specify to save results.}" -"{ vis v | true | Specify to open a window for result visualization.}" -; - - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("Facial Expression Recognition"); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - string modelPath = parser.get("model"); - string yunetModelPath = parser.get("yunet_model"); - string inputPath = parser.get("input"); - uint8_t backendTarget = parser.get("backend_target"); - bool saveFlag = parser.get("save"); - bool visFlag = parser.get("vis"); - - if (modelPath.empty()) - CV_Error(Error::StsError, "Model file " + modelPath + " not found"); - - if (yunetModelPath.empty()) - CV_Error(Error::StsError, "Face Detection Model file " + yunetModelPath + " not found"); - - YuNet faceDetectionModel(yunetModelPath); - FER expressionRecognitionModel(modelPath, backend_target_pairs[backendTarget].first, backend_target_pairs[backendTarget].second); - - VideoCapture cap; - if (!inputPath.empty()) - cap.open(samples::findFile(inputPath)); - else - cap.open(0); - - if (!cap.isOpened()) - CV_Error(Error::StsError, "Cannot opend video or file"); - - Mat frame; - static const std::string kWinName = "Facial Expression Demo"; - - - while (waitKey(1) < 0) - 
{ - cap >> frame; - - if (frame.empty()) - { - if(inputPath.empty()) - cout << "Frame is empty" << endl; - break; - } - - faceDetectionModel.setInputSize(frame.size()); - - Mat faces = faceDetectionModel.infer(frame); - vector expressions; - - for (int i = 0; i < faces.rows; ++i) - { - Mat face = faces.row(i); - String exp = expressionRecognitionModel.infer(frame, face); - expressions.push_back(exp); - - int x1 = static_cast(faces.at(i, 0)); - int y1 = static_cast(faces.at(i, 1)); - int w = static_cast(faces.at(i, 2)); - int h = static_cast(faces.at(i, 3)); - float conf = faces.at(i, 14); - - std::cout << cv::format("%d: x1=%d, y1=%d, w=%d, h=%d, conf=%.4f expression=%s\n", i, x1, y1, w, h, conf, exp.c_str()); - - } - - Mat res_frame = visualize(frame, faces, expressions); - - if(visFlag || inputPath.empty()) - { - imshow(kWinName, res_frame); - if(!inputPath.empty()) - waitKey(0); - } - if(saveFlag) - { - cout << "Results are saved to result.jpg" << endl; - - cv::imwrite("result.jpg", res_frame); - } - } - - - return 0; - -} - diff --git a/models/facial_expression_recognition/demo.py b/models/facial_expression_recognition/demo.py deleted file mode 100644 index 3b273928..00000000 --- a/models/facial_expression_recognition/demo.py +++ /dev/null @@ -1,135 +0,0 @@ -import sys -import argparse -import copy -import datetime - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from facial_fer_model import FacialExpressionRecog - -sys.path.append('../face_detection_yunet') -from yunet import YuNet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - 
[cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='Facial Expression Recognition') -parser.add_argument('--input', '-i', type=str, - help='Path to the input image. Omit for using default camera.') -parser.add_argument('--model', '-m', type=str, default='./facial_expression_recognition_mobilefacenet_2022july.onnx', - help='Path to the facial expression recognition model.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--save', '-s', action='store_true', - help='Specify to save results. This flag is invalid when using camera.') -parser.add_argument('--vis', '-v', action='store_true', - help='Specify to open a window for result visualization. This flag is invalid when using camera.') -args = parser.parse_args() - -def visualize(image, det_res, fer_res, box_color=(0, 255, 0), text_color=(0, 0, 255)): - - print('%s %3d faces detected.' % (datetime.datetime.now(), len(det_res))) - - output = image.copy() - landmark_color = [ - (255, 0, 0), # right eye - (0, 0, 255), # left eye - (0, 255, 0), # nose tip - (255, 0, 255), # right mouth corner - (0, 255, 255) # left mouth corner - ] - - for ind, (det, fer_type) in enumerate(zip(det_res, fer_res)): - bbox = det[0:4].astype(np.int32) - fer_type = FacialExpressionRecog.getDesc(fer_type) - print("Face %2d: %d %d %d %d %s." 
% (ind, bbox[0], bbox[1], bbox[0]+bbox[2], bbox[1]+bbox[3], fer_type)) - cv.rectangle(output, (bbox[0], bbox[1]), (bbox[0]+bbox[2], bbox[1]+bbox[3]), box_color, 2) - cv.putText(output, fer_type, (bbox[0], bbox[1]+12), cv.FONT_HERSHEY_DUPLEX, 0.5, text_color) - landmarks = det[4:14].astype(np.int32).reshape((5, 2)) - for idx, landmark in enumerate(landmarks): - cv.circle(output, landmark, 2, landmark_color[idx], 2) - return output - - -def process(detect_model, fer_model, frame): - h, w, _ = frame.shape - detect_model.setInputSize([w, h]) - dets = detect_model.infer(frame) - - if dets is None: - return False, None, None - - fer_res = np.zeros(0, dtype=np.int8) - for face_points in dets: - fer_res = np.concatenate((fer_res, fer_model.infer(frame, face_points[:-1])), axis=0) - return True, dets, fer_res - - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - detect_model = YuNet(modelPath='../face_detection_yunet/face_detection_yunet_2023mar.onnx') - - fer_model = FacialExpressionRecog(modelPath=args.model, - backendId=backend_id, - targetId=target_id) - - # If input is an image - if args.input is not None: - image = cv.imread(args.input) - - # Get detection and fer results - status, dets, fer_res = process(detect_model, fer_model, image) - - if status: - # Draw results on the input image - image = visualize(image, dets, fer_res) - - # Save results - if args.save: - cv.imwrite('result.jpg', image) - print('Results saved to result.jpg\n') - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - # Get detection and fer results - status, dets, fer_res = 
process(detect_model, fer_model, frame) - - if status: - # Draw results on the input image - frame = visualize(frame, dets, fer_res) - - # Visualize results in a new window - cv.imshow('FER Demo', frame) diff --git a/models/facial_expression_recognition/example_outputs/selfie.jpg b/models/facial_expression_recognition/example_outputs/selfie.jpg deleted file mode 100644 index 5a74c3d3..00000000 --- a/models/facial_expression_recognition/example_outputs/selfie.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e3f8148169fe993afd0164200335a24301f1221a45535d7a938a0d133f2149ac -size 1233078 diff --git a/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july.onnx b/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july.onnx deleted file mode 100644 index 67dd024c..00000000 --- a/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4f61307602fc089ce20488a31d4e4614e3c9753a7d6c41578c854858b183e1a9 -size 4791892 diff --git a/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july_int8.onnx b/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july_int8.onnx deleted file mode 100644 index 06473970..00000000 --- a/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f0d7093aff10e2638c734c5f18a6a7eabd2b9239b20bdb9b8090865a6f69a1ed -size 1364007 diff --git a/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july_int8bq.onnx b/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july_int8bq.onnx deleted file mode 100644 index b396210d..00000000 --- 
a/models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0c3eaf9d0a7d442c0aa3beb3234243e1cdff9ad8871fb3cec346e90874caf57d -size 1376702 diff --git a/models/facial_expression_recognition/facial_fer_model.py b/models/facial_expression_recognition/facial_fer_model.py deleted file mode 100644 index 307af559..00000000 --- a/models/facial_expression_recognition/facial_fer_model.py +++ /dev/null @@ -1,176 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2022, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. - -import numpy as np -import cv2 as cv - -class FacialExpressionRecog: - def __init__(self, modelPath, backendId=0, targetId=0): - self._modelPath = modelPath - self._backendId = backendId - self._targetId = targetId - - self._model = cv.dnn.readNet(self._modelPath) - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - self._align_model = FaceAlignment() - - self._inputNames = 'data' - self._outputNames = ['label'] - self._inputSize = [112, 112] - self._mean = np.array([0.5, 0.5, 0.5])[np.newaxis, np.newaxis, :] - self._std = np.array([0.5, 0.5, 0.5])[np.newaxis, np.newaxis, :] - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def _preprocess(self, image, bbox): - if bbox is not None: - image = self._align_model.get_align_image(image, bbox[4:].reshape(-1, 2)) - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - image = image.astype(np.float32, 
copy=False) / 255.0 - image -= self._mean - image /= self._std - return cv.dnn.blobFromImage(image) - - def infer(self, image, bbox=None): - # Preprocess - inputBlob = self._preprocess(image, bbox) - - # Forward - self._model.setInput(inputBlob, self._inputNames) - outputBlob = self._model.forward(self._outputNames) - - # Postprocess - results = self._postprocess(outputBlob) - - return results - - def _postprocess(self, outputBlob): - result = np.argmax(outputBlob[0], axis=1).astype(np.uint8) - return result - - @staticmethod - def getDesc(ind): - _expression_enum = ["angry", "disgust", "fearful", "happy", "neutral", "sad", "surprised"] - return _expression_enum[ind] - - -class FaceAlignment(): - def __init__(self, reflective=False): - self._std_points = np.array([[38.2946, 51.6963], [73.5318, 51.5014], [56.0252, 71.7366], [41.5493, 92.3655], [70.7299, 92.2041]]) - self.reflective = reflective - - def __tformfwd(self, trans, uv): - uv = np.hstack((uv, np.ones((uv.shape[0], 1)))) - xy = np.dot(uv, trans) - xy = xy[:, 0:-1] - return xy - - def __tforminv(self, trans, uv): - Tinv = np.linalg.inv(trans) - xy = self.__tformfwd(Tinv, uv) - return xy - - def __findNonreflectiveSimilarity(self, uv, xy, options=None): - options = {"K": 2} - - K = options["K"] - M = xy.shape[0] - x = xy[:, 0].reshape((-1, 1)) # use reshape to keep a column vector - y = xy[:, 1].reshape((-1, 1)) # use reshape to keep a column vector - # print '--->x, y:\n', x, y - - tmp1 = np.hstack((x, y, np.ones((M, 1)), np.zeros((M, 1)))) - tmp2 = np.hstack((y, -x, np.zeros((M, 1)), np.ones((M, 1)))) - X = np.vstack((tmp1, tmp2)) - # print '--->X.shape: ', X.shape - # print 'X:\n', X - - u = uv[:, 0].reshape((-1, 1)) # use reshape to keep a column vector - v = uv[:, 1].reshape((-1, 1)) # use reshape to keep a column vector - U = np.vstack((u, v)) - # print '--->U.shape: ', U.shape - # print 'U:\n', U - - # We know that X * r = U - if np.linalg.matrix_rank(X) >= 2 * K: - r, _, _, _ = np.linalg.lstsq(X, U, 
rcond=-1) - # print(r, X, U, sep="\n") - r = np.squeeze(r) - else: - raise Exception("cp2tform:twoUniquePointsReq") - - sc = r[0] - ss = r[1] - tx = r[2] - ty = r[3] - - Tinv = np.array([[sc, -ss, 0], [ss, sc, 0], [tx, ty, 1]]) - T = np.linalg.inv(Tinv) - T[:, 2] = np.array([0, 0, 1]) - - return T, Tinv - - def __findSimilarity(self, uv, xy, options=None): - options = {"K": 2} - - # uv = np.array(uv) - # xy = np.array(xy) - - # Solve for trans1 - trans1, trans1_inv = self.__findNonreflectiveSimilarity(uv, xy, options) - - # manually reflect the xy data across the Y-axis - xyR = xy - xyR[:, 0] = -1 * xyR[:, 0] - # Solve for trans2 - trans2r, trans2r_inv = self.__findNonreflectiveSimilarity(uv, xyR, options) - - # manually reflect the tform to undo the reflection done on xyR - TreflectY = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, 1]]) - trans2 = np.dot(trans2r, TreflectY) - - # Figure out if trans1 or trans2 is better - xy1 = self.__tformfwd(trans1, uv) - norm1 = np.linalg.norm(xy1 - xy) - xy2 = self.__tformfwd(trans2, uv) - norm2 = np.linalg.norm(xy2 - xy) - - if norm1 <= norm2: - return trans1, trans1_inv - else: - trans2_inv = np.linalg.inv(trans2) - return trans2, trans2_inv - - def __get_similarity_transform(self, src_pts, dst_pts): - if self.reflective: - trans, trans_inv = self.__findSimilarity(src_pts, dst_pts) - else: - trans, trans_inv = self.__findNonreflectiveSimilarity(src_pts, dst_pts) - return trans, trans_inv - - def __cvt_tform_mat_for_cv2(self, trans): - cv2_trans = trans[:, 0:2].T - return cv2_trans - - def get_similarity_transform_for_cv2(self, src_pts, dst_pts): - trans, trans_inv = self.__get_similarity_transform(src_pts, dst_pts) - cv2_trans = self.__cvt_tform_mat_for_cv2(trans) - return cv2_trans, trans - - def get_align_image(self, image, lm5_points): - assert lm5_points is not None - tfm, trans = self.get_similarity_transform_for_cv2(lm5_points, self._std_points) - return cv.warpAffine(image, tfm, (112, 112)) diff --git 
a/models/handpose_estimation_mediapipe/LICENSE b/models/handpose_estimation_mediapipe/LICENSE deleted file mode 100644 index 7a4a3ea2..00000000 --- a/models/handpose_estimation_mediapipe/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/models/handpose_estimation_mediapipe/README.md b/models/handpose_estimation_mediapipe/README.md deleted file mode 100644 index bab4ffd8..00000000 --- a/models/handpose_estimation_mediapipe/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# Hand pose estimation from MediaPipe Handpose - -This model estimates 21 hand keypoints per detected hand from [palm detector](../palm_detection_mediapipe). 
(The image below is referenced from [MediaPipe Hands Keypoints](https://github.com/tensorflow/tfjs-models/tree/master/hand-pose-detection#mediapipe-hands-keypoints-used-in-mediapipe-hands)) - -![MediaPipe Hands Keypoints](./example_outputs/hand_keypoints.png) - -Hand gesture classification demo (0-9) -![hand gestures](./example_outputs/gesture_classification.png) - -This model is converted from TFlite to ONNX using following tools: -- TFLite model to ONNX: https://github.com/onnx/tensorflow-onnx -- simplified by [onnx-simplifier](https://github.com/daquexian/onnx-simplifier) - -**Note**: -- The int8-quantized model may produce invalid results due to a significant drop of accuracy. -- Visit https://github.com/google/mediapipe/blob/master/docs/solutions/models.md#hands for models of larger scale. -- `handpose_estimation_mediapipe_2023feb_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -## Demo - -Run the following commands to try the demo: -```bash -# detect on camera input -python demo.py -# detect on an image -python demo.py -i /path/to/image -v -``` - -### Example outputs - -![webcam demo](./example_outputs/mphandpose_demo.webp) - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference - -- MediaPipe Handpose: https://developers.google.com/mediapipe/solutions/vision/hand_landmarker -- MediaPipe hands model and model card: https://github.com/google/mediapipe/blob/master/docs/solutions/models.md#hands -- Handpose TFJS:https://github.com/tensorflow/tfjs-models/tree/master/handpose -- Int8 model quantized with rgb evaluation set of FreiHAND: https://lmb.informatik.uni-freiburg.de/resources/datasets/FreihandDataset.en.html diff --git a/models/handpose_estimation_mediapipe/demo.py b/models/handpose_estimation_mediapipe/demo.py deleted file mode 100644 index 37c3cf03..00000000 --- a/models/handpose_estimation_mediapipe/demo.py +++ /dev/null @@ -1,356 +0,0 @@ -import sys -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from mp_handpose import MPHandPose - -sys.path.append('../palm_detection_mediapipe') -from mp_palmdet import MPPalmDet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='Hand Pose Estimation from MediaPipe') -parser.add_argument('--input', '-i', type=str, - help='Path to the input image. 
Omit for using default camera.') -parser.add_argument('--model', '-m', type=str, default='./handpose_estimation_mediapipe_2023feb.onnx', - help='Path to the model.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--conf_threshold', type=float, default=0.9, - help='Filter out hands of confidence < conf_threshold.') -parser.add_argument('--save', '-s', action='store_true', - help='Specify to save results. This flag is invalid when using camera.') -parser.add_argument('--vis', '-v', action='store_true', - help='Specify to open a window for result visualization. This flag is invalid when using camera.') -args = parser.parse_args() - - -def visualize(image, hands, print_result=False): - display_screen = image.copy() - display_3d = np.zeros((400, 400, 3), np.uint8) - cv.line(display_3d, (200, 0), (200, 400), (255, 255, 255), 2) - cv.line(display_3d, (0, 200), (400, 200), (255, 255, 255), 2) - cv.putText(display_3d, 'Main View', (0, 12), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - cv.putText(display_3d, 'Top View', (200, 12), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - cv.putText(display_3d, 'Left View', (0, 212), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - cv.putText(display_3d, 'Right View', (200, 212), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - is_draw = False # ensure only one hand is drawn - - def draw_lines(image, landmarks, is_draw_point=True, thickness=2): - cv.line(image, landmarks[0], landmarks[1], (255, 255, 255), thickness) - cv.line(image, landmarks[1], landmarks[2], (255, 255, 255), thickness) - cv.line(image, landmarks[2], landmarks[3], (255, 255, 255), thickness) - cv.line(image, landmarks[3], landmarks[4], (255, 255, 255), thickness) - - 
cv.line(image, landmarks[0], landmarks[5], (255, 255, 255), thickness) - cv.line(image, landmarks[5], landmarks[6], (255, 255, 255), thickness) - cv.line(image, landmarks[6], landmarks[7], (255, 255, 255), thickness) - cv.line(image, landmarks[7], landmarks[8], (255, 255, 255), thickness) - - cv.line(image, landmarks[0], landmarks[9], (255, 255, 255), thickness) - cv.line(image, landmarks[9], landmarks[10], (255, 255, 255), thickness) - cv.line(image, landmarks[10], landmarks[11], (255, 255, 255), thickness) - cv.line(image, landmarks[11], landmarks[12], (255, 255, 255), thickness) - - cv.line(image, landmarks[0], landmarks[13], (255, 255, 255), thickness) - cv.line(image, landmarks[13], landmarks[14], (255, 255, 255), thickness) - cv.line(image, landmarks[14], landmarks[15], (255, 255, 255), thickness) - cv.line(image, landmarks[15], landmarks[16], (255, 255, 255), thickness) - - cv.line(image, landmarks[0], landmarks[17], (255, 255, 255), thickness) - cv.line(image, landmarks[17], landmarks[18], (255, 255, 255), thickness) - cv.line(image, landmarks[18], landmarks[19], (255, 255, 255), thickness) - cv.line(image, landmarks[19], landmarks[20], (255, 255, 255), thickness) - - if is_draw_point: - for p in landmarks: - cv.circle(image, p, thickness, (0, 0, 255), -1) - - # used for gesture classification - gc = GestureClassification() - - for idx, handpose in enumerate(hands): - conf = handpose[-1] - bbox = handpose[0:4].astype(np.int32) - handedness = handpose[-2] - if handedness <= 0.5: - handedness_text = 'Left' - else: - handedness_text = 'Right' - landmarks_screen = handpose[4:67].reshape(21, 3).astype(np.int32) - landmarks_word = handpose[67:130].reshape(21, 3) - - gesture = gc.classify(landmarks_screen) - - # Print results - if print_result: - print('-----------hand {}-----------'.format(idx + 1)) - print('conf: {:.2f}'.format(conf)) - print('handedness: {}'.format(handedness_text)) - print('gesture: {}'.format(gesture)) - print('hand box: {}'.format(bbox)) - 
print('hand landmarks: ') - for l in landmarks_screen: - print('\t{}'.format(l)) - print('hand world landmarks: ') - for l in landmarks_word: - print('\t{}'.format(l)) - - # draw box - cv.rectangle(display_screen, (bbox[0], bbox[1]), (bbox[2], bbox[3]), (0, 255, 0), 2) - # draw handedness - cv.putText(display_screen, '{}'.format(handedness_text), (bbox[0], bbox[1] + 12), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - # draw gesture - cv.putText(display_screen, '{}'.format(gesture), (bbox[0], bbox[1] + 30), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - # Draw line between each key points - landmarks_xy = landmarks_screen[:, 0:2] - draw_lines(display_screen, landmarks_xy, is_draw_point=False) - - # z value is relative to WRIST - for p in landmarks_screen: - r = max(5 - p[2] // 5, 0) - r = min(r, 14) - cv.circle(display_screen, np.array([p[0], p[1]]), r, (0, 0, 255), -1) - - if is_draw is False: - is_draw = True - # Main view - landmarks_xy = landmarks_word[:, [0, 1]] - landmarks_xy = (landmarks_xy * 1000 + 100).astype(np.int32) - draw_lines(display_3d, landmarks_xy, thickness=5) - - # Top view - landmarks_xz = landmarks_word[:, [0, 2]] - landmarks_xz[:, 1] = -landmarks_xz[:, 1] - landmarks_xz = (landmarks_xz * 1000 + np.array([300, 100])).astype(np.int32) - draw_lines(display_3d, landmarks_xz, thickness=5) - - # Left view - landmarks_yz = landmarks_word[:, [2, 1]] - landmarks_yz[:, 0] = -landmarks_yz[:, 0] - landmarks_yz = (landmarks_yz * 1000 + np.array([100, 300])).astype(np.int32) - draw_lines(display_3d, landmarks_yz, thickness=5) - - # Right view - landmarks_zy = landmarks_word[:, [2, 1]] - landmarks_zy = (landmarks_zy * 1000 + np.array([300, 300])).astype(np.int32) - draw_lines(display_3d, landmarks_zy, thickness=5) - - return display_screen, display_3d - -class GestureClassification: - def _vector_2_angle(self, v1, v2): - uv1 = v1 / np.linalg.norm(v1) - uv2 = v2 / np.linalg.norm(v2) - angle = np.degrees(np.arccos(np.dot(uv1, uv2))) - return angle - - def 
_hand_angle(self, hand): - angle_list = [] - # thumb - angle_ = self._vector_2_angle( - np.array([hand[0][0] - hand[2][0], hand[0][1] - hand[2][1]]), - np.array([hand[3][0] - hand[4][0], hand[3][1] - hand[4][1]]) - ) - angle_list.append(angle_) - # index - angle_ = self._vector_2_angle( - np.array([hand[0][0] - hand[6][0], hand[0][1] - hand[6][1]]), - np.array([hand[7][0] - hand[8][0], hand[7][1] - hand[8][1]]) - ) - angle_list.append(angle_) - # middle - angle_ = self._vector_2_angle( - np.array([hand[0][0] - hand[10][0], hand[0][1] - hand[10][1]]), - np.array([hand[11][0] - hand[12][0], hand[11][1] - hand[12][1]]) - ) - angle_list.append(angle_) - # ring - angle_ = self._vector_2_angle( - np.array([hand[0][0] - hand[14][0], hand[0][1] - hand[14][1]]), - np.array([hand[15][0] - hand[16][0], hand[15][1] - hand[16][1]]) - ) - angle_list.append(angle_) - # pink - angle_ = self._vector_2_angle( - np.array([hand[0][0] - hand[18][0], hand[0][1] - hand[18][1]]), - np.array([hand[19][0] - hand[20][0], hand[19][1] - hand[20][1]]) - ) - angle_list.append(angle_) - return angle_list - - def _finger_status(self, lmList): - fingerList = [] - originx, originy = lmList[0] - keypoint_list = [[5, 4], [6, 8], [10, 12], [14, 16], [18, 20]] - for point in keypoint_list: - x1, y1 = lmList[point[0]] - x2, y2 = lmList[point[1]] - if np.hypot(x2 - originx, y2 - originy) > np.hypot(x1 - originx, y1 - originy): - fingerList.append(True) - else: - fingerList.append(False) - - return fingerList - - def _classify(self, hand): - thr_angle = 65. - thr_angle_thumb = 30. - thr_angle_s = 49. 
- gesture_str = "Undefined" - - angle_list = self._hand_angle(hand) - - thumbOpen, firstOpen, secondOpen, thirdOpen, fourthOpen = self._finger_status(hand) - # Number - if (angle_list[0] > thr_angle_thumb) and (angle_list[1] > thr_angle) and (angle_list[2] > thr_angle) and ( - angle_list[3] > thr_angle) and (angle_list[4] > thr_angle) and \ - not firstOpen and not secondOpen and not thirdOpen and not fourthOpen: - gesture_str = "Zero" - elif (angle_list[0] > thr_angle_thumb) and (angle_list[1] < thr_angle_s) and (angle_list[2] > thr_angle) and ( - angle_list[3] > thr_angle) and (angle_list[4] > thr_angle) and \ - firstOpen and not secondOpen and not thirdOpen and not fourthOpen: - gesture_str = "One" - elif (angle_list[0] > thr_angle_thumb) and (angle_list[1] < thr_angle_s) and (angle_list[2] < thr_angle_s) and ( - angle_list[3] > thr_angle) and (angle_list[4] > thr_angle) and \ - not thumbOpen and firstOpen and secondOpen and not thirdOpen and not fourthOpen: - gesture_str = "Two" - elif (angle_list[0] > thr_angle_thumb) and (angle_list[1] < thr_angle_s) and (angle_list[2] < thr_angle_s) and ( - angle_list[3] < thr_angle_s) and (angle_list[4] > thr_angle) and \ - not thumbOpen and firstOpen and secondOpen and thirdOpen and not fourthOpen: - gesture_str = "Three" - elif (angle_list[0] > thr_angle_thumb) and (angle_list[1] < thr_angle_s) and (angle_list[2] < thr_angle_s) and ( - angle_list[3] < thr_angle_s) and (angle_list[4] < thr_angle) and \ - firstOpen and secondOpen and thirdOpen and fourthOpen: - gesture_str = "Four" - elif (angle_list[0] < thr_angle_s) and (angle_list[1] < thr_angle_s) and (angle_list[2] < thr_angle_s) and ( - angle_list[3] < thr_angle_s) and (angle_list[4] < thr_angle_s) and \ - thumbOpen and firstOpen and secondOpen and thirdOpen and fourthOpen: - gesture_str = "Five" - elif (angle_list[0] < thr_angle_s) and (angle_list[1] > thr_angle) and (angle_list[2] > thr_angle) and ( - angle_list[3] > thr_angle) and (angle_list[4] < thr_angle_s) and \ 
- thumbOpen and not firstOpen and not secondOpen and not thirdOpen and fourthOpen: - gesture_str = "Six" - elif (angle_list[0] < thr_angle_s) and (angle_list[1] < thr_angle) and (angle_list[2] > thr_angle) and ( - angle_list[3] > thr_angle) and (angle_list[4] > thr_angle_s) and \ - thumbOpen and firstOpen and not secondOpen and not thirdOpen and not fourthOpen: - gesture_str = "Seven" - elif (angle_list[0] < thr_angle_s) and (angle_list[1] < thr_angle) and (angle_list[2] < thr_angle) and ( - angle_list[3] > thr_angle) and (angle_list[4] > thr_angle_s) and \ - thumbOpen and firstOpen and secondOpen and not thirdOpen and not fourthOpen: - gesture_str = "Eight" - elif (angle_list[0] < thr_angle_s) and (angle_list[1] < thr_angle) and (angle_list[2] < thr_angle) and ( - angle_list[3] < thr_angle) and (angle_list[4] > thr_angle_s) and \ - thumbOpen and firstOpen and secondOpen and thirdOpen and not fourthOpen: - gesture_str = "Nine" - - return gesture_str - - def classify(self, landmarks): - hand = landmarks[:21, :2] - gesture = self._classify(hand) - return gesture - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - # palm detector - palm_detector = MPPalmDet(modelPath='../palm_detection_mediapipe/palm_detection_mediapipe_2023feb.onnx', - nmsThreshold=0.3, - scoreThreshold=0.6, - backendId=backend_id, - targetId=target_id) - # handpose detector - handpose_detector = MPHandPose(modelPath=args.model, - confThreshold=args.conf_threshold, - backendId=backend_id, - targetId=target_id) - - # If input is an image - if args.input is not None: - image = cv.imread(args.input) - - # Palm detector inference - palms = palm_detector.infer(image) - hands = np.empty(shape=(0, 132)) - - # Estimate the pose of each hand - for palm in palms: - # Handpose detector inference - handpose = handpose_detector.infer(image, palm) - if handpose is not None: - hands = np.vstack((hands, handpose)) - # 
Draw results on the input image - image, view_3d = visualize(image, hands, True) - - if len(palms) == 0: - print('No palm detected!') - else: - print('Palm detected!') - - # Save results - if args.save: - cv.imwrite('result.jpg', image) - print('Results saved to result.jpg\n') - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - cv.imshow('3D HandPose Demo', view_3d) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - # Palm detector inference - palms = palm_detector.infer(frame) - hands = np.empty(shape=(0, 132)) - - tm.start() - # Estimate the pose of each hand - for palm in palms: - # Handpose detector inference - handpose = handpose_detector.infer(frame, palm) - if handpose is not None: - hands = np.vstack((hands, handpose)) - tm.stop() - # Draw results on the input image - frame, view_3d = visualize(frame, hands) - - if len(palms) == 0: - print('No palm detected!') - else: - print('Palm detected!') - cv.putText(frame, 'FPS: {:.2f}'.format(tm.getFPS()), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255)) - - cv.imshow('MediaPipe Handpose Detection Demo', frame) - cv.imshow('3D HandPose Demo', view_3d) - tm.reset() diff --git a/models/handpose_estimation_mediapipe/example_outputs/gesture_classification.png b/models/handpose_estimation_mediapipe/example_outputs/gesture_classification.png deleted file mode 100644 index 29c13c06..00000000 --- a/models/handpose_estimation_mediapipe/example_outputs/gesture_classification.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:759c971087aef3068527f46b401af9fdead5a8dda3f68257d445bfb279fa99a9 -size 513541 diff --git a/models/handpose_estimation_mediapipe/example_outputs/hand_keypoints.png 
b/models/handpose_estimation_mediapipe/example_outputs/hand_keypoints.png deleted file mode 100644 index b3541222..00000000 --- a/models/handpose_estimation_mediapipe/example_outputs/hand_keypoints.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:24ce896420149a7b9dc0c05e547681e3027b4a65ce3a0a6c73685ba1cc535496 -size 159592 diff --git a/models/handpose_estimation_mediapipe/example_outputs/mphandpose_demo.webp b/models/handpose_estimation_mediapipe/example_outputs/mphandpose_demo.webp deleted file mode 100644 index 1bb1441a..00000000 --- a/models/handpose_estimation_mediapipe/example_outputs/mphandpose_demo.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c7873ed63f8c19a5961b53d866d71f0fe767f137e4ea6dbb6756a03b93b2b6a0 -size 1656194 diff --git a/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb.onnx b/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb.onnx deleted file mode 100644 index 3d660a34..00000000 --- a/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:db0898ae717b76b075d9bf563af315b29562e11f8df5027a1ef07b02bef6d81c -size 4099621 diff --git a/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb_int8.onnx b/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb_int8.onnx deleted file mode 100644 index d6301154..00000000 --- a/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e97bc1fb83b641954d33424c82b6ade719d0f73250bdb91710ecfd5f7b47e321 -size 1167628 diff --git a/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb_int8bq.onnx b/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb_int8bq.onnx deleted file mode 
100644 index 31b2bf2c..00000000 --- a/models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d7e63ed33e39c8b532a04a5466ac68c8680981637df288bcf93286bec08befbd -size 1222348 diff --git a/models/handpose_estimation_mediapipe/mp_handpose.py b/models/handpose_estimation_mediapipe/mp_handpose.py deleted file mode 100644 index a694c952..00000000 --- a/models/handpose_estimation_mediapipe/mp_handpose.py +++ /dev/null @@ -1,200 +0,0 @@ -import numpy as np -import cv2 as cv - -class MPHandPose: - def __init__(self, modelPath, confThreshold=0.8, backendId=0, targetId=0): - self.model_path = modelPath - self.conf_threshold = confThreshold - self.backend_id = backendId - self.target_id = targetId - - self.input_size = np.array([224, 224]) # wh - self.PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2] - self.PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0 - self.PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2 - self.PALM_BOX_PRE_SHIFT_VECTOR = [0, 0] - self.PALM_BOX_PRE_ENLARGE_FACTOR = 4 - self.PALM_BOX_SHIFT_VECTOR = [0, -0.4] - self.PALM_BOX_ENLARGE_FACTOR = 3 - self.HAND_BOX_SHIFT_VECTOR = [0, -0.1] - self.HAND_BOX_ENLARGE_FACTOR = 1.65 - - self.model = cv.dnn.readNet(self.model_path) - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self.backend_id = backendId - self.target_id = targetId - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - def _cropAndPadFromPalm(self, image, palm_bbox, for_rotation = False): - # shift bounding box - wh_palm_bbox = palm_bbox[1] - palm_bbox[0] - if for_rotation: - shift_vector = self.PALM_BOX_PRE_SHIFT_VECTOR - else: - shift_vector = self.PALM_BOX_SHIFT_VECTOR - shift_vector = shift_vector * wh_palm_bbox - palm_bbox = 
palm_bbox + shift_vector - # enlarge bounding box - center_palm_bbox = np.sum(palm_bbox, axis=0) / 2 - wh_palm_bbox = palm_bbox[1] - palm_bbox[0] - if for_rotation: - enlarge_scale = self.PALM_BOX_PRE_ENLARGE_FACTOR - else: - enlarge_scale = self.PALM_BOX_ENLARGE_FACTOR - new_half_size = wh_palm_bbox * enlarge_scale / 2 - palm_bbox = np.array([ - center_palm_bbox - new_half_size, - center_palm_bbox + new_half_size]) - palm_bbox = palm_bbox.astype(np.int32) - palm_bbox[:, 0] = np.clip(palm_bbox[:, 0], 0, image.shape[1]) - palm_bbox[:, 1] = np.clip(palm_bbox[:, 1], 0, image.shape[0]) - # crop to the size of interest - image = image[palm_bbox[0][1]:palm_bbox[1][1], palm_bbox[0][0]:palm_bbox[1][0], :] - # pad to ensure conner pixels won't be cropped - if for_rotation: - side_len = np.linalg.norm(image.shape[:2]) - else: - side_len = max(image.shape[:2]) - - side_len = int(side_len) - pad_h = side_len - image.shape[0] - pad_w = side_len - image.shape[1] - left = pad_w // 2 - top = pad_h // 2 - right = pad_w - left - bottom = pad_h - top - image = cv.copyMakeBorder(image, top, bottom, left, right, cv.BORDER_CONSTANT, None, (0, 0, 0)) - bias = palm_bbox[0] - [left, top] - return image, palm_bbox, bias - - def _preprocess(self, image, palm): - ''' - Rotate input for inference. 
- Parameters: - image - input image of BGR channel order - palm_bbox - palm bounding box found in image of format [[x1, y1], [x2, y2]] (top-left and bottom-right points) - palm_landmarks - 7 landmarks (5 finger base points, 2 palm base points) of shape [7, 2] - Returns: - rotated_hand - rotated hand image for inference - rotate_palm_bbox - palm box of interest range - angle - rotate angle for hand - rotation_matrix - matrix for rotation and de-rotation - pad_bias - pad pixels of interest range - ''' - # crop and pad image to interest range - pad_bias = np.array([0, 0], dtype=np.int32) # left, top - palm_bbox = palm[0:4].reshape(2, 2) - image, palm_bbox, bias = self._cropAndPadFromPalm(image, palm_bbox, True) - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - pad_bias += bias - - # Rotate input to have vertically oriented hand image - # compute rotation - palm_bbox -= pad_bias - palm_landmarks = palm[4:18].reshape(7, 2) - pad_bias - p1 = palm_landmarks[self.PALM_LANDMARKS_INDEX_OF_PALM_BASE] - p2 = palm_landmarks[self.PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE] - radians = np.pi / 2 - np.arctan2(-(p2[1] - p1[1]), p2[0] - p1[0]) - radians = radians - 2 * np.pi * np.floor((radians + np.pi) / (2 * np.pi)) - angle = np.rad2deg(radians) - # get bbox center - center_palm_bbox = np.sum(palm_bbox, axis=0) / 2 - # get rotation matrix - rotation_matrix = cv.getRotationMatrix2D(center_palm_bbox, angle, 1.0) - # get rotated image - rotated_image = cv.warpAffine(image, rotation_matrix, (image.shape[1], image.shape[0])) - # get bounding boxes from rotated palm landmarks - homogeneous_coord = np.c_[palm_landmarks, np.ones(palm_landmarks.shape[0])] - rotated_palm_landmarks = np.array([ - np.dot(homogeneous_coord, rotation_matrix[0]), - np.dot(homogeneous_coord, rotation_matrix[1])]) - # get landmark bounding box - rotated_palm_bbox = np.array([ - np.amin(rotated_palm_landmarks, axis=1), - np.amax(rotated_palm_landmarks, axis=1)]) # [top-left, bottom-right] - - crop, 
rotated_palm_bbox, _ = self._cropAndPadFromPalm(rotated_image, rotated_palm_bbox) - blob = cv.resize(crop, dsize=self.input_size, interpolation=cv.INTER_AREA).astype(np.float32) - blob = blob / 255. - - return blob[np.newaxis, :, :, :], rotated_palm_bbox, angle, rotation_matrix, pad_bias - - def infer(self, image, palm): - # Preprocess - input_blob, rotated_palm_bbox, angle, rotation_matrix, pad_bias = self._preprocess(image, palm) - - # Forward - self.model.setInput(input_blob) - output_blob = self.model.forward(self.model.getUnconnectedOutLayersNames()) - - # Postprocess - results = self._postprocess(output_blob, rotated_palm_bbox, angle, rotation_matrix, pad_bias) - return results # [bbox_coords, landmarks_coords, conf] - - def _postprocess(self, blob, rotated_palm_bbox, angle, rotation_matrix, pad_bias): - landmarks, conf, handedness, landmarks_word = blob - - conf = conf[0][0] - if conf < self.conf_threshold: - return None - - landmarks = landmarks[0].reshape(-1, 3) # shape: (1, 63) -> (21, 3) - landmarks_word = landmarks_word[0].reshape(-1, 3) # shape: (1, 63) -> (21, 3) - - # transform coords back to the input coords - wh_rotated_palm_bbox = rotated_palm_bbox[1] - rotated_palm_bbox[0] - scale_factor = wh_rotated_palm_bbox / self.input_size - landmarks[:, :2] = (landmarks[:, :2] - self.input_size / 2) * max(scale_factor) - landmarks[:, 2] = landmarks[:, 2] * max(scale_factor) # depth scaling - coords_rotation_matrix = cv.getRotationMatrix2D((0, 0), angle, 1.0) - rotated_landmarks = np.dot(landmarks[:, :2], coords_rotation_matrix[:, :2]) - rotated_landmarks = np.c_[rotated_landmarks, landmarks[:, 2]] - rotated_landmarks_world = np.dot(landmarks_word[:, :2], coords_rotation_matrix[:, :2]) - rotated_landmarks_world = np.c_[rotated_landmarks_world, landmarks_word[:, 2]] - # invert rotation - rotation_component = np.array([ - [rotation_matrix[0][0], rotation_matrix[1][0]], - [rotation_matrix[0][1], rotation_matrix[1][1]]]) - translation_component = np.array([ - 
rotation_matrix[0][2], rotation_matrix[1][2]]) - inverted_translation = np.array([ - -np.dot(rotation_component[0], translation_component), - -np.dot(rotation_component[1], translation_component)]) - inverse_rotation_matrix = np.c_[rotation_component, inverted_translation] - # get box center - center = np.append(np.sum(rotated_palm_bbox, axis=0) / 2, 1) - original_center = np.array([ - np.dot(center, inverse_rotation_matrix[0]), - np.dot(center, inverse_rotation_matrix[1])]) - landmarks[:, :2] = rotated_landmarks[:, :2] + original_center + pad_bias - - # get bounding box from rotated_landmarks - bbox = np.array([ - np.amin(landmarks[:, :2], axis=0), - np.amax(landmarks[:, :2], axis=0)]) # [top-left, bottom-right] - # shift bounding box - wh_bbox = bbox[1] - bbox[0] - shift_vector = self.HAND_BOX_SHIFT_VECTOR * wh_bbox - bbox = bbox + shift_vector - # enlarge bounding box - center_bbox = np.sum(bbox, axis=0) / 2 - wh_bbox = bbox[1] - bbox[0] - new_half_size = wh_bbox * self.HAND_BOX_ENLARGE_FACTOR / 2 - bbox = np.array([ - center_bbox - new_half_size, - center_bbox + new_half_size]) - - # [0: 4]: hand bounding box found in image of format [x1, y1, x2, y2] (top-left and bottom-right points) - # [4: 67]: screen landmarks with format [x1, y1, z1, x2, y2 ... x21, y21, z21], z value is relative to WRIST - # [67: 130]: world landmarks with format [x1, y1, z1, x2, y2 ... 
x21, y21, z21], 3D metric x, y, z coordinate - # [130]: handedness, (left)[0, 1](right) hand - # [131]: confidence - return np.r_[bbox.reshape(-1), landmarks.reshape(-1), rotated_landmarks_world.reshape(-1), handedness[0][0], conf] diff --git a/models/human_segmentation_pphumanseg/CMakeLists.txt b/models/human_segmentation_pphumanseg/CMakeLists.txt deleted file mode 100644 index 95aec537..00000000 --- a/models/human_segmentation_pphumanseg/CMakeLists.txt +++ /dev/null @@ -1,31 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(CMAKE_CXX_STANDARD 11) -set(project_name "opencv_zoo_human_segmentation") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. -# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) - diff --git a/models/human_segmentation_pphumanseg/LICENSE b/models/human_segmentation_pphumanseg/LICENSE deleted file mode 100644 index 94255ff4..00000000 --- a/models/human_segmentation_pphumanseg/LICENSE +++ /dev/null @@ -1,203 +0,0 @@ -Copyright (c) 2021 PaddlePaddle Authors. 
All Rights Reserved - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/models/human_segmentation_pphumanseg/README.md b/models/human_segmentation_pphumanseg/README.md deleted file mode 100644 index fd644309..00000000 --- a/models/human_segmentation_pphumanseg/README.md +++ /dev/null @@ -1,69 +0,0 @@ -# PPHumanSeg - -This model is ported from [PaddleHub](https://github.com/PaddlePaddle/PaddleHub) using [this script from OpenCV](https://github.com/opencv/opencv/blob/master/samples/dnn/dnn_model_runner/dnn_conversion/paddlepaddle/paddle_humanseg.py). - -**Note**: -- `human_segmentation_pphumanseg_2023mar_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. 
- -## Demo - -### Python - -Run the following command to try the demo: - -```shell -# detect on camera input -python demo.py -# detect on an image -python demo.py --input /path/to/image -v - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/opencv_zoo_human_segmentation -# detect on an image -./build/opencv_zoo_human_segmentation -i=/path/to/image -# get help messages -./build/opencv_zoo_human_segmentation -h -``` - -### Example outputs - -![webcam demo](./example_outputs/pphumanseg_demo.gif) - -![messi](./example_outputs/messi.jpg) - ---- -Results of accuracy evaluation with [tools/eval](../../tools/eval). - -| Models | Accuracy | mIoU | -| ------------------ | -------------- | ------------- | -| PPHumanSeg | 0.9656 | 0.9164 | -| PPHumanSeg block | 0.9655 | 0.9162 | -| PPHumanSeg quant | 0.7285 | 0.3642 | - - -\*: 'quant' stands for 'quantized'. -\*\*: 'block' stands for 'blockwise quantized'. - ---- -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference - -- https://arxiv.org/abs/1512.03385 -- https://github.com/opencv/opencv/tree/master/samples/dnn/dnn_model_runner/dnn_conversion/paddlepaddle -- https://github.com/PaddlePaddle/PaddleHub diff --git a/models/human_segmentation_pphumanseg/demo.cpp b/models/human_segmentation_pphumanseg/demo.cpp deleted file mode 100644 index 6408768d..00000000 --- a/models/human_segmentation_pphumanseg/demo.cpp +++ /dev/null @@ -1,226 +0,0 @@ -#include "opencv2/opencv.hpp" - -#include -#include -#include -#include - -using namespace std; -using namespace cv; -using namespace dnn; - -std::vector> backend_target_pairs = { - {DNN_BACKEND_OPENCV, DNN_TARGET_CPU}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA_FP16}, - {DNN_BACKEND_TIMVX, DNN_TARGET_NPU}, - {DNN_BACKEND_CANN, DNN_TARGET_NPU} -}; - -class PPHS -{ -private: - Net model; - string modelPath; - - Scalar imageMean = Scalar(0.5,0.5,0.5); - Scalar imageStd = Scalar(0.5,0.5,0.5); - Size modelInputSize = Size(192, 192); - Size currentSize; - - const String inputNames = "x"; - const String outputNames = "save_infer_model/scale_0.tmp_1"; - - int backend_id; - int target_id; - -public: - PPHS(const string& modelPath, - int backend_id = 0, - int target_id = 0) - : modelPath(modelPath), backend_id(backend_id), target_id(target_id) - { - this->model = readNet(modelPath); - this->model.setPreferableBackend(backend_id); - this->model.setPreferableTarget(target_id); - } - - Mat preprocess(const Mat image) - { - this->currentSize = image.size(); - Mat preprocessed = Mat::zeros(this->modelInputSize, image.type()); - resize(image, preprocessed, this->modelInputSize); - - // image normalization - preprocessed.convertTo(preprocessed, CV_32F, 1.0 / 255.0); - preprocessed -= imageMean; - preprocessed /= imageStd; - - return blobFromImage(preprocessed);; - } - - Mat infer(const Mat image) - { - Mat inputBlob = preprocess(image); - - this->model.setInput(inputBlob, this->inputNames); - Mat outputBlob = 
this->model.forward(this->outputNames); - - return postprocess(outputBlob); - } - - Mat postprocess(Mat image) - { - reduceArgMax(image,image,1); - image = image.reshape(1,image.size[2]); - image.convertTo(image, CV_32F); - resize(image, image, this->currentSize, 0, 0, INTER_LINEAR); - image.convertTo(image, CV_8U); - - return image; - } - -}; - - -vector getColorMapList(int num_classes) { - num_classes += 1; - - vector cm(num_classes*3, 0); - - int lab, j; - - for (int i = 0; i < num_classes; ++i) { - lab = i; - j = 0; - - while(lab){ - cm[i] |= (((lab >> 0) & 1) << (7 - j)); - cm[i+num_classes] |= (((lab >> 1) & 1) << (7 - j)); - cm[i+2*num_classes] |= (((lab >> 2) & 1) << (7 - j)); - ++j; - lab >>= 3; - } - - } - - cm.erase(cm.begin(), cm.begin()+3); - - return cm; -}; - -Mat visualize(const Mat& image, const Mat& result, float fps = -1.f, float weight = 0.4) -{ - const Scalar& text_color = Scalar(0, 255, 0); - Mat output_image = image.clone(); - - vector color_map = getColorMapList(256); - - Mat cmm(color_map); - - cmm = cmm.reshape(1,{3,256}); - - if (fps >= 0) - { - putText(output_image, format("FPS: %.2f", fps), Point(0, 15), FONT_HERSHEY_SIMPLEX, 0.5, text_color, 2); - } - - Mat c1, c2, c3; - - LUT(result, cmm.row(0), c1); - LUT(result, cmm.row(1), c2); - LUT(result, cmm.row(2), c3); - - Mat pseudo_img; - merge(std::vector{c1,c2,c3}, pseudo_img); - - addWeighted(output_image, weight, pseudo_img, 1 - weight, 0, output_image); - - return output_image; -}; - -string keys = -"{ help h | | Print help message. }" -"{ model m | human_segmentation_pphumanseg_2023mar.onnx | Usage: Path to the model, defaults to human_segmentation_pphumanseg_2023mar.onnx }" -"{ input i | | Path to input image or video file. 
Skip this argument to capture frames from a camera.}" -"{ backend_target t | 0 | Choose one of the backend-target pair to run this demo:\n" - "0: (default) OpenCV implementation + CPU,\n" - "1: CUDA + GPU (CUDA),\n" - "2: CUDA + GPU (CUDA FP16),\n" - "3: TIM-VX + NPU,\n" - "4: CANN + NPU}" -"{ save s | false | Specify to save results.}" -"{ vis v | true | Specify to open a window for result visualization.}" -; - - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("Human Segmentation"); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - string modelPath = parser.get("model"); - string inputPath = parser.get("input"); - uint8_t backendTarget = parser.get("backend_target"); - bool saveFlag = parser.get("save"); - bool visFlag = parser.get("vis"); - - if (modelPath.empty()) - CV_Error(Error::StsError, "Model file " + modelPath + " not found"); - - PPHS humanSegmentationModel(modelPath, backend_target_pairs[backendTarget].first, backend_target_pairs[backendTarget].second); - - VideoCapture cap; - if (!inputPath.empty()) - cap.open(samples::findFile(inputPath)); - else - cap.open(0); - - if (!cap.isOpened()) - CV_Error(Error::StsError, "Cannot opend video or file"); - - Mat frame; - Mat result; - static const std::string kWinName = "Human Segmentation Demo"; - TickMeter tm; - - while (waitKey(1) < 0) - { - cap >> frame; - - if (frame.empty()) - { - if(inputPath.empty()) - cout << "Frame is empty" << endl; - break; - } - - tm.start(); - result = humanSegmentationModel.infer(frame); - tm.stop(); - - Mat res_frame = visualize(frame, result, tm.getFPS()); - - if(visFlag || inputPath.empty()) - { - imshow(kWinName, res_frame); - if(!inputPath.empty()) - waitKey(0); - } - if(saveFlag) - { - cout << "Results are saved to result.jpg" << endl; - - imwrite("result.jpg", res_frame); - } - } - - return 0; -} - diff --git a/models/human_segmentation_pphumanseg/demo.py 
b/models/human_segmentation_pphumanseg/demo.py deleted file mode 100644 index df28f570..00000000 --- a/models/human_segmentation_pphumanseg/demo.py +++ /dev/null @@ -1,162 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. - -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from pphumanseg import PPHumanSeg - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='PPHumanSeg (https://github.com/PaddlePaddle/PaddleSeg/tree/release/2.2/contrib/PP-HumanSeg)') -parser.add_argument('--input', '-i', type=str, - help='Usage: Set input path to a certain image, omit if using camera.') -parser.add_argument('--model', '-m', type=str, default='human_segmentation_pphumanseg_2023mar.onnx', - help='Usage: Set model path, defaults to human_segmentation_pphumanseg_2023mar.onnx.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x 
in range(len(backend_target_pairs))])) -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save a file with results. Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. Invalid in case of camera input.') -args = parser.parse_args() - -def get_color_map_list(num_classes): - """ - Returns the color map for visualizing the segmentation mask, - which can support arbitrary number of classes. - - Args: - num_classes (int): Number of classes. - - Returns: - (list). The color map. - """ - - num_classes += 1 - color_map = num_classes * [0, 0, 0] - for i in range(0, num_classes): - j = 0 - lab = i - while lab: - color_map[i * 3] |= (((lab >> 0) & 1) << (7 - j)) - color_map[i * 3 + 1] |= (((lab >> 1) & 1) << (7 - j)) - color_map[i * 3 + 2] |= (((lab >> 2) & 1) << (7 - j)) - j += 1 - lab >>= 3 - color_map = color_map[3:] - return color_map - -def visualize(image, result, weight=0.6, fps=None): - """ - Convert predict result to color image, and save added image. - - Args: - image (str): The input image. - result (np.ndarray): The predict result of image. - weight (float): The image weight of visual image, and the result weight is (1 - weight). Default: 0.6 - fps (str): The FPS to be drawn on the input image. - - Returns: - vis_result (np.ndarray): The visualized result. 
- """ - color_map = get_color_map_list(256) - color_map = np.array(color_map).reshape(256, 3).astype(np.uint8) - - # Use OpenCV LUT for color mapping - c1 = cv.LUT(result, color_map[:, 0]) - c2 = cv.LUT(result, color_map[:, 1]) - c3 = cv.LUT(result, color_map[:, 2]) - pseudo_img = np.dstack((c1, c2, c3)) - - vis_result = cv.addWeighted(image, weight, pseudo_img, 1 - weight, 0) - - if fps is not None: - cv.putText(vis_result, 'FPS: {:.2f}'.format(fps), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0)) - - return vis_result - - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - # Instantiate PPHumanSeg - model = PPHumanSeg(modelPath=args.model, backendId=backend_id, targetId=target_id) - - if args.input is not None: - # Read image and resize to 192x192 - image = cv.imread(args.input) - h, w, _ = image.shape - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - _image = cv.resize(image, dsize=(192, 192)) - - # Inference - result = model.infer(_image) - result = cv.resize(result[0, :, :], dsize=(w, h), interpolation=cv.INTER_NEAREST) - - # Draw results on the input image - image = visualize(image, result) - - # Save results if save is true - if args.save: - print('Results saved to result.jpg\n') - cv.imwrite('result.jpg', image) - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - w = int(cap.get(cv.CAP_PROP_FRAME_WIDTH)) - h = int(cap.get(cv.CAP_PROP_FRAME_HEIGHT)) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - _frame = cv.cvtColor(frame, cv.COLOR_BGR2RGB) - _frame = cv.resize(_frame, dsize=(192, 192)) - - # Inference - tm.start() - result = model.infer(_frame) - tm.stop() - result = 
cv.resize(result[0, :, :], dsize=(w, h), interpolation=cv.INTER_NEAREST) - - # Draw results on the input image - frame = visualize(frame, result, fps=tm.getFPS()) - - # Visualize results in a new window - cv.imshow('PPHumanSeg Demo', frame) - - tm.reset() - diff --git a/models/human_segmentation_pphumanseg/example_outputs/messi.jpg b/models/human_segmentation_pphumanseg/example_outputs/messi.jpg deleted file mode 100644 index 6a152708..00000000 --- a/models/human_segmentation_pphumanseg/example_outputs/messi.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:92fff88f42578a1bb46755ac11530fdcbb7b5f9a06ab478a45b1995feb1cd4e6 -size 62004 diff --git a/models/human_segmentation_pphumanseg/example_outputs/pphumanseg_demo.gif b/models/human_segmentation_pphumanseg/example_outputs/pphumanseg_demo.gif deleted file mode 100644 index 122ab0d4..00000000 --- a/models/human_segmentation_pphumanseg/example_outputs/pphumanseg_demo.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a386278ce825418231a371a0a2990f63ab0dc976bf03164517d9491150d34400 -size 548204 diff --git a/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar.onnx b/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar.onnx deleted file mode 100644 index d2921c1d..00000000 --- a/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:552d8a984054e59b5d773d24b9b12022b22046ceb2bbc4c9aaeaceb36a9ddf24 -size 6163938 diff --git a/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8.onnx b/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8.onnx deleted file mode 100644 index d1eea02a..00000000 --- a/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:510775a9e23c1a53c34013a2fa3ac1906bfd7b789d55c07e6b49f30bb669007d -size 1607872 diff --git a/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8bq.onnx b/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8bq.onnx deleted file mode 100644 index d925e472..00000000 --- a/models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1da023e95e3adbeef16ac2862767b53b86a743ff26a34692e0833d7e088f0231 -size 1734724 diff --git a/models/human_segmentation_pphumanseg/pphumanseg.py b/models/human_segmentation_pphumanseg/pphumanseg.py deleted file mode 100644 index be41351a..00000000 --- a/models/human_segmentation_pphumanseg/pphumanseg.py +++ /dev/null @@ -1,69 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -import numpy as np -import cv2 as cv - -class PPHumanSeg: - def __init__(self, modelPath, backendId=0, targetId=0): - self._modelPath = modelPath - self._backendId = backendId - self._targetId = targetId - - self._model = cv.dnn.readNet(self._modelPath) - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - self._inputNames = '' - self._outputNames = ['save_infer_model/scale_0.tmp_1'] - self._currentInputSize = None - self._inputSize = [192, 192] - self._mean = np.array([0.5, 0.5, 0.5])[np.newaxis, np.newaxis, :] - self._std = np.array([0.5, 0.5, 0.5])[np.newaxis, np.newaxis, :] - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def _preprocess(self, image): - - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - - self._currentInputSize = image.shape - image = cv.resize(image, (192, 192)) - - image = image.astype(np.float32, copy=False) / 255.0 - image -= self._mean - image /= self._std - return cv.dnn.blobFromImage(image) - - def infer(self, image): - - # Preprocess - inputBlob = self._preprocess(image) - - # Forward - self._model.setInput(inputBlob, self._inputNames) - outputBlob = self._model.forward() - - # Postprocess - results = self._postprocess(outputBlob) - - return results - - def _postprocess(self, outputBlob): - - outputBlob = outputBlob[0] - outputBlob = cv.resize(outputBlob.transpose(1,2,0), (self._currentInputSize[1], self._currentInputSize[0]), interpolation=cv.INTER_LINEAR).transpose(2,0,1)[np.newaxis, ...] 
- - result = np.argmax(outputBlob, axis=1).astype(np.uint8) - return result diff --git a/models/image_classification_mobilenet/CMakeLists.txt b/models/image_classification_mobilenet/CMakeLists.txt deleted file mode 100644 index b4005c04..00000000 --- a/models/image_classification_mobilenet/CMakeLists.txt +++ /dev/null @@ -1,29 +0,0 @@ -cmake_minimum_required(VERSION 3.2) -set(project_name "opencv_zoo_image_classification_mobilenet") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. -# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/image_classification_mobilenet/LICENSE b/models/image_classification_mobilenet/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/models/image_classification_mobilenet/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/image_classification_mobilenet/README.md b/models/image_classification_mobilenet/README.md deleted file mode 100644 index a30e0094..00000000 --- a/models/image_classification_mobilenet/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# MobileNets - -MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications - -MobileNetV2: Inverted Residuals and Linear Bottlenecks - -**Note**: -- `image_classification_mobilenetvX_2022apr_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -Results of accuracy evaluation with [tools/eval](../../tools/eval). 
- -| Models | Top-1 Accuracy | Top-5 Accuracy | -| ------------------ | -------------- | -------------- | -| MobileNet V1 | 67.64 | 87.97 | -| MobileNet V1 block | 67.21 | 87.62 | -| MobileNet V1 quant | 55.53 | 78.74 | -| MobileNet V2 | 69.44 | 89.23 | -| MobileNet V2 block | 68.66 | 88.90 | -| MobileNet V2 quant | 68.37 | 88.56 | - -\*: 'quant' stands for 'quantized'. -\*\*: 'block' stands for 'blockwise quantized'. - -## Demo - -### Python - -Run the following command to try the demo: - -```shell -# MobileNet V1 -python demo.py --input /path/to/image -# MobileNet V2 -python demo.py --input /path/to/image --model v2 - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/opencv_zoo_image_classification_mobilenet -# detect on an image -./build/opencv_zoo_image_classification_mobilenet -m=/path/to/model -i=/path/to/image -v -# get help messages -./build/opencv_zoo_image_classification_mobilenet -h -``` - - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference - -- MobileNet V1: https://arxiv.org/abs/1704.04861 -- MobileNet V2: https://arxiv.org/abs/1801.04381 -- MobileNet V1 weight and scripts for training: https://github.com/wjc852456/pytorch-mobilenet-v1 -- MobileNet V2 weight: https://github.com/onnx/models/tree/main/vision/classification/mobilenet diff --git a/models/image_classification_mobilenet/demo.cpp b/models/image_classification_mobilenet/demo.cpp deleted file mode 100644 index 22612877..00000000 --- a/models/image_classification_mobilenet/demo.cpp +++ /dev/null @@ -1,133 +0,0 @@ -#include -#include -#include - -#include -#include "labelsimagenet1k.h" - -using namespace std; -using namespace cv; -using namespace dnn; - -vector< pair > backendTargetPairs = { - std::make_pair(dnn::DNN_BACKEND_OPENCV, dnn::DNN_TARGET_CPU), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA_FP16), - std::make_pair(dnn::DNN_BACKEND_TIMVX, dnn::DNN_TARGET_NPU), - std::make_pair(dnn::DNN_BACKEND_CANN, dnn::DNN_TARGET_NPU) }; - - -std::string keys = -"{ help h | | Print help message. }" -"{ model m | image_classification_mobilenetv1_2022apr.onnx | Usage: Set model type, defaults to image_classification_mobilenetv1_2022apr.onnx (v1) }" -"{ input i | | Path to input image or video file. 
Skip this argument to capture frames from a camera.}" -"{ initial_width | 0 | Preprocess input image by initial resizing to a specific width.}" -"{ initial_height | 0 | Preprocess input image by initial resizing to a specific height.}" -"{ rgb | true | swap R and B plane.}" -"{ crop | false | Preprocess input image by center cropping.}" -"{ vis v | true | Usage: Specify to open a new window to show results.}" -"{ backend bt | 0 | Choose one of computation backends: " -"0: (default) OpenCV implementation + CPU, " -"1: CUDA + GPU (CUDA), " -"2: CUDA + GPU (CUDA FP16), " -"3: TIM-VX + NPU, " -"4: CANN + NPU}"; - - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("Use this script to run classification deep learning networks in opencv Zoo using OpenCV."); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - int rszWidth = parser.get("initial_width"); - int rszHeight = parser.get("initial_height"); - bool swapRB = parser.get("rgb"); - bool crop = parser.get("crop"); - bool vis = parser.get("vis"); - String model = parser.get("model"); - int backendTargetid = parser.get("backend"); - - if (model.empty()) - { - CV_Error(Error::StsError, "Model file " + model + " not found"); - } - vector labels = getLabelsImagenet1k(); - - Net net = readNet(samples::findFile(model)); - net.setPreferableBackend(backendTargetPairs[backendTargetid].first); - net.setPreferableTarget(backendTargetPairs[backendTargetid].second); - //! 
[Open a video file or an image file or a camera stream] - VideoCapture cap; - if (parser.has("input")) - cap.open(samples::findFile(parser.get("input"))); - else - cap.open(0); - if (!cap.isOpened()) - CV_Error(Error::StsError, "Cannot open video or file"); - Mat frame, blob; - static const std::string kWinName = model; - int nbInference = 0; - while (waitKey(1) < 0) - { - cap >> frame; - if (frame.empty()) - { - cout << "Frame is empty" << endl; - waitKey(); - break; - } - - if (rszWidth != 0 && rszHeight != 0) - { - resize(frame, frame, Size(rszWidth, rszHeight)); - } - Image2BlobParams paramMobilenet; - paramMobilenet.datalayout = DNN_LAYOUT_NCHW; - paramMobilenet.ddepth = CV_32F; - paramMobilenet.mean = Scalar(123.675, 116.28, 103.53); - paramMobilenet.scalefactor = Scalar(1 / (255. * 0.229), 1 / (255. * 0.224), 1 / (255. * 0.225)); - paramMobilenet.size = Size(224, 224); - paramMobilenet.swapRB = swapRB; - if (crop) - paramMobilenet.paddingmode = DNN_PMODE_CROP_CENTER; - else - paramMobilenet.paddingmode = DNN_PMODE_NULL; - //! [Create a 4D blob from a frame] - blobFromImageWithParams(frame, blob, paramMobilenet); - - //! [Set input blob] - net.setInput(blob); - Mat prob = net.forward(); - - //! [Get a class with a highest score] - Point classIdPoint; - double confidence; - minMaxLoc(prob.reshape(1, 1), 0, &confidence, 0, &classIdPoint); - int classId = classIdPoint.x; - std::string label = format("%s: %.4f", (labels.empty() ? format("Class #%d", classId).c_str() : - labels[classId].c_str()), - confidence); - if (vis) - { - putText(frame, label, Point(0, 55), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0, 255, 0)); - imshow(kWinName, frame); - } - else - { - cout << label << endl; - nbInference++; - if (nbInference > 100) - { - cout << nbInference << " inference made. 
Demo existing" << endl; - break; - } - } - } - return 0; -} diff --git a/models/image_classification_mobilenet/demo.py b/models/image_classification_mobilenet/demo.py deleted file mode 100644 index 4aa990af..00000000 --- a/models/image_classification_mobilenet/demo.py +++ /dev/null @@ -1,56 +0,0 @@ -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from mobilenet import MobileNet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='Demo for MobileNet V1 & V2.') -parser.add_argument('--input', '-i', type=str, - help='Usage: Set input path to a certain image, omit if using camera.') -parser.add_argument('--model', '-m', type=str, default='image_classification_mobilenetv1_2022apr.onnx', - help='Usage: Set model type, defaults to image_classification_mobilenetv1_2022apr.onnx (v1).') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--top_k', type=int, default=1, - help='Usage: Get top k predictions.') -args = parser.parse_args() - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - 
target_id = backend_target_pairs[args.backend_target][1] - top_k = args.top_k - # Instantiate MobileNet - model = MobileNet(modelPath=args.model, topK=top_k, backendId=backend_id, targetId=target_id) - - # Read image and get a 224x224 crop from a 256x256 resized - image = cv.imread(args.input) - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - image = cv.resize(image, dsize=(256, 256)) - image = image[16:240, 16:240, :] - - # Inference - result = model.infer(image) - - # Print result - print('label: {}'.format(result)) diff --git a/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr.onnx b/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr.onnx deleted file mode 100644 index a7dd869f..00000000 --- a/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cf4c563b54e7144d7c2803ee22a6471e68ae87377554724d501624f602395bea -size 16890136 diff --git a/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8.onnx b/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8.onnx deleted file mode 100644 index 240b151a..00000000 --- a/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ef32077ef2f8f37ddafeeb1d29a0662e7a794d61190552730769a96b7d58e6df -size 4321622 diff --git a/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8bq.onnx b/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8bq.onnx deleted file mode 100644 index 1949a473..00000000 --- a/models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:4437385b4908011f6e3019ae253031272c19522ae1cbdff8374bceffe262a5ee -size 4599388 diff --git a/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr.onnx b/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr.onnx deleted file mode 100644 index 20731372..00000000 --- a/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c0c3f76d93fa3fd6580652a45618618a220fced18babf65774ed169de0432ad5 -size 13964571 diff --git a/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8.onnx b/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8.onnx deleted file mode 100644 index 63db23c8..00000000 --- a/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cc028fe6cae7bc11a4ff53cfc9b79c920e8be65ce33a904ec3e2a8f66d77f95f -size 3655033 diff --git a/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8bq.onnx b/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8bq.onnx deleted file mode 100644 index c744fb6a..00000000 --- a/models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3fe457eda49d71b664918ae87349bdb3e3815a56bb82cdb7f477bc83597f4313 -size 3872948 diff --git a/models/image_classification_mobilenet/labelsimagenet1k.h b/models/image_classification_mobilenet/labelsimagenet1k.h deleted file mode 100644 index 4ca30280..00000000 --- a/models/image_classification_mobilenet/labelsimagenet1k.h +++ /dev/null @@ -1,1010 +0,0 @@ -#include -#include - -std::vector getLabelsImagenet1k() -{ - std::vector labels = { - "tench, Tinca tinca", - "goldfish, Carassius 
auratus", - "great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias", - "tiger shark, Galeocerdo cuvieri", - "hammerhead, hammerhead shark", - "electric ray, crampfish, numbfish, torpedo", - "stingray", - "cock", - "hen", - "ostrich, Struthio camelus", - "brambling, Fringilla montifringilla", - "goldfinch, Carduelis carduelis", - "house finch, linnet, Carpodacus mexicanus", - "junco, snowbird", - "indigo bunting, indigo finch, indigo bird, Passerina cyanea", - "robin, American robin, Turdus migratorius", - "bulbul", - "jay", - "magpie", - "chickadee", - "water ouzel, dipper", - "kite", - "bald eagle, American eagle, Haliaeetus leucocephalus", - "vulture", - "great grey owl, great gray owl, Strix nebulosa", - "European fire salamander, Salamandra salamandra", - "common newt, Triturus vulgaris", - "eft", - "spotted salamander, Ambystoma maculatum", - "axolotl, mud puppy, Ambystoma mexicanum", - "bullfrog, Rana catesbeiana", - "tree frog, tree-frog", - "tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui", - "loggerhead, loggerhead turtle, Caretta caretta", - "leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea", - "mud turtle", - "terrapin", - "box turtle, box tortoise", - "banded gecko", - "common iguana, iguana, Iguana iguana", - "American chameleon, anole, Anolis carolinensis", - "whiptail, whiptail lizard", - "agama", - "frilled lizard, Chlamydosaurus kingi", - "alligator lizard", - "Gila monster, Heloderma suspectum", - "green lizard, Lacerta viridis", - "African chameleon, Chamaeleo chamaeleon", - "Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis", - "African crocodile, Nile crocodile, Crocodylus niloticus", - "American alligator, Alligator mississipiensis", - "triceratops", - "thunder snake, worm snake, Carphophis amoenus", - "ringneck snake, ring-necked snake, ring snake", - "hognose snake, puff adder, sand viper", - "green snake, grass snake", - "king snake, kingsnake", 
- "garter snake, grass snake", - "water snake", - "vine snake", - "night snake, Hypsiglena torquata", - "boa constrictor, Constrictor constrictor", - "rock python, rock snake, Python sebae", - "Indian cobra, Naja naja", - "green mamba", - "sea snake", - "horned viper, cerastes, sand viper, horned asp, Cerastes cornutus", - "diamondback, diamondback rattlesnake, Crotalus adamanteus", - "sidewinder, horned rattlesnake, Crotalus cerastes", - "trilobite", - "harvestman, daddy longlegs, Phalangium opilio", - "scorpion", - "black and gold garden spider, Argiope aurantia", - "barn spider, Araneus cavaticus", - "garden spider, Aranea diademata", - "black widow, Latrodectus mactans", - "tarantula", - "wolf spider, hunting spider", - "tick", - "centipede", - "black grouse", - "ptarmigan", - "ruffed grouse, partridge, Bonasa umbellus", - "prairie chicken, prairie grouse, prairie fowl", - "peacock", - "quail", - "partridge", - "African grey, African gray, Psittacus erithacus", - "macaw", - "sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita", - "lorikeet", - "coucal", - "bee eater", - "hornbill", - "hummingbird", - "jacamar", - "toucan", - "drake", - "red-breasted merganser, Mergus serrator", - "goose", - "black swan, Cygnus atratus", - "tusker", - "echidna, spiny anteater, anteater", - "platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus", - "wallaby, brush kangaroo", - "koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus", - "wombat", - "jellyfish", - "sea anemone, anemone", - "brain coral", - "flatworm, platyhelminth", - "nematode, nematode worm, roundworm", - "conch", - "snail", - "slug", - "sea slug, nudibranch", - "chiton, coat-of-mail shell, sea cradle, polyplacophore", - "chambered nautilus, pearly nautilus, nautilus", - "Dungeness crab, Cancer magister", - "rock crab, Cancer irroratus", - "fiddler crab", - "king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica", - 
"American lobster, Northern lobster, Maine lobster, Homarus americanus", - "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", - "crayfish, crawfish, crawdad, crawdaddy", - "hermit crab", - "isopod", - "white stork, Ciconia ciconia", - "black stork, Ciconia nigra", - "spoonbill", - "flamingo", - "little blue heron, Egretta caerulea", - "American egret, great white heron, Egretta albus", - "bittern", - "crane", - "limpkin, Aramus pictus", - "European gallinule, Porphyrio porphyrio", - "American coot, marsh hen, mud hen, water hen, Fulica americana", - "bustard", - "ruddy turnstone, Arenaria interpres", - "red-backed sandpiper, dunlin, Erolia alpina", - "redshank, Tringa totanus", - "dowitcher", - "oystercatcher, oyster catcher", - "pelican", - "king penguin, Aptenodytes patagonica", - "albatross, mollymawk", - "grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus", - "killer whale, killer, orca, grampus, sea wolf, Orcinus orca", - "dugong, Dugong dugon", - "sea lion", - "Chihuahua", - "Japanese spaniel", - "Maltese dog, Maltese terrier, Maltese", - "Pekinese, Pekingese, Peke", - "Shih-Tzu", - "Blenheim spaniel", - "papillon", - "toy terrier", - "Rhodesian ridgeback", - "Afghan hound, Afghan", - "basset, basset hound", - "beagle", - "bloodhound, sleuthhound", - "bluetick", - "black-and-tan coonhound", - "Walker hound, Walker foxhound", - "English foxhound", - "redbone", - "borzoi, Russian wolfhound", - "Irish wolfhound", - "Italian greyhound", - "whippet", - "Ibizan hound, Ibizan Podenco", - "Norwegian elkhound, elkhound", - "otterhound, otter hound", - "Saluki, gazelle hound", - "Scottish deerhound, deerhound", - "Weimaraner", - "Staffordshire bullterrier, Staffordshire bull terrier", - "American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier", - "Bedlington terrier", - "Border terrier", - "Kerry blue terrier", - "Irish terrier", - "Norfolk terrier", - "Norwich terrier", - 
"Yorkshire terrier", - "wire-haired fox terrier", - "Lakeland terrier", - "Sealyham terrier, Sealyham", - "Airedale, Airedale terrier", - "cairn, cairn terrier", - "Australian terrier", - "Dandie Dinmont, Dandie Dinmont terrier", - "Boston bull, Boston terrier", - "miniature schnauzer", - "giant schnauzer", - "standard schnauzer", - "Scotch terrier, Scottish terrier, Scottie", - "Tibetan terrier, chrysanthemum dog", - "silky terrier, Sydney silky", - "soft-coated wheaten terrier", - "West Highland white terrier", - "Lhasa, Lhasa apso", - "flat-coated retriever", - "curly-coated retriever", - "golden retriever", - "Labrador retriever", - "Chesapeake Bay retriever", - "German short-haired pointer", - "vizsla, Hungarian pointer", - "English setter", - "Irish setter, red setter", - "Gordon setter", - "Brittany spaniel", - "clumber, clumber spaniel", - "English springer, English springer spaniel", - "Welsh springer spaniel", - "cocker spaniel, English cocker spaniel, cocker", - "Sussex spaniel", - "Irish water spaniel", - "kuvasz", - "schipperke", - "groenendael", - "malinois", - "briard", - "kelpie", - "komondor", - "Old English sheepdog, bobtail", - "Shetland sheepdog, Shetland sheep dog, Shetland", - "collie", - "Border collie", - "Bouvier des Flandres, Bouviers des Flandres", - "Rottweiler", - "German shepherd, German shepherd dog, German police dog, alsatian", - "Doberman, Doberman pinscher", - "miniature pinscher", - "Greater Swiss Mountain dog", - "Bernese mountain dog", - "Appenzeller", - "EntleBucher", - "boxer", - "bull mastiff", - "Tibetan mastiff", - "French bulldog", - "Great Dane", - "Saint Bernard, St Bernard", - "Eskimo dog, husky", - "malamute, malemute, Alaskan malamute", - "Siberian husky", - "dalmatian, coach dog, carriage dog", - "affenpinscher, monkey pinscher, monkey dog", - "basenji", - "pug, pug-dog", - "Leonberg", - "Newfoundland, Newfoundland dog", - "Great Pyrenees", - "Samoyed, Samoyede", - "Pomeranian", - "chow, chow chow", - "keeshond", - 
"Brabancon griffon", - "Pembroke, Pembroke Welsh corgi", - "Cardigan, Cardigan Welsh corgi", - "toy poodle", - "miniature poodle", - "standard poodle", - "Mexican hairless", - "timber wolf, grey wolf, gray wolf, Canis lupus", - "white wolf, Arctic wolf, Canis lupus tundrarum", - "red wolf, maned wolf, Canis rufus, Canis niger", - "coyote, prairie wolf, brush wolf, Canis latrans", - "dingo, warrigal, warragal, Canis dingo", - "dhole, Cuon alpinus", - "African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus", - "hyena, hyaena", - "red fox, Vulpes vulpes", - "kit fox, Vulpes macrotis", - "Arctic fox, white fox, Alopex lagopus", - "grey fox, gray fox, Urocyon cinereoargenteus", - "tabby, tabby cat", - "tiger cat", - "Persian cat", - "Siamese cat, Siamese", - "Egyptian cat", - "cougar, puma, catamount, mountain lion, painter, panther, Felis concolor", - "lynx, catamount", - "leopard, Panthera pardus", - "snow leopard, ounce, Panthera uncia", - "jaguar, panther, Panthera onca, Felis onca", - "lion, king of beasts, Panthera leo", - "tiger, Panthera tigris", - "cheetah, chetah, Acinonyx jubatus", - "brown bear, bruin, Ursus arctos", - "American black bear, black bear, Ursus americanus, Euarctos americanus", - "ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus", - "sloth bear, Melursus ursinus, Ursus ursinus", - "mongoose", - "meerkat, mierkat", - "tiger beetle", - "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", - "ground beetle, carabid beetle", - "long-horned beetle, longicorn, longicorn beetle", - "leaf beetle, chrysomelid", - "dung beetle", - "rhinoceros beetle", - "weevil", - "fly", - "bee", - "ant, emmet, pismire", - "grasshopper, hopper", - "cricket", - "walking stick, walkingstick, stick insect", - "cockroach, roach", - "mantis, mantid", - "cicada, cicala", - "leafhopper", - "lacewing, lacewing fly", - "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", - 
"damselfly", - "admiral", - "ringlet, ringlet butterfly", - "monarch, monarch butterfly, milkweed butterfly, Danaus plexippus", - "cabbage butterfly", - "sulphur butterfly, sulfur butterfly", - "lycaenid, lycaenid butterfly", - "starfish, sea star", - "sea urchin", - "sea cucumber, holothurian", - "wood rabbit, cottontail, cottontail rabbit", - "hare", - "Angora, Angora rabbit", - "hamster", - "porcupine, hedgehog", - "fox squirrel, eastern fox squirrel, Sciurus niger", - "marmot", - "beaver", - "guinea pig, Cavia cobaya", - "sorrel", - "zebra", - "hog, pig, grunter, squealer, Sus scrofa", - "wild boar, boar, Sus scrofa", - "warthog", - "hippopotamus, hippo, river horse, Hippopotamus amphibius", - "ox", - "water buffalo, water ox, Asiatic buffalo, Bubalus bubalis", - "bison", - "ram, tup", - "bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis", - "ibex, Capra ibex", - "hartebeest", - "impala, Aepyceros melampus", - "gazelle", - "Arabian camel, dromedary, Camelus dromedarius", - "llama", - "weasel", - "mink", - "polecat, fitch, foulmart, foumart, Mustela putorius", - "black-footed ferret, ferret, Mustela nigripes", - "otter", - "skunk, polecat, wood pussy", - "badger", - "armadillo", - "three-toed sloth, ai, Bradypus tridactylus", - "orangutan, orang, orangutang, Pongo pygmaeus", - "gorilla, Gorilla gorilla", - "chimpanzee, chimp, Pan troglodytes", - "gibbon, Hylobates lar", - "siamang, Hylobates syndactylus, Symphalangus syndactylus", - "guenon, guenon monkey", - "patas, hussar monkey, Erythrocebus patas", - "baboon", - "macaque", - "langur", - "colobus, colobus monkey", - "proboscis monkey, Nasalis larvatus", - "marmoset", - "capuchin, ringtail, Cebus capucinus", - "howler monkey, howler", - "titi, titi monkey", - "spider monkey, Ateles geoffroyi", - "squirrel monkey, Saimiri sciureus", - "Madagascar cat, ring-tailed lemur, Lemur catta", - "indri, indris, Indri indri, Indri brevicaudatus", - "Indian elephant, Elephas 
maximus", - "African elephant, Loxodonta africana", - "lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens", - "giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca", - "barracouta, snoek", - "eel", - "coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch", - "rock beauty, Holocanthus tricolor", - "anemone fish", - "sturgeon", - "gar, garfish, garpike, billfish, Lepisosteus osseus", - "lionfish", - "puffer, pufferfish, blowfish, globefish", - "abacus", - "abaya", - "academic gown, academic robe, judge's robe", - "accordion, piano accordion, squeeze box", - "acoustic guitar", - "aircraft carrier, carrier, flattop, attack aircraft carrier", - "airliner", - "airship, dirigible", - "altar", - "ambulance", - "amphibian, amphibious vehicle", - "analog clock", - "apiary, bee house", - "apron", - "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", - "assault rifle, assault gun", - "backpack, back pack, knapsack, packsack, rucksack, haversack", - "bakery, bakeshop, bakehouse", - "balance beam, beam", - "balloon", - "ballpoint, ballpoint pen, ballpen, Biro", - "Band Aid", - "banjo", - "bannister, banister, balustrade, balusters, handrail", - "barbell", - "barber chair", - "barbershop", - "barn", - "barometer", - "barrel, cask", - "barrow, garden cart, lawn cart, wheelbarrow", - "baseball", - "basketball", - "bassinet", - "bassoon", - "bathing cap, swimming cap", - "bath towel", - "bathtub, bathing tub, bath, tub", - "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", - "beacon, lighthouse, beacon light, pharos", - "beaker", - "bearskin, busby, shako", - "beer bottle", - "beer glass", - "bell cote, bell cot", - "bib", - "bicycle-built-for-two, tandem bicycle, tandem", - "bikini, two-piece", - "binder, ring-binder", - "binoculars, field glasses, opera glasses", - "birdhouse", - "boathouse", - "bobsled, bobsleigh, bob", - "bolo tie, bolo, bola 
tie, bola", - "bonnet, poke bonnet", - "bookcase", - "bookshop, bookstore, bookstall", - "bottlecap", - "bow", - "bow tie, bow-tie, bowtie", - "brass, memorial tablet, plaque", - "brassiere, bra, bandeau", - "breakwater, groin, groyne, mole, bulwark, seawall, jetty", - "breastplate, aegis, egis", - "broom", - "bucket, pail", - "buckle", - "bulletproof vest", - "bullet train, bullet", - "butcher shop, meat market", - "cab, hack, taxi, taxicab", - "caldron, cauldron", - "candle, taper, wax light", - "cannon", - "canoe", - "can opener, tin opener", - "cardigan", - "car mirror", - "carousel, carrousel, merry-go-round, roundabout, whirligig", - "carpenter's kit, tool kit", - "carton", - "car wheel", - "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM", - "cassette", - "cassette player", - "castle", - "catamaran", - "CD player", - "cello, violoncello", - "cellular telephone, cellular phone, cellphone, cell, mobile phone", - "chain", - "chainlink fence", - "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour", - "chain saw, chainsaw", - "chest", - "chiffonier, commode", - "chime, bell, gong", - "china cabinet, china closet", - "Christmas stocking", - "church, church building", - "cinema, movie theater, movie theatre, movie house, picture palace", - "cleaver, meat cleaver, chopper", - "cliff dwelling", - "cloak", - "clog, geta, patten, sabot", - "cocktail shaker", - "coffee mug", - "coffeepot", - "coil, spiral, volute, whorl, helix", - "combination lock", - "computer keyboard, keypad", - "confectionery, confectionary, candy store", - "container ship, containership, container vessel", - "convertible", - "corkscrew, bottle screw", - "cornet, horn, trumpet, trump", - "cowboy boot", - "cowboy hat, ten-gallon hat", - "cradle", - "crane", - "crash helmet", - "crate", - "crib, cot", - "Crock Pot", - "croquet ball", - "crutch", - "cuirass", - "dam, dike, dyke", - "desk", - "desktop 
computer", - "dial telephone, dial phone", - "diaper, nappy, napkin", - "digital clock", - "digital watch", - "dining table, board", - "dishrag, dishcloth", - "dishwasher, dish washer, dishwashing machine", - "disk brake, disc brake", - "dock, dockage, docking facility", - "dogsled, dog sled, dog sleigh", - "dome", - "doormat, welcome mat", - "drilling platform, offshore rig", - "drum, membranophone, tympan", - "drumstick", - "dumbbell", - "Dutch oven", - "electric fan, blower", - "electric guitar", - "electric locomotive", - "entertainment center", - "envelope", - "espresso maker", - "face powder", - "feather boa, boa", - "file, file cabinet, filing cabinet", - "fireboat", - "fire engine, fire truck", - "fire screen, fireguard", - "flagpole, flagstaff", - "flute, transverse flute", - "folding chair", - "football helmet", - "forklift", - "fountain", - "fountain pen", - "four-poster", - "freight car", - "French horn, horn", - "frying pan, frypan, skillet", - "fur coat", - "garbage truck, dustcart", - "gasmask, respirator, gas helmet", - "gas pump, gasoline pump, petrol pump, island dispenser", - "goblet", - "go-kart", - "golf ball", - "golfcart, golf cart", - "gondola", - "gong, tam-tam", - "gown", - "grand piano, grand", - "greenhouse, nursery, glasshouse", - "grille, radiator grille", - "grocery store, grocery, food market, market", - "guillotine", - "hair slide", - "hair spray", - "half track", - "hammer", - "hamper", - "hand blower, blow dryer, blow drier, hair dryer, hair drier", - "hand-held computer, hand-held microcomputer", - "handkerchief, hankie, hanky, hankey", - "hard disc, hard disk, fixed disk", - "harmonica, mouth organ, harp, mouth harp", - "harp", - "harvester, reaper", - "hatchet", - "holster", - "home theater, home theatre", - "honeycomb", - "hook, claw", - "hoopskirt, crinoline", - "horizontal bar, high bar", - "horse cart, horse-cart", - "hourglass", - "iPod", - "iron, smoothing iron", - "jack-o'-lantern", - "jean, blue jean, denim", - "jeep, 
landrover", - "jersey, T-shirt, tee shirt", - "jigsaw puzzle", - "jinrikisha, ricksha, rickshaw", - "joystick", - "kimono", - "knee pad", - "knot", - "lab coat, laboratory coat", - "ladle", - "lampshade, lamp shade", - "laptop, laptop computer", - "lawn mower, mower", - "lens cap, lens cover", - "letter opener, paper knife, paperknife", - "library", - "lifeboat", - "lighter, light, igniter, ignitor", - "limousine, limo", - "liner, ocean liner", - "lipstick, lip rouge", - "Loafer", - "lotion", - "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", - "loupe, jeweler's loupe", - "lumbermill, sawmill", - "magnetic compass", - "mailbag, postbag", - "mailbox, letter box", - "maillot", - "maillot, tank suit", - "manhole cover", - "maraca", - "marimba, xylophone", - "mask", - "matchstick", - "maypole", - "maze, labyrinth", - "measuring cup", - "medicine chest, medicine cabinet", - "megalith, megalithic structure", - "microphone, mike", - "microwave, microwave oven", - "military uniform", - "milk can", - "minibus", - "miniskirt, mini", - "minivan", - "missile", - "mitten", - "mixing bowl", - "mobile home, manufactured home", - "Model T", - "modem", - "monastery", - "monitor", - "moped", - "mortar", - "mortarboard", - "mosque", - "mosquito net", - "motor scooter, scooter", - "mountain bike, all-terrain bike, off-roader", - "mountain tent", - "mouse, computer mouse", - "mousetrap", - "moving van", - "muzzle", - "nail", - "neck brace", - "necklace", - "nipple", - "notebook, notebook computer", - "obelisk", - "oboe, hautboy, hautbois", - "ocarina, sweet potato", - "odometer, hodometer, mileometer, milometer", - "oil filter", - "organ, pipe organ", - "oscilloscope, scope, cathode-ray oscilloscope, CRO", - "overskirt", - "oxcart", - "oxygen mask", - "packet", - "paddle, boat paddle", - "paddlewheel, paddle wheel", - "padlock", - "paintbrush", - "pajama, pyjama, pj's, jammies", - "palace", - "panpipe, pandean pipe, syrinx", - "paper towel", - "parachute, 
chute", - "parallel bars, bars", - "park bench", - "parking meter", - "passenger car, coach, carriage", - "patio, terrace", - "pay-phone, pay-station", - "pedestal, plinth, footstall", - "pencil box, pencil case", - "pencil sharpener", - "perfume, essence", - "Petri dish", - "photocopier", - "pick, plectrum, plectron", - "pickelhaube", - "picket fence, paling", - "pickup, pickup truck", - "pier", - "piggy bank, penny bank", - "pill bottle", - "pillow", - "ping-pong ball", - "pinwheel", - "pirate, pirate ship", - "pitcher, ewer", - "plane, carpenter's plane, woodworking plane", - "planetarium", - "plastic bag", - "plate rack", - "plow, plough", - "plunger, plumber's helper", - "Polaroid camera, Polaroid Land camera", - "pole", - "police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria", - "poncho", - "pool table, billiard table, snooker table", - "pop bottle, soda bottle", - "pot, flowerpot", - "potter's wheel", - "power drill", - "prayer rug, prayer mat", - "printer", - "prison, prison house", - "projectile, missile", - "projector", - "puck, hockey puck", - "punching bag, punch bag, punching ball, punchball", - "purse", - "quill, quill pen", - "quilt, comforter, comfort, puff", - "racer, race car, racing car", - "racket, racquet", - "radiator", - "radio, wireless", - "radio telescope, radio reflector", - "rain barrel", - "recreational vehicle, RV, R.V.", - "reel", - "reflex camera", - "refrigerator, icebox", - "remote control, remote", - "restaurant, eating house, eating place, eatery", - "revolver, six-gun, six-shooter", - "rifle", - "rocking chair, rocker", - "rotisserie", - "rubber eraser, rubber, pencil eraser", - "rugby ball", - "rule, ruler", - "running shoe", - "safe", - "safety pin", - "saltshaker, salt shaker", - "sandal", - "sarong", - "sax, saxophone", - "scabbard", - "scale, weighing machine", - "school bus", - "schooner", - "scoreboard", - "screen, CRT screen", - "screw", - "screwdriver", - "seat belt, seatbelt", - "sewing machine", - 
"shield, buckler", - "shoe shop, shoe-shop, shoe store", - "shoji", - "shopping basket", - "shopping cart", - "shovel", - "shower cap", - "shower curtain", - "ski", - "ski mask", - "sleeping bag", - "slide rule, slipstick", - "sliding door", - "slot, one-armed bandit", - "snorkel", - "snowmobile", - "snowplow, snowplough", - "soap dispenser", - "soccer ball", - "sock", - "solar dish, solar collector, solar furnace", - "sombrero", - "soup bowl", - "space bar", - "space heater", - "space shuttle", - "spatula", - "speedboat", - "spider web, spider's web", - "spindle", - "sports car, sport car", - "spotlight, spot", - "stage", - "steam locomotive", - "steel arch bridge", - "steel drum", - "stethoscope", - "stole", - "stone wall", - "stopwatch, stop watch", - "stove", - "strainer", - "streetcar, tram, tramcar, trolley, trolley car", - "stretcher", - "studio couch, day bed", - "stupa, tope", - "submarine, pigboat, sub, U-boat", - "suit, suit of clothes", - "sundial", - "sunglass", - "sunglasses, dark glasses, shades", - "sunscreen, sunblock, sun blocker", - "suspension bridge", - "swab, swob, mop", - "sweatshirt", - "swimming trunks, bathing trunks", - "swing", - "switch, electric switch, electrical switch", - "syringe", - "table lamp", - "tank, army tank, armored combat vehicle, armoured combat vehicle", - "tape player", - "teapot", - "teddy, teddy bear", - "television, television system", - "tennis ball", - "thatch, thatched roof", - "theater curtain, theatre curtain", - "thimble", - "thresher, thrasher, threshing machine", - "throne", - "tile roof", - "toaster", - "tobacco shop, tobacconist shop, tobacconist", - "toilet seat", - "torch", - "totem pole", - "tow truck, tow car, wrecker", - "toyshop", - "tractor", - "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", - "tray", - "trench coat", - "tricycle, trike, velocipede", - "trimaran", - "tripod", - "triumphal arch", - "trolleybus, trolley coach, trackless trolley", - "trombone", - "tub, 
vat", - "turnstile", - "typewriter keyboard", - "umbrella", - "unicycle, monocycle", - "upright, upright piano", - "vacuum, vacuum cleaner", - "vase", - "vault", - "velvet", - "vending machine", - "vestment", - "viaduct", - "violin, fiddle", - "volleyball", - "waffle iron", - "wall clock", - "wallet, billfold, notecase, pocketbook", - "wardrobe, closet, press", - "warplane, military plane", - "washbasin, handbasin, washbowl, lavabo, wash-hand basin", - "washer, automatic washer, washing machine", - "water bottle", - "water jug", - "water tower", - "whiskey jug", - "whistle", - "wig", - "window screen", - "window shade", - "Windsor tie", - "wine bottle", - "wing", - "wok", - "wooden spoon", - "wool, woolen, woollen", - "worm fence, snake fence, snake-rail fence, Virginia fence", - "wreck", - "yawl", - "yurt", - "web site, website, internet site, site", - "comic book", - "crossword puzzle, crossword", - "street sign", - "traffic light, traffic signal, stoplight", - "book jacket, dust cover, dust jacket, dust wrapper", - "menu", - "plate", - "guacamole", - "consomme", - "hot pot, hotpot", - "trifle", - "ice cream, icecream", - "ice lolly, lolly, lollipop, popsicle", - "French loaf", - "bagel, beigel", - "pretzel", - "cheeseburger", - "hotdog, hot dog, red hot", - "mashed potato", - "head cabbage", - "broccoli", - "cauliflower", - "zucchini, courgette", - "spaghetti squash", - "acorn squash", - "butternut squash", - "cucumber, cuke", - "artichoke, globe artichoke", - "bell pepper", - "cardoon", - "mushroom", - "Granny Smith", - "strawberry", - "orange", - "lemon", - "fig", - "pineapple, ananas", - "banana", - "jackfruit, jak, jack", - "custard apple", - "pomegranate", - "hay", - "carbonara", - "chocolate sauce, chocolate syrup", - "dough", - "meat loaf, meatloaf", - "pizza, pizza pie", - "potpie", - "burrito", - "red wine", - "espresso", - "cup", - "eggnog", - "alp", - "bubble", - "cliff, drop, drop-off", - "coral reef", - "geyser", - "lakeside, lakeshore", - 
"promontory, headland, head, foreland", - "sandbar, sand bar", - "seashore, coast, seacoast, sea-coast", - "valley, vale", - "volcano", - "ballplayer, baseball player", - "groom, bridegroom", - "scuba diver", - "rapeseed", - "daisy", - "yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", - "corn", - "acorn", - "hip, rose hip, rosehip", - "buckeye, horse chestnut, conker", - "coral fungus", - "agaric", - "gyromitra", - "stinkhorn, carrion fungus", - "earthstar", - "hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa", - "bolete", - "ear, spike, capitulum", - "toilet tissue, toilet paper, bathroom tissue", - }; - - return labels; -} diff --git a/models/image_classification_mobilenet/mobilenet.py b/models/image_classification_mobilenet/mobilenet.py deleted file mode 100644 index f2cf111d..00000000 --- a/models/image_classification_mobilenet/mobilenet.py +++ /dev/null @@ -1,1078 +0,0 @@ -import numpy as np -import cv2 as cv - -class MobileNet: - ''' - Works with MobileNet V1 & V2. 
- ''' - - def __init__(self, modelPath, topK=1, loadLabel=True, backendId=0, targetId=0): - self.model_path = modelPath - assert topK >= 1 - self.top_k = topK - self.load_label = loadLabel - self.backend_id = backendId - self.target_id = targetId - - self.model = cv.dnn.readNet(self.model_path) - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - self.input_names = '' - self.output_names = '' - self.input_size = [224, 224] - self.mean=[0.485, 0.456, 0.406] - self.std=[0.229, 0.224, 0.225] - - # load labels - self._labels = self._load_labels() - - def _load_labels(self): - return self.LABELS_IMAGENET_1K.splitlines() - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self.backend_id = backendId - self.target_id = targetId - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - def _preprocess(self, image): - input_blob = (image / 255.0 - self.mean) / self.std - input_blob = input_blob.transpose(2, 0, 1) - input_blob = input_blob[np.newaxis, :, :, :] - input_blob = input_blob.astype(np.float32) - return input_blob - - def infer(self, image): - # Preprocess - input_blob = self._preprocess(image) - - # Forward - self.model.setInput(input_blob, self.input_names) - output_blob = self.model.forward(self.output_names) - - # Postprocess - results = self._postprocess(output_blob) - - return results - - def _postprocess(self, output_blob): - batched_class_id_list = [] - for o in output_blob: - class_id_list = o.argsort()[::-1][:self.top_k] - batched_class_id_list.append(class_id_list) - if len(self._labels) > 0 and self.load_label: - batched_predicted_labels = [] - for class_id_list in batched_class_id_list: - predicted_labels = [] - for class_id in class_id_list: - predicted_labels.append(self._labels[class_id]) - batched_predicted_labels.append(predicted_labels) - return batched_predicted_labels - else: - 
return batched_class_id_list - - LABELS_IMAGENET_1K = '''tench -goldfish -great white shark -tiger shark -hammerhead -electric ray -stingray -cock -hen -ostrich -brambling -goldfinch -house finch -junco -indigo bunting -robin -bulbul -jay -magpie -chickadee -water ouzel -kite -bald eagle -vulture -great grey owl -European fire salamander -common newt -eft -spotted salamander -axolotl -bullfrog -tree frog -tailed frog -loggerhead -leatherback turtle -mud turtle -terrapin -box turtle -banded gecko -common iguana -American chameleon -whiptail -agama -frilled lizard -alligator lizard -Gila monster -green lizard -African chameleon -Komodo dragon -African crocodile -American alligator -triceratops -thunder snake -ringneck snake -hognose snake -green snake -king snake -garter snake -water snake -vine snake -night snake -boa constrictor -rock python -Indian cobra -green mamba -sea snake -horned viper -diamondback -sidewinder -trilobite -harvestman -scorpion -black and gold garden spider -barn spider -garden spider -black widow -tarantula -wolf spider -tick -centipede -black grouse -ptarmigan -ruffed grouse -prairie chicken -peacock -quail -partridge -African grey -macaw -sulphur-crested cockatoo -lorikeet -coucal -bee eater -hornbill -hummingbird -jacamar -toucan -drake -red-breasted merganser -goose -black swan -tusker -echidna -platypus -wallaby -koala -wombat -jellyfish -sea anemone -brain coral -flatworm -nematode -conch -snail -slug -sea slug -chiton -chambered nautilus -Dungeness crab -rock crab -fiddler crab -king crab -American lobster -spiny lobster -crayfish -hermit crab -isopod -white stork -black stork -spoonbill -flamingo -little blue heron -American egret -bittern -crane -limpkin -European gallinule -American coot -bustard -ruddy turnstone -red-backed sandpiper -redshank -dowitcher -oystercatcher -pelican -king penguin -albatross -grey whale -killer whale -dugong -sea lion -Chihuahua -Japanese spaniel -Maltese dog -Pekinese -Shih-Tzu -Blenheim spaniel 
-papillon -toy terrier -Rhodesian ridgeback -Afghan hound -basset -beagle -bloodhound -bluetick -black-and-tan coonhound -Walker hound -English foxhound -redbone -borzoi -Irish wolfhound -Italian greyhound -whippet -Ibizan hound -Norwegian elkhound -otterhound -Saluki -Scottish deerhound -Weimaraner -Staffordshire bullterrier -American Staffordshire terrier -Bedlington terrier -Border terrier -Kerry blue terrier -Irish terrier -Norfolk terrier -Norwich terrier -Yorkshire terrier -wire-haired fox terrier -Lakeland terrier -Sealyham terrier -Airedale -cairn -Australian terrier -Dandie Dinmont -Boston bull -miniature schnauzer -giant schnauzer -standard schnauzer -Scotch terrier -Tibetan terrier -silky terrier -soft-coated wheaten terrier -West Highland white terrier -Lhasa -flat-coated retriever -curly-coated retriever -golden retriever -Labrador retriever -Chesapeake Bay retriever -German short-haired pointer -vizsla -English setter -Irish setter -Gordon setter -Brittany spaniel -clumber -English springer -Welsh springer spaniel -cocker spaniel -Sussex spaniel -Irish water spaniel -kuvasz -schipperke -groenendael -malinois -briard -kelpie -komondor -Old English sheepdog -Shetland sheepdog -collie -Border collie -Bouvier des Flandres -Rottweiler -German shepherd -Doberman -miniature pinscher -Greater Swiss Mountain dog -Bernese mountain dog -Appenzeller -EntleBucher -boxer -bull mastiff -Tibetan mastiff -French bulldog -Great Dane -Saint Bernard -Eskimo dog -malamute -Siberian husky -dalmatian -affenpinscher -basenji -pug -Leonberg -Newfoundland -Great Pyrenees -Samoyed -Pomeranian -chow -keeshond -Brabancon griffon -Pembroke -Cardigan -toy poodle -miniature poodle -standard poodle -Mexican hairless -timber wolf -white wolf -red wolf -coyote -dingo -dhole -African hunting dog -hyena -red fox -kit fox -Arctic fox -grey fox -tabby -tiger cat -Persian cat -Siamese cat -Egyptian cat -cougar -lynx -leopard -snow leopard -jaguar -lion -tiger -cheetah -brown bear -American 
black bear -ice bear -sloth bear -mongoose -meerkat -tiger beetle -ladybug -ground beetle -long-horned beetle -leaf beetle -dung beetle -rhinoceros beetle -weevil -fly -bee -ant -grasshopper -cricket -walking stick -cockroach -mantis -cicada -leafhopper -lacewing -dragonfly -damselfly -admiral -ringlet -monarch -cabbage butterfly -sulphur butterfly -lycaenid -starfish -sea urchin -sea cucumber -wood rabbit -hare -Angora -hamster -porcupine -fox squirrel -marmot -beaver -guinea pig -sorrel -zebra -hog -wild boar -warthog -hippopotamus -ox -water buffalo -bison -ram -bighorn -ibex -hartebeest -impala -gazelle -Arabian camel -llama -weasel -mink -polecat -black-footed ferret -otter -skunk -badger -armadillo -three-toed sloth -orangutan -gorilla -chimpanzee -gibbon -siamang -guenon -patas -baboon -macaque -langur -colobus -proboscis monkey -marmoset -capuchin -howler monkey -titi -spider monkey -squirrel monkey -Madagascar cat -indri -Indian elephant -African elephant -lesser panda -giant panda -barracouta -eel -coho -rock beauty -anemone fish -sturgeon -gar -lionfish -puffer -abacus -abaya -academic gown -accordion -acoustic guitar -aircraft carrier -airliner -airship -altar -ambulance -amphibian -analog clock -apiary -apron -ashcan -assault rifle -backpack -bakery -balance beam -balloon -ballpoint -Band Aid -banjo -bannister -barbell -barber chair -barbershop -barn -barometer -barrel -barrow -baseball -basketball -bassinet -bassoon -bathing cap -bath towel -bathtub -beach wagon -beacon -beaker -bearskin -beer bottle -beer glass -bell cote -bib -bicycle-built-for-two -bikini -binder -binoculars -birdhouse -boathouse -bobsled -bolo tie -bonnet -bookcase -bookshop -bottlecap -bow -bow tie -brass -brassiere -breakwater -breastplate -broom -bucket -buckle -bulletproof vest -bullet train -butcher shop -cab -caldron -candle -cannon -canoe -can opener -cardigan -car mirror -carousel -carpenters kit -carton -car wheel -cash machine -cassette -cassette player -castle 
-catamaran -CD player -cello -cellular telephone -chain -chainlink fence -chain mail -chain saw -chest -chiffonier -chime -china cabinet -Christmas stocking -church -cinema -cleaver -cliff dwelling -cloak -clog -cocktail shaker -coffee mug -coffeepot -coil -combination lock -computer keyboard -confectionery -container ship -convertible -corkscrew -cornet -cowboy boot -cowboy hat -cradle -crane -crash helmet -crate -crib -Crock Pot -croquet ball -crutch -cuirass -dam -desk -desktop computer -dial telephone -diaper -digital clock -digital watch -dining table -dishrag -dishwasher -disk brake -dock -dogsled -dome -doormat -drilling platform -drum -drumstick -dumbbell -Dutch oven -electric fan -electric guitar -electric locomotive -entertainment center -envelope -espresso maker -face powder -feather boa -file -fireboat -fire engine -fire screen -flagpole -flute -folding chair -football helmet -forklift -fountain -fountain pen -four-poster -freight car -French horn -frying pan -fur coat -garbage truck -gasmask -gas pump -goblet -go-kart -golf ball -golfcart -gondola -gong -gown -grand piano -greenhouse -grille -grocery store -guillotine -hair slide -hair spray -half track -hammer -hamper -hand blower -hand-held computer -handkerchief -hard disc -harmonica -harp -harvester -hatchet -holster -home theater -honeycomb -hook -hoopskirt -horizontal bar -horse cart -hourglass -iPod -iron -jack-o-lantern -jean -jeep -jersey -jigsaw puzzle -jinrikisha -joystick -kimono -knee pad -knot -lab coat -ladle -lampshade -laptop -lawn mower -lens cap -letter opener -library -lifeboat -lighter -limousine -liner -lipstick -Loafer -lotion -loudspeaker -loupe -lumbermill -magnetic compass -mailbag -mailbox -maillot -maillot -manhole cover -maraca -marimba -mask -matchstick -maypole -maze -measuring cup -medicine chest -megalith -microphone -microwave -military uniform -milk can -minibus -miniskirt -minivan -missile -mitten -mixing bowl -mobile home -Model T -modem -monastery -monitor -moped 
-mortar -mortarboard -mosque -mosquito net -motor scooter -mountain bike -mountain tent -mouse -mousetrap -moving van -muzzle -nail -neck brace -necklace -nipple -notebook -obelisk -oboe -ocarina -odometer -oil filter -organ -oscilloscope -overskirt -oxcart -oxygen mask -packet -paddle -paddlewheel -padlock -paintbrush -pajama -palace -panpipe -paper towel -parachute -parallel bars -park bench -parking meter -passenger car -patio -pay-phone -pedestal -pencil box -pencil sharpener -perfume -Petri dish -photocopier -pick -pickelhaube -picket fence -pickup -pier -piggy bank -pill bottle -pillow -ping-pong ball -pinwheel -pirate -pitcher -plane -planetarium -plastic bag -plate rack -plow -plunger -Polaroid camera -pole -police van -poncho -pool table -pop bottle -pot -potters wheel -power drill -prayer rug -printer -prison -projectile -projector -puck -punching bag -purse -quill -quilt -racer -racket -radiator -radio -radio telescope -rain barrel -recreational vehicle -reel -reflex camera -refrigerator -remote control -restaurant -revolver -rifle -rocking chair -rotisserie -rubber eraser -rugby ball -rule -running shoe -safe -safety pin -saltshaker -sandal -sarong -sax -scabbard -scale -school bus -schooner -scoreboard -screen -screw -screwdriver -seat belt -sewing machine -shield -shoe shop -shoji -shopping basket -shopping cart -shovel -shower cap -shower curtain -ski -ski mask -sleeping bag -slide rule -sliding door -slot -snorkel -snowmobile -snowplow -soap dispenser -soccer ball -sock -solar dish -sombrero -soup bowl -space bar -space heater -space shuttle -spatula -speedboat -spider web -spindle -sports car -spotlight -stage -steam locomotive -steel arch bridge -steel drum -stethoscope -stole -stone wall -stopwatch -stove -strainer -streetcar -stretcher -studio couch -stupa -submarine -suit -sundial -sunglass -sunglasses -sunscreen -suspension bridge -swab -sweatshirt -swimming trunks -swing -switch -syringe -table lamp -tank -tape player -teapot -teddy 
-television -tennis ball -thatch -theater curtain -thimble -thresher -throne -tile roof -toaster -tobacco shop -toilet seat -torch -totem pole -tow truck -toyshop -tractor -trailer truck -tray -trench coat -tricycle -trimaran -tripod -triumphal arch -trolleybus -trombone -tub -turnstile -typewriter keyboard -umbrella -unicycle -upright -vacuum -vase -vault -velvet -vending machine -vestment -viaduct -violin -volleyball -waffle iron -wall clock -wallet -wardrobe -warplane -washbasin -washer -water bottle -water jug -water tower -whiskey jug -whistle -wig -window screen -window shade -Windsor tie -wine bottle -wing -wok -wooden spoon -wool -worm fence -wreck -yawl -yurt -web site -comic book -crossword puzzle -street sign -traffic light -book jacket -menu -plate -guacamole -consomme -hot pot -trifle -ice cream -ice lolly -French loaf -bagel -pretzel -cheeseburger -hotdog -mashed potato -head cabbage -broccoli -cauliflower -zucchini -spaghetti squash -acorn squash -butternut squash -cucumber -artichoke -bell pepper -cardoon -mushroom -Granny Smith -strawberry -orange -lemon -fig -pineapple -banana -jackfruit -custard apple -pomegranate -hay -carbonara -chocolate sauce -dough -meat loaf -pizza -potpie -burrito -red wine -espresso -cup -eggnog -alp -bubble -cliff -coral reef -geyser -lakeside -promontory -sandbar -seashore -valley -volcano -ballplayer -groom -scuba diver -rapeseed -daisy -yellow ladys slipper -corn -acorn -hip -buckeye -coral fungus -agaric -gyromitra -stinkhorn -earthstar -hen-of-the-woods -bolete -ear -toilet tissue''' diff --git a/models/image_classification_ppresnet/CMakeLists.txt b/models/image_classification_ppresnet/CMakeLists.txt deleted file mode 100644 index 9707d1e6..00000000 --- a/models/image_classification_ppresnet/CMakeLists.txt +++ /dev/null @@ -1,32 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(project_name "opencv_zoo_image_classification_ppresnet") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") 
-set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. -# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Set C++ compilation standard to C++11 -set(CMAKE_CXX_STANDARD 11) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/image_classification_ppresnet/LICENSE b/models/image_classification_ppresnet/LICENSE deleted file mode 100644 index 94255ff4..00000000 --- a/models/image_classification_ppresnet/LICENSE +++ /dev/null @@ -1,203 +0,0 @@ -Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/models/image_classification_ppresnet/README.md b/models/image_classification_ppresnet/README.md deleted file mode 100644 index 88cdf6d0..00000000 --- a/models/image_classification_ppresnet/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# ResNet - -Deep Residual Learning for Image Recognition - -This model is ported from [PaddleHub](https://github.com/PaddlePaddle/PaddleHub) using [this script from OpenCV](https://github.com/opencv/opencv/blob/master/samples/dnn/dnn_model_runner/dnn_conversion/paddlepaddle/paddle_resnet50.py). - -**Note**: -- `image_classification_ppresnet50_2022jan_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -Results of accuracy evaluation with [tools/eval](../../tools/eval). - -| Models | Top-1 Accuracy | Top-5 Accuracy | -| --------------- | -------------- | -------------- | -| PP-ResNet | 82.28 | 96.15 | -| PP-ResNet block | 82.27 | 96.15 | -| PP-ResNet quant | 0.22 | 0.96 | - -\*: 'quant' stands for 'quantized'. -\*\*: 'block' stands for 'blockwise quantized'. 
- -## Demo - -Run the following commands to try the demo: - -### Python - -```shell -python demo.py --input /path/to/image - -# get help regarding various parameters -python demo.py --help -``` -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on an image -./build/opencv_zoo_image_classification_ppresnet -i=/path/to/image - -# detect on an image and display top N classes -./build/opencv_zoo_image_classification_ppresnet -i=/path/to/image -k=N - -# get help messages -./build/opencv_zoo_image_classification_ppresnet -h -``` - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). - -## Reference - -- https://arxiv.org/abs/1512.03385 -- https://github.com/opencv/opencv/tree/master/samples/dnn/dnn_model_runner/dnn_conversion/paddlepaddle -- https://github.com/PaddlePaddle/PaddleHub diff --git a/models/image_classification_ppresnet/demo.cpp b/models/image_classification_ppresnet/demo.cpp deleted file mode 100644 index cb54d3b8..00000000 --- a/models/image_classification_ppresnet/demo.cpp +++ /dev/null @@ -1,1123 +0,0 @@ -#include -#include -#include -#include - -using namespace std; -using namespace cv; -using namespace dnn; - -extern vector LABELS_IMAGENET_1K; - -class PPResNet { -public: - PPResNet(const string& modelPath, int topK, int backendId, int targetId) - : _topK(topK) { - _model = readNet(modelPath); - _model.setPreferableBackend(backendId); - _model.setPreferableTarget(targetId); - } - - Mat preprocess(const Mat& image) - { - Mat floatImage; - image.convertTo(floatImage, CV_32F, 1.0 / 255.0); - subtract(floatImage, _mean, floatImage); - divide(floatImage, _std, floatImage); - return blobFromImage(floatImage); - } - - vector infer(const Mat& image) - { - assert(image.rows == _inputSize.height && image.cols == 
_inputSize.width); - Mat inputBlob = preprocess(image); - _model.setInput(inputBlob, _inputName); - Mat outputBlob = _model.forward(_outputName); - vector results = postprocess(outputBlob); - return results; - } - - vector postprocess(const Mat& outputBlob) - { - vector class_id_list; - sortIdx(outputBlob, class_id_list, SORT_EVERY_ROW | SORT_DESCENDING); - class_id_list.resize(min(_topK, static_cast(outputBlob.cols))); - vector predicted_labels; - for (int class_id : class_id_list) - { - predicted_labels.push_back(LABELS_IMAGENET_1K[class_id]); - } - return predicted_labels; - } - -private: - Net _model; - int _topK; - const Size _inputSize = Size(224, 224); - const Scalar _mean = Scalar(0.485, 0.456, 0.406); - const Scalar _std = Scalar(0.229, 0.224, 0.225); - string _inputName = ""; - string _outputName = "save_infer_model/scale_0.tmp_0"; -}; - -const vector> backend_target_pairs = -{ - {DNN_BACKEND_OPENCV, DNN_TARGET_CPU}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA_FP16}, - {DNN_BACKEND_TIMVX, DNN_TARGET_NPU}, - {DNN_BACKEND_CANN, DNN_TARGET_NPU} -}; - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, - "{ input i | | Set input path to a certain image, omit if using camera.}" - "{ model m | image_classification_ppresnet50_2022jan.onnx | Set model path.}" - "{ top_k k | 1 | Get top k predictions.}" - "{ backend_target bt | 0 | Choose one of computation backends: " - "0: (default) OpenCV implementation + CPU, " - "1: CUDA + GPU (CUDA), " - "2: CUDA + GPU (CUDA FP16), " - "3: TIM-VX + NPU, " - "4: CANN + NPU}"); - - string inputPath = parser.get("input"); - string modelPath = parser.get("model"); - int backendTarget = parser.get("backend_target"); - int topK = parser.get("top_k"); - - int backendId = backend_target_pairs[backendTarget][0]; - int targetId = backend_target_pairs[backendTarget][1]; - - PPResNet model(modelPath, topK, backendId, targetId); - - // Read image and get a 224x224 crop from a 256x256 
resized - Mat image = imread(inputPath); - cvtColor(image, image, COLOR_BGR2RGB); - resize(image, image, Size(256, 256)); - image = image(Rect(16, 16, 224, 224)); - - // Inference - auto predictions = model.infer(image); - - // Print result - if (topK == 1) - { - cout << "Predicted Label: " << predictions[0] << endl; - } - else - { - cout << "Predicted Top-K Labels (in decreasing confidence): " << endl; - for (size_t i = 0; i < predictions.size(); ++i) - { - cout << "(" << i+1 << ") " << predictions[i] << endl; - } - } - - return 0; -} - -vector LABELS_IMAGENET_1K = -{ - "tench", - "goldfish", - "great white shark", - "tiger shark", - "hammerhead", - "electric ray", - "stingray", - "cock", - "hen", - "ostrich", - "brambling", - "goldfinch", - "house finch", - "junco", - "indigo bunting", - "robin", - "bulbul", - "jay", - "magpie", - "chickadee", - "water ouzel", - "kite", - "bald eagle", - "vulture", - "great grey owl", - "European fire salamander", - "common newt", - "eft", - "spotted salamander", - "axolotl", - "bullfrog", - "tree frog", - "tailed frog", - "loggerhead", - "leatherback turtle", - "mud turtle", - "terrapin", - "box turtle", - "banded gecko", - "common iguana", - "American chameleon", - "whiptail", - "agama", - "frilled lizard", - "alligator lizard", - "Gila monster", - "green lizard", - "African chameleon", - "Komodo dragon", - "African crocodile", - "American alligator", - "triceratops", - "thunder snake", - "ringneck snake", - "hognose snake", - "green snake", - "king snake", - "garter snake", - "water snake", - "vine snake", - "night snake", - "boa constrictor", - "rock python", - "Indian cobra", - "green mamba", - "sea snake", - "horned viper", - "diamondback", - "sidewinder", - "trilobite", - "harvestman", - "scorpion", - "black and gold garden spider", - "barn spider", - "garden spider", - "black widow", - "tarantula", - "wolf spider", - "tick", - "centipede", - "black grouse", - "ptarmigan", - "ruffed grouse", - "prairie chicken", - 
"peacock", - "quail", - "partridge", - "African grey", - "macaw", - "sulphur-crested cockatoo", - "lorikeet", - "coucal", - "bee eater", - "hornbill", - "hummingbird", - "jacamar", - "toucan", - "drake", - "red-breasted merganser", - "goose", - "black swan", - "tusker", - "echidna", - "platypus", - "wallaby", - "koala", - "wombat", - "jellyfish", - "sea anemone", - "brain coral", - "flatworm", - "nematode", - "conch", - "snail", - "slug", - "sea slug", - "chiton", - "chambered nautilus", - "Dungeness crab", - "rock crab", - "fiddler crab", - "king crab", - "American lobster", - "spiny lobster", - "crayfish", - "hermit crab", - "isopod", - "white stork", - "black stork", - "spoonbill", - "flamingo", - "little blue heron", - "American egret", - "bittern", - "crane", - "limpkin", - "European gallinule", - "American coot", - "bustard", - "ruddy turnstone", - "red-backed sandpiper", - "redshank", - "dowitcher", - "oystercatcher", - "pelican", - "king penguin", - "albatross", - "grey whale", - "killer whale", - "dugong", - "sea lion", - "Chihuahua", - "Japanese spaniel", - "Maltese dog", - "Pekinese", - "Shih-Tzu", - "Blenheim spaniel", - "papillon", - "toy terrier", - "Rhodesian ridgeback", - "Afghan hound", - "basset", - "beagle", - "bloodhound", - "bluetick", - "black-and-tan coonhound", - "Walker hound", - "English foxhound", - "redbone", - "borzoi", - "Irish wolfhound", - "Italian greyhound", - "whippet", - "Ibizan hound", - "Norwegian elkhound", - "otterhound", - "Saluki", - "Scottish deerhound", - "Weimaraner", - "Staffordshire bullterrier", - "American Staffordshire terrier", - "Bedlington terrier", - "Border terrier", - "Kerry blue terrier", - "Irish terrier", - "Norfolk terrier", - "Norwich terrier", - "Yorkshire terrier", - "wire-haired fox terrier", - "Lakeland terrier", - "Sealyham terrier", - "Airedale", - "cairn", - "Australian terrier", - "Dandie Dinmont", - "Boston bull", - "miniature schnauzer", - "giant schnauzer", - "standard schnauzer", - "Scotch 
terrier", - "Tibetan terrier", - "silky terrier", - "soft-coated wheaten terrier", - "West Highland white terrier", - "Lhasa", - "flat-coated retriever", - "curly-coated retriever", - "golden retriever", - "Labrador retriever", - "Chesapeake Bay retriever", - "German short-haired pointer", - "vizsla", - "English setter", - "Irish setter", - "Gordon setter", - "Brittany spaniel", - "clumber", - "English springer", - "Welsh springer spaniel", - "cocker spaniel", - "Sussex spaniel", - "Irish water spaniel", - "kuvasz", - "schipperke", - "groenendael", - "malinois", - "briard", - "kelpie", - "komondor", - "Old English sheepdog", - "Shetland sheepdog", - "collie", - "Border collie", - "Bouvier des Flandres", - "Rottweiler", - "German shepherd", - "Doberman", - "miniature pinscher", - "Greater Swiss Mountain dog", - "Bernese mountain dog", - "Appenzeller", - "EntleBucher", - "boxer", - "bull mastiff", - "Tibetan mastiff", - "French bulldog", - "Great Dane", - "Saint Bernard", - "Eskimo dog", - "malamute", - "Siberian husky", - "dalmatian", - "affenpinscher", - "basenji", - "pug", - "Leonberg", - "Newfoundland", - "Great Pyrenees", - "Samoyed", - "Pomeranian", - "chow", - "keeshond", - "Brabancon griffon", - "Pembroke", - "Cardigan", - "toy poodle", - "miniature poodle", - "standard poodle", - "Mexican hairless", - "timber wolf", - "white wolf", - "red wolf", - "coyote", - "dingo", - "dhole", - "African hunting dog", - "hyena", - "red fox", - "kit fox", - "Arctic fox", - "grey fox", - "tabby", - "tiger cat", - "Persian cat", - "Siamese cat", - "Egyptian cat", - "cougar", - "lynx", - "leopard", - "snow leopard", - "jaguar", - "lion", - "tiger", - "cheetah", - "brown bear", - "American black bear", - "ice bear", - "sloth bear", - "mongoose", - "meerkat", - "tiger beetle", - "ladybug", - "ground beetle", - "long-horned beetle", - "leaf beetle", - "dung beetle", - "rhinoceros beetle", - "weevil", - "fly", - "bee", - "ant", - "grasshopper", - "cricket", - "walking stick", - 
"cockroach", - "mantis", - "cicada", - "leafhopper", - "lacewing", - "dragonfly", - "damselfly", - "admiral", - "ringlet", - "monarch", - "cabbage butterfly", - "sulphur butterfly", - "lycaenid", - "starfish", - "sea urchin", - "sea cucumber", - "wood rabbit", - "hare", - "Angora", - "hamster", - "porcupine", - "fox squirrel", - "marmot", - "beaver", - "guinea pig", - "sorrel", - "zebra", - "hog", - "wild boar", - "warthog", - "hippopotamus", - "ox", - "water buffalo", - "bison", - "ram", - "bighorn", - "ibex", - "hartebeest", - "impala", - "gazelle", - "Arabian camel", - "llama", - "weasel", - "mink", - "polecat", - "black-footed ferret", - "otter", - "skunk", - "badger", - "armadillo", - "three-toed sloth", - "orangutan", - "gorilla", - "chimpanzee", - "gibbon", - "siamang", - "guenon", - "patas", - "baboon", - "macaque", - "langur", - "colobus", - "proboscis monkey", - "marmoset", - "capuchin", - "howler monkey", - "titi", - "spider monkey", - "squirrel monkey", - "Madagascar cat", - "indri", - "Indian elephant", - "African elephant", - "lesser panda", - "giant panda", - "barracouta", - "eel", - "coho", - "rock beauty", - "anemone fish", - "sturgeon", - "gar", - "lionfish", - "puffer", - "abacus", - "abaya", - "academic gown", - "accordion", - "acoustic guitar", - "aircraft carrier", - "airliner", - "airship", - "altar", - "ambulance", - "amphibian", - "analog clock", - "apiary", - "apron", - "ashcan", - "assault rifle", - "backpack", - "bakery", - "balance beam", - "balloon", - "ballpoint", - "Band Aid", - "banjo", - "bannister", - "barbell", - "barber chair", - "barbershop", - "barn", - "barometer", - "barrel", - "barrow", - "baseball", - "basketball", - "bassinet", - "bassoon", - "bathing cap", - "bath towel", - "bathtub", - "beach wagon", - "beacon", - "beaker", - "bearskin", - "beer bottle", - "beer glass", - "bell cote", - "bib", - "bicycle-built-for-two", - "bikini", - "binder", - "binoculars", - "birdhouse", - "boathouse", - "bobsled", - "bolo tie", - 
"bonnet", - "bookcase", - "bookshop", - "bottlecap", - "bow", - "bow tie", - "brass", - "brassiere", - "breakwater", - "breastplate", - "broom", - "bucket", - "buckle", - "bulletproof vest", - "bullet train", - "butcher shop", - "cab", - "caldron", - "candle", - "cannon", - "canoe", - "can opener", - "cardigan", - "car mirror", - "carousel", - "carpenter's kit", - "carton", - "car wheel", - "cash machine", - "cassette", - "cassette player", - "castle", - "catamaran", - "CD player", - "cello", - "cellular telephone", - "chain", - "chainlink fence", - "chain mail", - "chain saw", - "chest", - "chiffonier", - "chime", - "china cabinet", - "Christmas stocking", - "church", - "cinema", - "cleaver", - "cliff dwelling", - "cloak", - "clog", - "cocktail shaker", - "coffee mug", - "coffeepot", - "coil", - "combination lock", - "computer keyboard", - "confectionery", - "container ship", - "convertible", - "corkscrew", - "cornet", - "cowboy boot", - "cowboy hat", - "cradle", - "crane", - "crash helmet", - "crate", - "crib", - "Crock Pot", - "croquet ball", - "crutch", - "cuirass", - "dam", - "desk", - "desktop computer", - "dial telephone", - "diaper", - "digital clock", - "digital watch", - "dining table", - "dishrag", - "dishwasher", - "disk brake", - "dock", - "dogsled", - "dome", - "doormat", - "drilling platform", - "drum", - "drumstick", - "dumbbell", - "Dutch oven", - "electric fan", - "electric guitar", - "electric locomotive", - "entertainment center", - "envelope", - "espresso maker", - "face powder", - "feather boa", - "filing cabinet", - "fireboat", - "fire engine", - "fire screen", - "flagpole", - "flute", - "folding chair", - "football helmet", - "forklift", - "fountain", - "fountain pen", - "four-poster", - "freight car", - "French horn", - "frying pan", - "fur coat", - "garbage truck", - "gasmask", - "gas pump", - "goblet", - "go-kart", - "golf ball", - "golfcart", - "gondola", - "gong", - "gown", - "grand piano", - "greenhouse", - "grille", - "grocery store", 
- "guillotine", - "hair slide", - "hair spray", - "half track", - "hammer", - "hamper", - "hand blower", - "hand-held computer", - "handkerchief", - "hard disc", - "harmonica", - "harp", - "harvester", - "hatchet", - "holster", - "home theater", - "honeycomb", - "hook", - "hoopskirt", - "horizontal bar", - "horse cart", - "hourglass", - "iPod", - "iron", - "jack-o'-lantern", - "jean", - "jeep", - "jersey", - "jigsaw puzzle", - "jinrikisha", - "joystick", - "kimono", - "knee pad", - "knot", - "lab coat", - "ladle", - "lampshade", - "laptop", - "lawn mower", - "lens cap", - "letter opener", - "library", - "lifeboat", - "lighter", - "limousine", - "liner", - "lipstick", - "Loafer", - "lotion", - "loudspeaker", - "loupe", - "lumbermill", - "magnetic compass", - "mailbag", - "mailbox", - "maillot", - "maillot", - "manhole cover", - "maraca", - "marimba", - "mask", - "matchstick", - "maypole", - "maze", - "measuring cup", - "medicine chest", - "megalith", - "microphone", - "microwave", - "military uniform", - "milk can", - "minibus", - "miniskirt", - "minivan", - "missile", - "mitten", - "mixing bowl", - "mobile home", - "Model T", - "modem", - "monastery", - "monitor", - "moped", - "mortar", - "mortarboard", - "mosque", - "mosquito net", - "motor scooter", - "mountain bike", - "mountain tent", - "mouse", - "mousetrap", - "moving van", - "muzzle", - "nail", - "neck brace", - "necklace", - "nipple", - "notebook", - "obelisk", - "oboe", - "ocarina", - "odometer", - "oil filter", - "organ", - "oscilloscope", - "overskirt", - "oxcart", - "oxygen mask", - "packet", - "paddle", - "paddlewheel", - "padlock", - "paintbrush", - "pajama", - "palace", - "panpipe", - "paper towel", - "parachute", - "parallel bars", - "park bench", - "parking meter", - "passenger car", - "patio", - "pay-phone", - "pedestal", - "pencil box", - "pencil sharpener", - "perfume", - "Petri dish", - "photocopier", - "pick", - "pickelhaube", - "picket fence", - "pickup", - "pier", - "piggy bank", - "pill 
bottle", - "pillow", - "ping-pong ball", - "pinwheel", - "pirate", - "pitcher", - "plane", - "planetarium", - "plastic bag", - "plate rack", - "plow", - "plunger", - "Polaroid camera", - "pole", - "police van", - "poncho", - "pool table", - "pop bottle", - "pot", - "potter's wheel", - "power drill", - "prayer rug", - "printer", - "prison", - "projectile", - "projector", - "puck", - "punching bag", - "purse", - "quill", - "quilt", - "racer", - "racket", - "radiator", - "radio", - "radio telescope", - "rain barrel", - "recreational vehicle", - "reel", - "reflex camera", - "refrigerator", - "remote control", - "restaurant", - "revolver", - "rifle", - "rocking chair", - "rotisserie", - "rubber eraser", - "rugby ball", - "rule", - "running shoe", - "safe", - "safety pin", - "saltshaker", - "sandal", - "sarong", - "sax", - "scabbard", - "scale", - "school bus", - "schooner", - "scoreboard", - "screen", - "screw", - "screwdriver", - "seat belt", - "sewing machine", - "shield", - "shoe shop", - "shoji", - "shopping basket", - "shopping cart", - "shovel", - "shower cap", - "shower curtain", - "ski", - "ski mask", - "sleeping bag", - "slide rule", - "sliding door", - "slot", - "snorkel", - "snowmobile", - "snowplow", - "soap dispenser", - "soccer ball", - "sock", - "solar dish", - "sombrero", - "soup bowl", - "space bar", - "space heater", - "space shuttle", - "spatula", - "speedboat", - "spider web", - "spindle", - "sports car", - "spotlight", - "stage", - "steam locomotive", - "steel arch bridge", - "steel drum", - "stethoscope", - "stole", - "stone wall", - "stopwatch", - "stove", - "strainer", - "streetcar", - "stretcher", - "studio couch", - "stupa", - "submarine", - "suit", - "sundial", - "sunglass", - "sunglasses", - "sunscreen", - "suspension bridge", - "swab", - "sweatshirt", - "swimming trunks", - "swing", - "switch", - "syringe", - "table lamp", - "tank", - "tape player", - "teapot", - "teddy", - "television", - "tennis ball", - "thatch", - "theater curtain", - 
"thimble", - "thresher", - "throne", - "tile roof", - "toaster", - "tobacco shop", - "toilet seat", - "torch", - "totem pole", - "tow truck", - "toyshop", - "tractor", - "trailer truck", - "tray", - "trench coat", - "tricycle", - "trimaran", - "tripod", - "triumphal arch", - "trolleybus", - "trombone", - "tub", - "turnstile", - "typewriter keyboard", - "umbrella", - "unicycle", - "upright", - "vacuum", - "vase", - "vault", - "velvet", - "vending machine", - "vestment", - "viaduct", - "violin", - "volleyball", - "waffle iron", - "wall clock", - "wallet", - "wardrobe", - "warplane", - "washbasin", - "washer", - "water bottle", - "water jug", - "water tower", - "whiskey jug", - "whistle", - "wig", - "window screen", - "window shade", - "Windsor tie", - "wine bottle", - "wing", - "wok", - "wooden spoon", - "wool", - "worm fence", - "wreck", - "yawl", - "yurt", - "web site", - "comic book", - "crossword puzzle", - "street sign", - "traffic light", - "book jacket", - "menu", - "plate", - "guacamole", - "consomme", - "hot pot", - "trifle", - "ice cream", - "ice lolly", - "French loaf", - "bagel", - "pretzel", - "cheeseburger", - "hotdog", - "mashed potato", - "head cabbage", - "broccoli", - "cauliflower", - "zucchini", - "spaghetti squash", - "acorn squash", - "butternut squash", - "cucumber", - "artichoke", - "bell pepper", - "cardoon", - "mushroom", - "Granny Smith", - "strawberry", - "orange", - "lemon", - "fig", - "pineapple", - "banana", - "jackfruit", - "custard apple", - "pomegranate", - "hay", - "carbonara", - "chocolate sauce", - "dough", - "meatloaf", - "pizza", - "potpie", - "burrito", - "red wine", - "espresso", - "cup", - "eggnog", - "alp", - "bubble", - "cliff", - "coral reef", - "geyser", - "lakeside", - "promontory", - "sandbar", - "seashore", - "valley", - "volcano", - "ballplayer", - "groom", - "scuba diver", - "rapeseed", - "daisy", - "yellow lady's slipper", - "corn", - "acorn", - "hip", - "buckeye", - "coral fungus", - "agaric", - "gyromitra", - 
"stinkhorn", - "earthstar", - "hen-of-the-woods", - "bolete", - "ear", - "toilet tissue" -}; diff --git a/models/image_classification_ppresnet/demo.py b/models/image_classification_ppresnet/demo.py deleted file mode 100644 index 8157f26f..00000000 --- a/models/image_classification_ppresnet/demo.py +++ /dev/null @@ -1,67 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. - -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from ppresnet import PPResNet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='Deep Residual Learning for Image Recognition (https://arxiv.org/abs/1512.03385, https://github.com/PaddlePaddle/PaddleHub)') -parser.add_argument('--input', '-i', type=str, - help='Usage: Set input path to a certain image, omit if using camera.') -parser.add_argument('--model', '-m', type=str, default='image_classification_ppresnet50_2022jan.onnx', - help='Usage: Set model path, defaults to image_classification_ppresnet50_2022jan.onnx.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run 
this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--top_k', type=int, default=1, - help='Usage: Get top k predictions.') -args = parser.parse_args() - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - top_k = args.top_k - # Instantiate ResNet - model = PPResNet(modelPath=args.model, topK=top_k, backendId=backend_id, targetId=target_id) - - # Read image and get a 224x224 crop from a 256x256 resized - image = cv.imread(args.input) - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - image = cv.resize(image, dsize=(256, 256)) - image = image[16:240, 16:240, :] - - # Inference - result = model.infer(image)[0] - - # Print result - if top_k == 1: - print(f"Predicted Label: {result[0]}") - else: - print("Predicted Top-K Labels (in decreasing confidence):") - for i, prediction in enumerate(result): - print(f"({i+1}) {prediction}") diff --git a/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan.onnx b/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan.onnx deleted file mode 100644 index d1e03061..00000000 --- a/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ad5486b0de6c2171ea4d28c734c2fb7c5f64fcdbd97180a0ef515cf4b766a405 -size 102567035 diff --git a/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8.onnx b/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8.onnx deleted file mode 100644 index 50a7c2cc..00000000 --- a/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:574bc954869eef09b40a3968bb19157c8faf4999419dca13cfaa3ee56ab5ecd4 -size 25692063 diff --git a/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8bq.onnx b/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8bq.onnx deleted file mode 100644 index 5dc4f5c5..00000000 --- a/models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:92c4ba8e363cc5114279ca61f62838600f3121481b74b73c744086b64c694003 -size 28093644 diff --git a/models/image_classification_ppresnet/ppresnet.py b/models/image_classification_ppresnet/ppresnet.py deleted file mode 100644 index 8c844ebd..00000000 --- a/models/image_classification_ppresnet/ppresnet.py +++ /dev/null @@ -1,1083 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- - -import numpy as np -import cv2 as cv - -class PPResNet: - def __init__(self, modelPath, topK=1, loadLabel=True, backendId=0, targetId=0): - self._modelPath = modelPath - assert topK >= 1 - self._topK = topK - self._load_label = loadLabel - self._backendId = backendId - self._targetId = targetId - - self._model = cv.dnn.readNet(self._modelPath) - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - self._inputNames = '' - self._outputNames = ['save_infer_model/scale_0.tmp_0'] - self._inputSize = [224, 224] - self._mean = np.array([0.485, 0.456, 0.406])[np.newaxis, np.newaxis, :] - self._std = np.array([0.229, 0.224, 0.225])[np.newaxis, np.newaxis, :] - - # load labels - self._labels = self._load_labels() - - def _load_labels(self): - return self.LABELS_IMAGENET_1K.splitlines() - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def _preprocess(self, image): - image = image.astype(np.float32, copy=False) / 255.0 - image -= self._mean - image /= self._std - return cv.dnn.blobFromImage(image) - - def infer(self, image): - assert image.shape[0] == self._inputSize[1], '{} (height of input image) != {} (preset height)'.format(image.shape[0], self._inputSize[1]) - assert image.shape[1] == self._inputSize[0], '{} (width of input image) != {} (preset width)'.format(image.shape[1], self._inputSize[0]) - - # Preprocess - inputBlob = self._preprocess(image) - - # Forward - self._model.setInput(inputBlob, self._inputNames) - outputBlob = self._model.forward(self._outputNames) - - # Postprocess - results = self._postprocess(outputBlob[0]) - - return results - - def _postprocess(self, outputBlob): - batched_class_id_list = [] - for ob in outputBlob: - class_id_list = 
ob.argsort()[::-1][:self._topK] - batched_class_id_list.append(class_id_list) - if len(self._labels) > 0 and self._load_label: - batched_predicted_labels = [] - for class_id_list in batched_class_id_list: - predicted_labels = [] - for class_id in class_id_list: - predicted_labels.append(self._labels[class_id]) - batched_predicted_labels.append(predicted_labels) - return batched_predicted_labels - else: - return batched_class_id_list - - LABELS_IMAGENET_1K = '''tench -goldfish -great white shark -tiger shark -hammerhead -electric ray -stingray -cock -hen -ostrich -brambling -goldfinch -house finch -junco -indigo bunting -robin -bulbul -jay -magpie -chickadee -water ouzel -kite -bald eagle -vulture -great grey owl -European fire salamander -common newt -eft -spotted salamander -axolotl -bullfrog -tree frog -tailed frog -loggerhead -leatherback turtle -mud turtle -terrapin -box turtle -banded gecko -common iguana -American chameleon -whiptail -agama -frilled lizard -alligator lizard -Gila monster -green lizard -African chameleon -Komodo dragon -African crocodile -American alligator -triceratops -thunder snake -ringneck snake -hognose snake -green snake -king snake -garter snake -water snake -vine snake -night snake -boa constrictor -rock python -Indian cobra -green mamba -sea snake -horned viper -diamondback -sidewinder -trilobite -harvestman -scorpion -black and gold garden spider -barn spider -garden spider -black widow -tarantula -wolf spider -tick -centipede -black grouse -ptarmigan -ruffed grouse -prairie chicken -peacock -quail -partridge -African grey -macaw -sulphur-crested cockatoo -lorikeet -coucal -bee eater -hornbill -hummingbird -jacamar -toucan -drake -red-breasted merganser -goose -black swan -tusker -echidna -platypus -wallaby -koala -wombat -jellyfish -sea anemone -brain coral -flatworm -nematode -conch -snail -slug -sea slug -chiton -chambered nautilus -Dungeness crab -rock crab -fiddler crab -king crab -American lobster -spiny lobster -crayfish 
-hermit crab -isopod -white stork -black stork -spoonbill -flamingo -little blue heron -American egret -bittern -crane -limpkin -European gallinule -American coot -bustard -ruddy turnstone -red-backed sandpiper -redshank -dowitcher -oystercatcher -pelican -king penguin -albatross -grey whale -killer whale -dugong -sea lion -Chihuahua -Japanese spaniel -Maltese dog -Pekinese -Shih-Tzu -Blenheim spaniel -papillon -toy terrier -Rhodesian ridgeback -Afghan hound -basset -beagle -bloodhound -bluetick -black-and-tan coonhound -Walker hound -English foxhound -redbone -borzoi -Irish wolfhound -Italian greyhound -whippet -Ibizan hound -Norwegian elkhound -otterhound -Saluki -Scottish deerhound -Weimaraner -Staffordshire bullterrier -American Staffordshire terrier -Bedlington terrier -Border terrier -Kerry blue terrier -Irish terrier -Norfolk terrier -Norwich terrier -Yorkshire terrier -wire-haired fox terrier -Lakeland terrier -Sealyham terrier -Airedale -cairn -Australian terrier -Dandie Dinmont -Boston bull -miniature schnauzer -giant schnauzer -standard schnauzer -Scotch terrier -Tibetan terrier -silky terrier -soft-coated wheaten terrier -West Highland white terrier -Lhasa -flat-coated retriever -curly-coated retriever -golden retriever -Labrador retriever -Chesapeake Bay retriever -German short-haired pointer -vizsla -English setter -Irish setter -Gordon setter -Brittany spaniel -clumber -English springer -Welsh springer spaniel -cocker spaniel -Sussex spaniel -Irish water spaniel -kuvasz -schipperke -groenendael -malinois -briard -kelpie -komondor -Old English sheepdog -Shetland sheepdog -collie -Border collie -Bouvier des Flandres -Rottweiler -German shepherd -Doberman -miniature pinscher -Greater Swiss Mountain dog -Bernese mountain dog -Appenzeller -EntleBucher -boxer -bull mastiff -Tibetan mastiff -French bulldog -Great Dane -Saint Bernard -Eskimo dog -malamute -Siberian husky -dalmatian -affenpinscher -basenji -pug -Leonberg -Newfoundland -Great Pyrenees -Samoyed 
-Pomeranian -chow -keeshond -Brabancon griffon -Pembroke -Cardigan -toy poodle -miniature poodle -standard poodle -Mexican hairless -timber wolf -white wolf -red wolf -coyote -dingo -dhole -African hunting dog -hyena -red fox -kit fox -Arctic fox -grey fox -tabby -tiger cat -Persian cat -Siamese cat -Egyptian cat -cougar -lynx -leopard -snow leopard -jaguar -lion -tiger -cheetah -brown bear -American black bear -ice bear -sloth bear -mongoose -meerkat -tiger beetle -ladybug -ground beetle -long-horned beetle -leaf beetle -dung beetle -rhinoceros beetle -weevil -fly -bee -ant -grasshopper -cricket -walking stick -cockroach -mantis -cicada -leafhopper -lacewing -dragonfly -damselfly -admiral -ringlet -monarch -cabbage butterfly -sulphur butterfly -lycaenid -starfish -sea urchin -sea cucumber -wood rabbit -hare -Angora -hamster -porcupine -fox squirrel -marmot -beaver -guinea pig -sorrel -zebra -hog -wild boar -warthog -hippopotamus -ox -water buffalo -bison -ram -bighorn -ibex -hartebeest -impala -gazelle -Arabian camel -llama -weasel -mink -polecat -black-footed ferret -otter -skunk -badger -armadillo -three-toed sloth -orangutan -gorilla -chimpanzee -gibbon -siamang -guenon -patas -baboon -macaque -langur -colobus -proboscis monkey -marmoset -capuchin -howler monkey -titi -spider monkey -squirrel monkey -Madagascar cat -indri -Indian elephant -African elephant -lesser panda -giant panda -barracouta -eel -coho -rock beauty -anemone fish -sturgeon -gar -lionfish -puffer -abacus -abaya -academic gown -accordion -acoustic guitar -aircraft carrier -airliner -airship -altar -ambulance -amphibian -analog clock -apiary -apron -ashcan -assault rifle -backpack -bakery -balance beam -balloon -ballpoint -Band Aid -banjo -bannister -barbell -barber chair -barbershop -barn -barometer -barrel -barrow -baseball -basketball -bassinet -bassoon -bathing cap -bath towel -bathtub -beach wagon -beacon -beaker -bearskin -beer bottle -beer glass -bell cote -bib -bicycle-built-for-two 
-bikini -binder -binoculars -birdhouse -boathouse -bobsled -bolo tie -bonnet -bookcase -bookshop -bottlecap -bow -bow tie -brass -brassiere -breakwater -breastplate -broom -bucket -buckle -bulletproof vest -bullet train -butcher shop -cab -caldron -candle -cannon -canoe -can opener -cardigan -car mirror -carousel -carpenters kit -carton -car wheel -cash machine -cassette -cassette player -castle -catamaran -CD player -cello -cellular telephone -chain -chainlink fence -chain mail -chain saw -chest -chiffonier -chime -china cabinet -Christmas stocking -church -cinema -cleaver -cliff dwelling -cloak -clog -cocktail shaker -coffee mug -coffeepot -coil -combination lock -computer keyboard -confectionery -container ship -convertible -corkscrew -cornet -cowboy boot -cowboy hat -cradle -crane -crash helmet -crate -crib -Crock Pot -croquet ball -crutch -cuirass -dam -desk -desktop computer -dial telephone -diaper -digital clock -digital watch -dining table -dishrag -dishwasher -disk brake -dock -dogsled -dome -doormat -drilling platform -drum -drumstick -dumbbell -Dutch oven -electric fan -electric guitar -electric locomotive -entertainment center -envelope -espresso maker -face powder -feather boa -file -fireboat -fire engine -fire screen -flagpole -flute -folding chair -football helmet -forklift -fountain -fountain pen -four-poster -freight car -French horn -frying pan -fur coat -garbage truck -gasmask -gas pump -goblet -go-kart -golf ball -golfcart -gondola -gong -gown -grand piano -greenhouse -grille -grocery store -guillotine -hair slide -hair spray -half track -hammer -hamper -hand blower -hand-held computer -handkerchief -hard disc -harmonica -harp -harvester -hatchet -holster -home theater -honeycomb -hook -hoopskirt -horizontal bar -horse cart -hourglass -iPod -iron -jack-o-lantern -jean -jeep -jersey -jigsaw puzzle -jinrikisha -joystick -kimono -knee pad -knot -lab coat -ladle -lampshade -laptop -lawn mower -lens cap -letter opener -library -lifeboat -lighter 
-limousine -liner -lipstick -Loafer -lotion -loudspeaker -loupe -lumbermill -magnetic compass -mailbag -mailbox -maillot -maillot -manhole cover -maraca -marimba -mask -matchstick -maypole -maze -measuring cup -medicine chest -megalith -microphone -microwave -military uniform -milk can -minibus -miniskirt -minivan -missile -mitten -mixing bowl -mobile home -Model T -modem -monastery -monitor -moped -mortar -mortarboard -mosque -mosquito net -motor scooter -mountain bike -mountain tent -mouse -mousetrap -moving van -muzzle -nail -neck brace -necklace -nipple -notebook -obelisk -oboe -ocarina -odometer -oil filter -organ -oscilloscope -overskirt -oxcart -oxygen mask -packet -paddle -paddlewheel -padlock -paintbrush -pajama -palace -panpipe -paper towel -parachute -parallel bars -park bench -parking meter -passenger car -patio -pay-phone -pedestal -pencil box -pencil sharpener -perfume -Petri dish -photocopier -pick -pickelhaube -picket fence -pickup -pier -piggy bank -pill bottle -pillow -ping-pong ball -pinwheel -pirate -pitcher -plane -planetarium -plastic bag -plate rack -plow -plunger -Polaroid camera -pole -police van -poncho -pool table -pop bottle -pot -potters wheel -power drill -prayer rug -printer -prison -projectile -projector -puck -punching bag -purse -quill -quilt -racer -racket -radiator -radio -radio telescope -rain barrel -recreational vehicle -reel -reflex camera -refrigerator -remote control -restaurant -revolver -rifle -rocking chair -rotisserie -rubber eraser -rugby ball -rule -running shoe -safe -safety pin -saltshaker -sandal -sarong -sax -scabbard -scale -school bus -schooner -scoreboard -screen -screw -screwdriver -seat belt -sewing machine -shield -shoe shop -shoji -shopping basket -shopping cart -shovel -shower cap -shower curtain -ski -ski mask -sleeping bag -slide rule -sliding door -slot -snorkel -snowmobile -snowplow -soap dispenser -soccer ball -sock -solar dish -sombrero -soup bowl -space bar -space heater -space shuttle -spatula 
-speedboat -spider web -spindle -sports car -spotlight -stage -steam locomotive -steel arch bridge -steel drum -stethoscope -stole -stone wall -stopwatch -stove -strainer -streetcar -stretcher -studio couch -stupa -submarine -suit -sundial -sunglass -sunglasses -sunscreen -suspension bridge -swab -sweatshirt -swimming trunks -swing -switch -syringe -table lamp -tank -tape player -teapot -teddy -television -tennis ball -thatch -theater curtain -thimble -thresher -throne -tile roof -toaster -tobacco shop -toilet seat -torch -totem pole -tow truck -toyshop -tractor -trailer truck -tray -trench coat -tricycle -trimaran -tripod -triumphal arch -trolleybus -trombone -tub -turnstile -typewriter keyboard -umbrella -unicycle -upright -vacuum -vase -vault -velvet -vending machine -vestment -viaduct -violin -volleyball -waffle iron -wall clock -wallet -wardrobe -warplane -washbasin -washer -water bottle -water jug -water tower -whiskey jug -whistle -wig -window screen -window shade -Windsor tie -wine bottle -wing -wok -wooden spoon -wool -worm fence -wreck -yawl -yurt -web site -comic book -crossword puzzle -street sign -traffic light -book jacket -menu -plate -guacamole -consomme -hot pot -trifle -ice cream -ice lolly -French loaf -bagel -pretzel -cheeseburger -hotdog -mashed potato -head cabbage -broccoli -cauliflower -zucchini -spaghetti squash -acorn squash -butternut squash -cucumber -artichoke -bell pepper -cardoon -mushroom -Granny Smith -strawberry -orange -lemon -fig -pineapple -banana -jackfruit -custard apple -pomegranate -hay -carbonara -chocolate sauce -dough -meat loaf -pizza -potpie -burrito -red wine -espresso -cup -eggnog -alp -bubble -cliff -coral reef -geyser -lakeside -promontory -sandbar -seashore -valley -volcano -ballplayer -groom -scuba diver -rapeseed -daisy -yellow ladys slipper -corn -acorn -hip -buckeye -coral fungus -agaric -gyromitra -stinkhorn -earthstar -hen-of-the-woods -bolete -ear -toilet tissue''' diff --git 
a/models/image_segmentation_efficientsam/LICENSE b/models/image_segmentation_efficientsam/LICENSE deleted file mode 100644 index 261eeb9e..00000000 --- a/models/image_segmentation_efficientsam/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/image_segmentation_efficientsam/README.md b/models/image_segmentation_efficientsam/README.md deleted file mode 100644 index 1a4b25ca..00000000 --- a/models/image_segmentation_efficientsam/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# image_segmentation_efficientsam - -EfficientSAM: Leveraged Masked Image Pretraining for Efficient Segment Anything - -Notes: -- The current implementation of the EfficientSAM demo uses the EfficientSAM-Ti model, which is specifically tailored for scenarios requiring higher speed and lightweight. 
-- image_segmentation_efficientsam_ti_2024may.onnx(supports only single point infering) - - MD5 value: 117d6a6cac60039a20b399cc133c2a60 - - SHA-256 value: e3957d2cd1422855f350aa7b044f47f5b3eafada64b5904ed330b696229e2943 -- image_segmentation_efficientsam_ti_2025april.onnx - - MD5 value: f23cecbb344547c960c933ff454536a3 - - SHA-256 value: 4eb496e0a7259d435b49b66faf1754aa45a5c382a34558ddda9a8c6fe5915d77 -- image_segmentation_efficientsam_ti_2025april_int8.onnx - - MD5 value: a1164f44b0495b82e9807c7256e95a50 - - SHA-256 value: 5ecc8d59a2802c32246e68553e1cf8ce74cf74ba707b84f206eb9181ff774b4e - - -## Demo - -### Python -Run the following command to try the demo: - -```shell -python demo.py --input /path/to/image -``` - -**Click** to select foreground points, **drag** to use box to select and **long press** to select background points on the object you wish to segment in the displayed image. After clicking the **Enter**, the segmentation result will be shown in a new window. Clicking the **Backspace** to clear all the prompts. - -## Result - -Here are some of the sample results that were observed using the model: - -![test1_res.jpg](./example_outputs/example1.png) -![test2_res.jpg](./example_outputs/example2.png) - -Video inference result: - -![sam_present.gif](./example_outputs/sam_present.gif) - -## Model metrics: - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -#### Contributor Details - -## Reference - -- https://arxiv.org/abs/2312.00863 -- https://github.com/yformer/EfficientSAM -- https://github.com/facebookresearch/segment-anything \ No newline at end of file diff --git a/models/image_segmentation_efficientsam/demo.py b/models/image_segmentation_efficientsam/demo.py deleted file mode 100644 index 306945d3..00000000 --- a/models/image_segmentation_efficientsam/demo.py +++ /dev/null @@ -1,247 +0,0 @@ -import argparse -import numpy as np -import cv2 as cv -from efficientSAM import EfficientSAM - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='EfficientSAM Demo') -parser.add_argument('--input', '-i', type=str, - help='Set input path to a certain image.') -parser.add_argument('--model', '-m', type=str, default='image_segmentation_efficientsam_ti_2025april.onnx', - help='Set model path, defaults to image_segmentation_efficientsam_ti_2025april.onnx.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--save', '-s', action='store_true', - help='Specify to save a file with results. 
Invalid in case of camera input.') -args = parser.parse_args() - -# Global configuration -WINDOW_SIZE = (800, 600) # Fixed window size (width, height) -MAX_POINTS = 6 # Maximum allowed points -points = [] # Store clicked coordinates (original image scale) -labels = [] # Point labels (-1: useless, 0: background, 1: foreground, 2: top-left, 3: bottom right) -backend_point = [] -rectangle = False -current_img = None - -def visualize(image, result): - """ - Visualize the inference result on the input image. - - Args: - image (np.ndarray): The input image. - result (np.ndarray): The inference result. - - Returns: - vis_result (np.ndarray): The visualized result. - """ - # get image and mask - vis_result = np.copy(image) - mask = np.copy(result) - # change mask to binary image - t, binary = cv.threshold(mask, 127, 255, cv.THRESH_BINARY) - assert set(np.unique(binary)) <= {0, 255}, "The mask must be a binary image." - # enhance red channel to make the segmentation more obviously - enhancement_factor = 1.8 - red_channel = vis_result[:, :, 2] - # update the channel - red_channel = np.where(binary == 255, np.minimum(red_channel * enhancement_factor, 255), red_channel) - vis_result[:, :, 2] = red_channel - - # draw borders - contours, hierarchy = cv.findContours(binary, cv.RETR_LIST, cv.CHAIN_APPROX_TC89_L1) - cv.drawContours(vis_result, contours, contourIdx = -1, color = (255,255,255), thickness=2) - return vis_result - -def select(event, x, y, flags, param): - """Handle mouse events with coordinate conversion""" - global points, labels, backend_point, rectangle, current_img - orig_img = param['original_img'] - image_window = param['image_window'] - - if event == cv.EVENT_LBUTTONDOWN: - param['mouse_down_time'] = cv.getTickCount() - backend_point = [x, y] - - elif event == cv.EVENT_MOUSEMOVE: - if rectangle == True: - rectangle_change_img = current_img.copy() - cv.rectangle(rectangle_change_img, (backend_point[0], backend_point[1]), (x, y), (255,0,0) , 2) - 
cv.imshow(image_window, rectangle_change_img) - elif len(backend_point) != 0 and len(points) < MAX_POINTS: - rectangle = True - - - elif event == cv.EVENT_LBUTTONUP: - if len(points) >= MAX_POINTS: - print(f"Maximum points reached {MAX_POINTS}.") - return - - if rectangle == False: - duration = (cv.getTickCount() - param['mouse_down_time'])/cv.getTickFrequency() - label = -1 if duration > 0.5 else 1 # Long press = background - - points.append([backend_point[0], backend_point[1]]) - labels.append(label) - print(f"Added {['background','foreground','background'][label]} point {backend_point}.") - else: - if len(points) + 1 >= MAX_POINTS: - rectangle = False - backend_point.clear() - cv.imshow(image_window, current_img) - print(f"Points reached {MAX_POINTS}, could not add box.") - return - point_leftup = [] - point_rightdown = [] - if x > backend_point[0] or y > backend_point[1]: - point_leftup.extend(backend_point) - point_rightdown.extend([x,y]) - else: - point_leftup.extend([x,y]) - point_rightdown.extend(backend_point) - points.append(point_leftup) - points.append(point_rightdown) - print(f"Added box from {point_leftup} to {point_rightdown}.") - labels.append(2) - labels.append(3) - rectangle = False - backend_point.clear() - - marked_img = orig_img.copy() - top_left = None - for (px, py), lbl in zip(points, labels): - if lbl == -1: - cv.circle(marked_img, (px, py), 5, (0, 0, 255), -1) - elif lbl == 1: - cv.circle(marked_img, (px, py), 5, (0, 255, 0), -1) - elif lbl == 2: - top_left = (px, py) - elif lbl == 3: - bottom_right = (px, py) - cv.rectangle(marked_img, top_left, bottom_right, (255,0,0) , 2) - cv.imshow(image_window, marked_img) - current_img = marked_img.copy() - - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - # Load the EfficientSAM model - model = EfficientSAM(modelPath=args.model) - - if args.input is not None: - # Read image - image = 
cv.imread(args.input) - if image is None: - print('Could not open or find the image:', args.input) - exit(0) - # create window - image_window = "Origin image" - cv.namedWindow(image_window, cv.WINDOW_NORMAL) - # change window size - rate = 1 - rate1 = 1 - rate2 = 1 - if(image.shape[1]>WINDOW_SIZE[0]): - rate1 = WINDOW_SIZE[0]/image.shape[1] - if(image.shape[0]>WINDOW_SIZE[1]): - rate2 = WINDOW_SIZE[1]/image.shape[0] - rate = min(rate1, rate2) - # width, height - WINDOW_SIZE = (int(image.shape[1] * rate), int(image.shape[0] * rate)) - cv.resizeWindow(image_window, WINDOW_SIZE[0], WINDOW_SIZE[1]) - # put the window on the left of the screen - cv.moveWindow(image_window, 50, 100) - # set listener to record user's click point - param = { - 'original_img': image, - 'mouse_down_time': 0, - 'image_window' : image_window - } - cv.setMouseCallback(image_window, select, param) - # tips in the terminal - print("Click — Select foreground point\n" - "Long press — Select background point\n" - "Drag — Create selection box\n" - "Enter — Infer\n" - "Backspace — Clear the prompts\n" - "Q - Quit") - # show image - cv.imshow(image_window, image) - current_img = image.copy() - # create window to show visualized result - vis_image = image.copy() - segmentation_window = "Segment result" - cv.namedWindow(segmentation_window, cv.WINDOW_NORMAL) - cv.resizeWindow(segmentation_window, WINDOW_SIZE[0], WINDOW_SIZE[1]) - cv.moveWindow(segmentation_window, WINDOW_SIZE[0]+51, 100) - cv.imshow(segmentation_window, vis_image) - # waiting for click - while True: - # Check window status - # if click × to close the image window then ending - if (cv.getWindowProperty(image_window, cv.WND_PROP_VISIBLE) < 1 or - cv.getWindowProperty(segmentation_window, cv.WND_PROP_VISIBLE) < 1): - break - - # Handle keyboard input - key = cv.waitKey(1) - - # receive enter - if key == 13: - - vis_image = image.copy() - cv.putText(vis_image, "infering...", - (50, vis_image.shape[0]//2), - cv.FONT_HERSHEY_SIMPLEX, 10, 
(255,255,255), 5) - cv.imshow(segmentation_window, vis_image) - - result = model.infer(image=image, points=points, labels=labels) - if len(result) == 0: - print("clear and select points again!") - else: - vis_result = visualize(image, result) - - cv.imshow(segmentation_window, vis_result) - elif key == 8 or key == 127: # ASCII for Backspace or Delete - points.clear() - labels.clear() - backend_point = [] - rectangle = False - current_img = image - print("Points are cleared.") - cv.imshow(image_window, image) - elif key == ord('q') or key == ord('Q'): - break - - cv.destroyAllWindows() - - # Save results if save is true - if args.save: - cv.imwrite('./example_outputs/vis_result.jpg', vis_result) - cv.imwrite("./example_outputs/mask.jpg", result) - print('vis_result.jpg and mask.jpg are saved to ./example_outputs/') - - else: - print('Set input path to a certain image.') - pass - diff --git a/models/image_segmentation_efficientsam/efficientSAM.py b/models/image_segmentation_efficientsam/efficientSAM.py deleted file mode 100644 index 334d8834..00000000 --- a/models/image_segmentation_efficientsam/efficientSAM.py +++ /dev/null @@ -1,136 +0,0 @@ -import numpy as np -import cv2 as cv - -class EfficientSAM: - def __init__(self, modelPath, backendId=0, targetId=0): - self._modelPath = modelPath - self._backendId = backendId - self._targetId = targetId - - self._model = cv.dnn.readNet(self._modelPath) - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - # 3 inputs - self._inputNames = ["batched_images", "batched_point_coords", "batched_point_labels"] - - self._outputNames = ['output_masks', 'iou_predictions'] # actual output layer name - self._currentInputSize = None - self._inputSize = [1024, 1024] # input size for the model - self._maxPointNums = 6 - self._frontGroundPoints = [] - self._backGroundPoints = [] - self._labels = [] - - @property - def name(self): - return self.__class__.__name__ - - def 
setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def _preprocess(self, image, points, labels): - - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - # record the input image size, (width, height) - self._currentInputSize = (image.shape[1], image.shape[0]) - - image = cv.resize(image, self._inputSize) - - image = image.astype(np.float32, copy=False) / 255.0 - - image_blob = cv.dnn.blobFromImage(image) - - points = np.array(points, dtype=np.float32) - labels = np.array(labels, dtype=np.float32) - assert points.shape[0] <= self._maxPointNums, f"Max input points number: {self._maxPointNums}" - assert points.shape[0] == labels.shape[0] - - frontGroundPoints = [] - backGroundPoints = [] - inputLabels = [] - for i in range(len(points)): - if labels[i] == -1: - backGroundPoints.append(points[i]) - else: - frontGroundPoints.append(points[i]) - inputLabels.append(labels[i]) - self._backGroundPoints = np.uint32(backGroundPoints) - # print("input:") - # print(" back: ", self._backGroundPoints) - # print(" front: ", frontGroundPoints) - # print(" label: ", inputLabels) - - # convert points to (1024*1024) size space - for p in frontGroundPoints: - p[0] = np.float32(p[0] * self._inputSize[0]/self._currentInputSize[0]) - p[1] = np.float32(p[1] * self._inputSize[1]/self._currentInputSize[1]) - - if len(frontGroundPoints) > self._maxPointNums: - return "no" - - pad_num = self._maxPointNums - len(frontGroundPoints) - self._frontGroundPoints = np.vstack([frontGroundPoints, np.zeros((pad_num, 2), dtype=np.float32)]) - inputLabels_arr = np.array(inputLabels, dtype=np.float32).reshape(-1, 1) - self._labels = np.vstack([inputLabels_arr, np.full((pad_num, 1), -1, dtype=np.float32)]) - - points_blob = np.array([[self._frontGroundPoints]]) - - labels_blob = np.array([[self._labels]]) - - return image_blob, points_blob, labels_blob - - 
def infer(self, image, points, labels): - # Preprocess - imageBlob, pointsBlob, labelsBlob = self._preprocess(image, points, labels) - # Forward - self._model.setInput(imageBlob, self._inputNames[0]) - self._model.setInput(pointsBlob, self._inputNames[1]) - self._model.setInput(labelsBlob, self._inputNames[2]) - # print("infering...") - outputs = self._model.forward(self._outputNames) - outputBlob, outputIou = outputs[0], outputs[1] - # Postprocess - results = self._postprocess(outputBlob, outputIou) - # print("done") - return results - - def _postprocess(self, outputBlob, outputIou): - # The masks are already sorted by their predicted IOUs. - # The first dimension is the batch size (we have a single image. so it is 1). - # The second dimension is the number of masks we want to generate - # The third dimension is the number of candidate masks output by the model. - masks = outputBlob[0, 0, :, :, :] >= 0 - ious = outputIou[0, 0, :] - - # sorted by ious - sorted_indices = np.argsort(ious)[::-1] - sorted_masks = masks[sorted_indices] - - # sorted by area - # mask_areas = np.sum(masks, axis=(1, 2)) - # sorted_indices = np.argsort(mask_areas) - # sorted_masks = masks[sorted_indices] - - masks_uint8 = (sorted_masks * 255).astype(np.uint8) - - # change to real image size - resized_masks = [ - cv.resize(mask, dsize=self._currentInputSize, - interpolation=cv.INTER_NEAREST) - for mask in masks_uint8 - ] - - # background mask don't need - for mask in resized_masks: - contains_bg = any( - mask[y, x] if (0 <= x < mask.shape[1] and 0 <= y < mask.shape[0]) - else False - for (x, y) in self._backGroundPoints - ) - if not contains_bg: - return mask - - return resized_masks[0] diff --git a/models/image_segmentation_efficientsam/example_outputs/example1.png b/models/image_segmentation_efficientsam/example_outputs/example1.png deleted file mode 100644 index c20d7834..00000000 --- a/models/image_segmentation_efficientsam/example_outputs/example1.png +++ /dev/null @@ -1,3 +0,0 @@ 
-version https://git-lfs.github.com/spec/v1 -oid sha256:70065831fb12915dc5a3b4641019bc152a89d6d5be1887bdf7ada432a04e63c5 -size 1993654 diff --git a/models/image_segmentation_efficientsam/example_outputs/example2.png b/models/image_segmentation_efficientsam/example_outputs/example2.png deleted file mode 100644 index 3b0cb955..00000000 --- a/models/image_segmentation_efficientsam/example_outputs/example2.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dfe6860d701b8b707a96d69b6bfc33fd05167168fbb46594f6377ad4e9c1733e -size 1917383 diff --git a/models/image_segmentation_efficientsam/example_outputs/sam_present.gif b/models/image_segmentation_efficientsam/example_outputs/sam_present.gif deleted file mode 100644 index 403a2817..00000000 --- a/models/image_segmentation_efficientsam/example_outputs/sam_present.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ab75c654d4368d1f4762fc71af35c02b6f0a3e21dca4530d22f92fff4134890c -size 103918 diff --git a/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2024may.onnx b/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2024may.onnx deleted file mode 100644 index e6eb2a47..00000000 --- a/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2024may.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e3957d2cd1422855f350aa7b044f47f5b3eafada64b5904ed330b696229e2943 -size 47777193 diff --git a/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2025april.onnx b/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2025april.onnx deleted file mode 100644 index 2bf444b7..00000000 --- a/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2025april.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:4eb496e0a7259d435b49b66faf1754aa45a5c382a34558ddda9a8c6fe5915d77 -size 48312857 diff --git a/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2025april_int8.onnx b/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2025april_int8.onnx deleted file mode 100644 index 8f7b6907..00000000 --- a/models/image_segmentation_efficientsam/image_segmentation_efficientsam_ti_2025april_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5ecc8d59a2802c32246e68553e1cf8ce74cf74ba707b84f206eb9181ff774b4e -size 20479928 diff --git a/models/inpainting_lama/CMakeLists.txt b/models/inpainting_lama/CMakeLists.txt deleted file mode 100644 index 5eee867c..00000000 --- a/models/inpainting_lama/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.22.1) -project(opencv_zoo_inpainting_lama) - -set(OPENCV_VERSION "5.0.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") - -# Find OpenCV -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) - -add_executable(demo demo.cpp) -target_link_libraries(demo ${OpenCV_LIBS}) diff --git a/models/inpainting_lama/LICENSE b/models/inpainting_lama/LICENSE deleted file mode 100644 index f542a480..00000000 --- a/models/inpainting_lama/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [2021] Samsung Research - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/models/inpainting_lama/README.md b/models/inpainting_lama/README.md deleted file mode 100644 index 4a5c0856..00000000 --- a/models/inpainting_lama/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# Lama - -LaMa is a very lightweight yet powerful image inpainting model. - -Notes: - -- Model source: [ONNX](https://huggingface.co/Carve/LaMa-ONNX/blob/main/lama_fp32.onnx). - -## Requirements -Install latest OpenCV >=5.0.0 and CMake >= 3.22.1 to get started with. - -## Demo - -### Python - -Run the following command to try the demo: - -```shell -# usage -python demo.py --input /path/to/image - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# usage -./build/demo --input=/path/to/image -# get help messages -./build/demo -h -``` - -### Example outputs - -![chicky](./example_outputs/squirrel_output.jpg) - -## License - -All files in this directory are licensed under [Apache License](./LICENSE). 
- -## Reference - -- https://github.com/advimman/lama \ No newline at end of file diff --git a/models/inpainting_lama/demo.cpp b/models/inpainting_lama/demo.cpp deleted file mode 100644 index 303ee251..00000000 --- a/models/inpainting_lama/demo.cpp +++ /dev/null @@ -1,174 +0,0 @@ -/* -This sample inpaints the masked area in the given image. - -Copyright (C) 2025, Bigvision LLC. -*/ - -#include -#include - -#include -#include -#include - -using namespace cv; -using namespace dnn; -using namespace std; - -class Lama { -public: - Lama(const string& modelPath) { - loadModel(modelPath); - } - - // Function to set up the input image and process it - void process(const Mat& image, const Mat& mask, Mat& result) { - double aspectRatio = static_cast(image.rows) / static_cast(image.cols); - - Mat image_blob = blobFromImage(image, 1.0/255.0, Size(512, 512), Scalar(0, 0, 0), false, false, CV_32F); - Mat mask_blob = blobFromImage(mask, 1.0, Size(512, 512), Scalar(0), false, false); - - mask_blob = (mask_blob > 0); - mask_blob.convertTo(mask_blob, CV_32F); - mask_blob = mask_blob/255.0; - - net.setInput(image_blob, "image"); - net.setInput(mask_blob, "mask"); - - Mat output = net.forward(); - - postProcess(output, result, aspectRatio); - } -private: - Net net; - - // Load Model - void loadModel(const string modelPath) { - net = readNetFromONNX(modelPath); - net.setPreferableBackend(DNN_BACKEND_DEFAULT); - net.setPreferableTarget(DNN_TARGET_CPU); - } - - void postProcess(const Mat& output, Mat& result, double aspectRatio) { - Mat output_transposed(3, &output.size[1], CV_32F, const_cast(reinterpret_cast(output.ptr()))); - - vector channels; - for (int i = 0; i < 3; ++i) { - channels.push_back(Mat(output_transposed.size[1], output_transposed.size[2], CV_32F, - output_transposed.ptr(i))); - } - merge(channels, result); - result.convertTo(result, CV_8U); - - int h = static_cast(512 * aspectRatio); - resize(result, result, Size(512, h)); - } -}; - - -const string about = "This sample 
demonstrates image inpainting with lama inpainting technique.\n\n"; - -const string keys = - "{help h | | show help message}" - "{input i | | Path to input image}" - "{ model | inpainting_lama_2024jan.onnx | Path to the lama onnx model file }"; - -bool drawing = false; -Mat maskGray; -int brush_size = 25; - -static void drawMask(int event, int x, int y, int, void*) { - if (event == EVENT_LBUTTONDOWN) { - drawing = true; - } else if (event == EVENT_MOUSEMOVE) { - if (drawing) { - circle(maskGray, Point(x, y), brush_size, Scalar(255), -1); - } - } else if (event == EVENT_LBUTTONUP) { - drawing = false; - } -} - -int main(int argc, char **argv) -{ - CommandLineParser parser(argc, argv, keys); - - if (parser.has("help")) - { - cout<("model"); - - int height = 512; - int width = 512; - int stdSize = 20; - int stdWeight = 400; - int stdImgSize = 512; - int imgWidth = -1; // Initialization - int fontSize = 50; - int fontWeight = 500; - - FontFace fontFace("sans"); - Lama lama(model); - - Mat image = imread(parser.get("input")); - if (image.empty()) { - cerr << "Error: Input image could not be loaded." << endl; - return -1; - } - - imgWidth = min(image.rows, image.cols); - fontSize = min(fontSize, (stdSize*imgWidth)/stdImgSize); - fontWeight = min(fontWeight, (stdWeight*imgWidth)/stdImgSize); - - maskGray = Mat::zeros(image.size(), CV_8U); - - namedWindow("Draw Mask"); - setMouseCallback("Draw Mask", drawMask); - - const string label = "Draw the mask on the image. 
Press space bar when done "; - - for(;;) { - Mat displayImage = image.clone(); - Mat overlay = image.clone(); - - double alpha = 0.5; - Rect r = getTextSize(Size(), label, Point(), fontFace, fontSize, fontWeight); - r.height += 2 * fontSize; // padding - r.width += 10; // padding - rectangle(overlay, r, Scalar::all(255), FILLED); - addWeighted(overlay, alpha, displayImage, 1 - alpha, 0, displayImage); - putText(displayImage, label, Point(10, fontSize), Scalar(0,0,0), fontFace, fontSize, fontWeight); - putText(displayImage, "Press 'i' to increase and 'd' to decrease brush size", Point(10, 2*fontSize), Scalar(0,0,0), fontFace, fontSize, fontWeight); - - displayImage.setTo(Scalar(255, 255, 255), maskGray > 0); // Highlight mask area - imshow("Draw Mask", displayImage); - - char key = waitKey(1); - if (key == 'i') { - brush_size += 1; - cout << "Brush size increased to " << brush_size << endl; - } else if (key == 'd') { - brush_size = max(1, brush_size - 1); - cout << "Brush size decreased to " << brush_size << endl; - } else if (key == ' ') { - break; - } else if (key == 27){ - return -1; - } - } - destroyAllWindows(); - - Mat result; - lama.process(image, maskGray, result); - - imshow("Inpainted Output", result); - waitKey(0); - - return 0; -} diff --git a/models/inpainting_lama/demo.py b/models/inpainting_lama/demo.py deleted file mode 100644 index 82576abd..00000000 --- a/models/inpainting_lama/demo.py +++ /dev/null @@ -1,87 +0,0 @@ -import cv2 as cv -import numpy as np -import argparse -from lama import Lama - -def get_args_parser(func_args): - parser = argparse.ArgumentParser(add_help=False) - parser.add_argument('--input', help='Path to input image', default=0, required=False) - parser.add_argument('--model', help='Path to lama onnx', default='inpainting_lama_2025jan.onnx', required=False) - - parser = argparse.ArgumentParser(parents=[parser], - description='', formatter_class=argparse.RawTextHelpFormatter) - return parser.parse_args(func_args) - -drawing = 
False -mask_gray = None -brush_size = 15 - -def draw_mask(event, x, y, flags, param): - global drawing, mask_gray, brush_size - if event == cv.EVENT_LBUTTONDOWN: - drawing = True - elif event == cv.EVENT_MOUSEMOVE: - if drawing: - cv.circle(mask_gray, (x, y), brush_size, (255), thickness=-1) - elif event == cv.EVENT_LBUTTONUP: - drawing = False - -def main(func_args=None): - global mask_gray, brush_size - args = get_args_parser(func_args) - - lama = Lama(modelPath=args.model) - input_image = cv.imread(args.input) - mask_gray = np.zeros((input_image.shape[0], input_image.shape[1]), dtype=np.uint8) - - stdSize = 0.6 - stdWeight = 2 - stdImgSize = 512 - imgWidth = min(input_image.shape[:2]) - fontSize = min(1.5, (stdSize*imgWidth)/stdImgSize) - fontThickness = max(1,(stdWeight*imgWidth)//stdImgSize) - - cv.namedWindow("Draw Mask") - cv.setMouseCallback("Draw Mask", draw_mask) - - label = "Draw the mask on the image. Press space bar when done." - labelSize, _ = cv.getTextSize(label, cv.FONT_HERSHEY_SIMPLEX, fontSize, fontThickness) - while True: - display_image = input_image.copy() - overlay = input_image.copy() - - alpha = 0.5 - cv.rectangle(overlay, (0, 0), (labelSize[0]+10, labelSize[1]+int(30*fontSize)), (255, 255, 255), cv.FILLED) - cv.addWeighted(overlay, alpha, display_image, 1 - alpha, 0, display_image) - - cv.putText(display_image, label, (10, int(25*fontSize)), cv.FONT_HERSHEY_SIMPLEX, fontSize, (0, 0, 0), fontThickness) - cv.putText(display_image, "Press 'i' to increase and 'd' to decrease brush size.", (10, int(50*fontSize)), cv.FONT_HERSHEY_SIMPLEX, fontSize, (0, 0, 0), fontThickness) - display_image[mask_gray > 0] = [255, 255, 255] - cv.imshow("Draw Mask", display_image) - - key = cv.waitKey(1) & 0xFF - if key == ord('i'): # Increase brush size - brush_size += 1 - print(f"Brush size increased to {brush_size}") - elif key == ord('d'): # Decrease brush size - brush_size = max(1, brush_size - 1) - print(f"Brush size decreased to {brush_size}") - elif key == 
ord(' '): # Press space bar to finish drawing - break - elif key == 27: - exit() - cv.destroyAllWindows() - - tm = cv.TickMeter() - tm.start() - result = lama.infer(input_image, mask_gray) - tm.stop() - label = 'Inference time: {:.2f} ms'.format(tm.getTimeMilli()) - cv.putText(result, label, (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 0)) - - cv.imshow("Inpainted Output", result) - cv.waitKey(0) - cv.destroyAllWindows() - -if __name__ == '__main__': - main() diff --git a/models/inpainting_lama/example_outputs/squirrel.jpg b/models/inpainting_lama/example_outputs/squirrel.jpg deleted file mode 100644 index 0a3909e3..00000000 --- a/models/inpainting_lama/example_outputs/squirrel.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:20bb6e8ae96918a36c9886b6d48e54eedeb3948591e1485c206bc1dc60c8dc8b -size 62311 diff --git a/models/inpainting_lama/example_outputs/squirrel_output.jpg b/models/inpainting_lama/example_outputs/squirrel_output.jpg deleted file mode 100644 index 982f019e..00000000 --- a/models/inpainting_lama/example_outputs/squirrel_output.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:aaa765b3ef286f8de34efc7302e49b078d720c3eb6adf79ee8a2df73f3889f52 -size 63086 diff --git a/models/inpainting_lama/inpainting_lama_2025jan.onnx b/models/inpainting_lama/inpainting_lama_2025jan.onnx deleted file mode 100644 index 425f3a0e..00000000 --- a/models/inpainting_lama/inpainting_lama_2025jan.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7df918ac3921d3daf0aae1d219776cf0dc4e4935f035af81841b40adcf74fdf2 -size 92591623 diff --git a/models/inpainting_lama/lama.py b/models/inpainting_lama/lama.py deleted file mode 100644 index c242ef0c..00000000 --- a/models/inpainting_lama/lama.py +++ /dev/null @@ -1,43 +0,0 @@ -import cv2 as cv -import numpy as np - -class Lama: - def __init__(self, modelPath='inpainting_lama_2025jan.onnx', backendId=0, targetId=0): - 
self._modelPath = modelPath - self._backendId = backendId - self._targetId = targetId - - # Load the model - self._model = cv.dnn.readNetFromONNX(self._modelPath) - self.setBackendAndTarget(self._backendId, self._targetId) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def infer(self, image, mask): - image_blob = cv.dnn.blobFromImage(image, 0.00392, (512, 512), (0,0,0), False, False) - mask_blob = cv.dnn.blobFromImage(mask, scalefactor=1.0, size=(512, 512), mean=(0,), swapRB=False, crop=False) - mask_blob = (mask_blob > 0).astype(np.float32) - - self._model.setInput(image_blob, "image") - self._model.setInput(mask_blob, "mask") - - output = self._model.forward() - - # Postprocessing - aspect_ratio = image.shape[0]/image.shape[1] - result = output[0] - result = np.transpose(result, (1, 2, 0)) - result = (result).astype(np.uint8) - width = result.shape[1] - height = int(width*aspect_ratio) - result = cv.resize(result, (width, height)) - - return result diff --git a/models/license_plate_detection_yunet/LICENSE b/models/license_plate_detection_yunet/LICENSE deleted file mode 100644 index 5e53223d..00000000 --- a/models/license_plate_detection_yunet/LICENSE +++ /dev/null @@ -1,203 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright (c) 2022 WATRIX - Author: Dong Xu - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/license_plate_detection_yunet/README.md b/models/license_plate_detection_yunet/README.md deleted file mode 100644 index df1cbceb..00000000 --- a/models/license_plate_detection_yunet/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# License Plate Detection with YuNet - -This model is contributed by Dong Xu (徐栋) from [watrix.ai](watrix.ai) (银河水滴). - -Please note that the model is trained with Chinese license plates, so the detection results of other license plates with this model may be limited. - -**Note**: -- `license_plate_detection_lpd_yunet_2023mar_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. 
- -## Demo - -Run the following command to try the demo: - -```shell -# detect on camera input -python demo.py -# detect on an image -python demo.py --input /path/to/image -v -# get help regarding various parameters -python demo.py --help -``` - -### Example outputs - -![lpd](./example_outputs/lpd_yunet_demo.gif) - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE) - -## Reference - -- https://github.com/ShiqiYu/libfacedetection.train diff --git a/models/license_plate_detection_yunet/demo.py b/models/license_plate_detection_yunet/demo.py deleted file mode 100644 index 066b63a4..00000000 --- a/models/license_plate_detection_yunet/demo.py +++ /dev/null @@ -1,130 +0,0 @@ -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from lpd_yunet import LPD_YuNet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='LPD-YuNet for License Plate Detection') -parser.add_argument('--input', '-i', type=str, - help='Usage: Set path to the input image. 
Omit for using default camera.') -parser.add_argument('--model', '-m', type=str, default='license_plate_detection_lpd_yunet_2023mar.onnx', - help='Usage: Set model path, defaults to license_plate_detection_lpd_yunet_2023mar.onnx.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--conf_threshold', type=float, default=0.9, - help='Usage: Set the minimum needed confidence for the model to identify a license plate, defaults to 0.9. Smaller values may result in faster detection, but will limit accuracy. Filter out faces of confidence < conf_threshold.') -parser.add_argument('--nms_threshold', type=float, default=0.3, - help='Usage: Suppress bounding boxes of iou >= nms_threshold. Default = 0.3. Suppress bounding boxes of iou >= nms_threshold.') -parser.add_argument('--top_k', type=int, default=5000, - help='Usage: Keep top_k bounding boxes before NMS.') -parser.add_argument('--keep_top_k', type=int, default=750, - help='Usage: Keep keep_top_k bounding boxes after NMS.') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. 
Invalid in case of camera input.') -args = parser.parse_args() - -def visualize(image, dets, line_color=(0, 255, 0), text_color=(0, 0, 255), fps=None): - output = image.copy() - - if fps is not None: - cv.putText(output, 'FPS: {:.2f}'.format(fps), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, text_color) - - for det in dets: - bbox = det[:-1].astype(np.int32) - x1, y1, x2, y2, x3, y3, x4, y4 = bbox - - # Draw the border of license plate - cv.line(output, (x1, y1), (x2, y2), line_color, 2) - cv.line(output, (x2, y2), (x3, y3), line_color, 2) - cv.line(output, (x3, y3), (x4, y4), line_color, 2) - cv.line(output, (x4, y4), (x1, y1), line_color, 2) - - return output - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # Instantiate LPD-YuNet - model = LPD_YuNet(modelPath=args.model, - confThreshold=args.conf_threshold, - nmsThreshold=args.nms_threshold, - topK=args.top_k, - keepTopK=args.keep_top_k, - backendId=backend_id, - targetId=target_id) - - # If input is an image - if args.input is not None: - image = cv.imread(args.input) - h, w, _ = image.shape - - # Inference - model.setInputSize([w, h]) - results = model.infer(image) - - # Print results - print('{} license plates detected.'.format(results.shape[0])) - - # Draw results on the input image - image = visualize(image, results) - - # Save results if save is true - if args.save: - print('Resutls saved to result.jpg') - cv.imwrite('result.jpg', image) - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - w = int(cap.get(cv.CAP_PROP_FRAME_WIDTH)) - h = int(cap.get(cv.CAP_PROP_FRAME_HEIGHT)) - model.setInputSize([w, h]) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames 
grabbed!') - break - - # Inference - tm.start() - results = model.infer(frame) # results is a tuple - tm.stop() - - # Draw results on the input image - frame = visualize(frame, results, fps=tm.getFPS()) - - # Visualize results in a new Window - cv.imshow('LPD-YuNet Demo', frame) - - tm.reset() diff --git a/models/license_plate_detection_yunet/example_outputs/lpd_yunet_demo.gif b/models/license_plate_detection_yunet/example_outputs/lpd_yunet_demo.gif deleted file mode 100644 index f62dc8f1..00000000 --- a/models/license_plate_detection_yunet/example_outputs/lpd_yunet_demo.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e878ac62d49fca94f7eccaa5ac0b60e97508ef8225744a6a898f1bc833cee314 -size 300669 diff --git a/models/license_plate_detection_yunet/example_outputs/result-1.jpg b/models/license_plate_detection_yunet/example_outputs/result-1.jpg deleted file mode 100644 index 6f371d3b..00000000 --- a/models/license_plate_detection_yunet/example_outputs/result-1.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ff4f66031aa7ac82f1e218791c89e0655f3bdaf226a2b7272f0d48b5a62cb083 -size 58506 diff --git a/models/license_plate_detection_yunet/example_outputs/result-2.jpg b/models/license_plate_detection_yunet/example_outputs/result-2.jpg deleted file mode 100644 index 0b876b35..00000000 --- a/models/license_plate_detection_yunet/example_outputs/result-2.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b6a18a5593bdd3794d7c484a9eb1a97f418d0daa5a35938d092e805a10c2df44 -size 55650 diff --git a/models/license_plate_detection_yunet/example_outputs/result-3.jpg b/models/license_plate_detection_yunet/example_outputs/result-3.jpg deleted file mode 100644 index 47f0ba0b..00000000 --- a/models/license_plate_detection_yunet/example_outputs/result-3.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:4e9c8cc2ff8272075b73c1352fc93fb5b802737d2a89eefee47859f9737e5640 -size 63523 diff --git a/models/license_plate_detection_yunet/example_outputs/result-4.jpg b/models/license_plate_detection_yunet/example_outputs/result-4.jpg deleted file mode 100644 index f9afaf44..00000000 --- a/models/license_plate_detection_yunet/example_outputs/result-4.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6257486bd6e08c6c1fd80874ad7cc8be2d1ed06e288d16670a04b9b8acb18530 -size 52606 diff --git a/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar.onnx b/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar.onnx deleted file mode 100644 index 8e7b5cc8..00000000 --- a/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6d4978a7b6d25514d5e24811b82bfb511d166bdd8ca3b03aa63c1623d4d039c7 -size 4146213 diff --git a/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar_int8.onnx b/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar_int8.onnx deleted file mode 100644 index 94c15dc1..00000000 --- a/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d67982a014fe93ad04612f565ed23ca010dcb0fd925d880ef0edf9cd7bdf931a -size 1087142 diff --git a/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar_int8bq.onnx b/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar_int8bq.onnx deleted file mode 100644 index 7fee23c0..00000000 --- a/models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:8a346e9db6a085a79848903a95cb902b2ab01d47972057f1cf71ede095410b49 -size 1185866 diff --git a/models/license_plate_detection_yunet/lpd_yunet.py b/models/license_plate_detection_yunet/lpd_yunet.py deleted file mode 100644 index 917e58a3..00000000 --- a/models/license_plate_detection_yunet/lpd_yunet.py +++ /dev/null @@ -1,136 +0,0 @@ -from itertools import product - -import numpy as np -import cv2 as cv - -class LPD_YuNet: - def __init__(self, modelPath, inputSize=[320, 240], confThreshold=0.8, nmsThreshold=0.3, topK=5000, keepTopK=750, backendId=0, targetId=0): - self.model_path = modelPath - self.input_size = np.array(inputSize) - self.confidence_threshold=confThreshold - self.nms_threshold = nmsThreshold - self.top_k = topK - self.keep_top_k = keepTopK - self.backend_id = backendId - self.target_id = targetId - - self.output_names = ['loc', 'conf', 'iou'] - self.min_sizes = [[10, 16, 24], [32, 48], [64, 96], [128, 192, 256]] - self.steps = [8, 16, 32, 64] - self.variance = [0.1, 0.2] - - # load model - self.model = cv.dnn.readNet(self.model_path) - # set backend and target - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - # generate anchors/priorboxes - self._priorGen() - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self.backend_id = backendId - self.target_id = targetId - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - def setInputSize(self, inputSize): - self.input_size = inputSize - # re-generate anchors/priorboxes - self._priorGen() - - def _preprocess(self, image): - return cv.dnn.blobFromImage(image) - - def infer(self, image): - assert image.shape[0] == self.input_size[1], '{} (height of input image) != {} (preset height)'.format(image.shape[0], self.input_size[1]) - assert image.shape[1] == self.input_size[0], '{} (width of input image) != {} (preset 
width)'.format(image.shape[1], self.input_size[0]) - - # Preprocess - inputBlob = self._preprocess(image) - - # Forward - self.model.setInput(inputBlob) - outputBlob = self.model.forward(self.output_names) - - # Postprocess - results = self._postprocess(outputBlob) - - return results - - def _postprocess(self, blob): - # Decode - dets = self._decode(blob) - - # NMS - keepIdx = cv.dnn.NMSBoxes( - bboxes=dets[:, 0:4].tolist(), - scores=dets[:, -1].tolist(), - score_threshold=self.confidence_threshold, - nms_threshold=self.nms_threshold, - top_k=self.top_k - ) # box_num x class_num - if len(keepIdx) > 0: - dets = dets[keepIdx] - return dets[:self.keep_top_k] - else: - return np.empty(shape=(0, 9)) - - def _priorGen(self): - w, h = self.input_size - feature_map_2th = [int(int((h + 1) / 2) / 2), - int(int((w + 1) / 2) / 2)] - feature_map_3th = [int(feature_map_2th[0] / 2), - int(feature_map_2th[1] / 2)] - feature_map_4th = [int(feature_map_3th[0] / 2), - int(feature_map_3th[1] / 2)] - feature_map_5th = [int(feature_map_4th[0] / 2), - int(feature_map_4th[1] / 2)] - feature_map_6th = [int(feature_map_5th[0] / 2), - int(feature_map_5th[1] / 2)] - - feature_maps = [feature_map_3th, feature_map_4th, - feature_map_5th, feature_map_6th] - - priors = [] - for k, f in enumerate(feature_maps): - min_sizes = self.min_sizes[k] - for i, j in product(range(f[0]), range(f[1])): # i->h, j->w - for min_size in min_sizes: - s_kx = min_size / w - s_ky = min_size / h - - cx = (j + 0.5) * self.steps[k] / w - cy = (i + 0.5) * self.steps[k] / h - - priors.append([cx, cy, s_kx, s_ky]) - self.priors = np.array(priors, dtype=np.float32) - - def _decode(self, blob): - loc, conf, iou = blob - # get score - cls_scores = conf[:, 1] - iou_scores = iou[:, 0] - # clamp - _idx = np.where(iou_scores < 0.) - iou_scores[_idx] = 0. - _idx = np.where(iou_scores > 1.) - iou_scores[_idx] = 1. 
- scores = np.sqrt(cls_scores * iou_scores) - scores = scores[:, np.newaxis] - - scale = self.input_size - - # get four corner points for bounding box - bboxes = np.hstack(( - (self.priors[:, 0:2] + loc[:, 4: 6] * self.variance[0] * self.priors[:, 2:4]) * scale, - (self.priors[:, 0:2] + loc[:, 6: 8] * self.variance[0] * self.priors[:, 2:4]) * scale, - (self.priors[:, 0:2] + loc[:, 10:12] * self.variance[0] * self.priors[:, 2:4]) * scale, - (self.priors[:, 0:2] + loc[:, 12:14] * self.variance[0] * self.priors[:, 2:4]) * scale - )) - - dets = np.hstack((bboxes, scores)) - return dets diff --git a/models/object_detection_nanodet/CMakeLists.txt b/models/object_detection_nanodet/CMakeLists.txt deleted file mode 100644 index 332f487d..00000000 --- a/models/object_detection_nanodet/CMakeLists.txt +++ /dev/null @@ -1,32 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(project_name "opencv_zoo_object_detection_nanodet") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. 
-# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Set C++ compilation standard to C++11 -set(CMAKE_CXX_STANDARD 11) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/object_detection_nanodet/LICENSE b/models/object_detection_nanodet/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/models/object_detection_nanodet/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/object_detection_nanodet/README.md b/models/object_detection_nanodet/README.md deleted file mode 100644 index 533c0938..00000000 --- a/models/object_detection_nanodet/README.md +++ /dev/null @@ -1,142 +0,0 @@ -# Nanodet - -Nanodet: NanoDet is a FCOS-style one-stage anchor-free object detection model which using Generalized Focal Loss as classification and regression loss.In NanoDet-Plus, we propose a novel label assignment strategy with a simple assign guidance module (AGM) and a dynamic soft label assigner (DSLA) to solve the optimal label assignment problem in lightweight model training. - -**Note**: -- This version of nanodet: Nanodet-m-plus-1.5x_416 -- `object_detection_nanodet_2022nov_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - - -## Demo - -### Python - -Run the following command to try the demo: -```shell -# detect on camera input -python demo.py -# detect on an image -python demo.py --input /path/to/image -v -``` -Note: -- image result saved as "result.jpg" - -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . 
-cmake --build build - -# detect on camera input -./build/opencv_zoo_object_detection_nanodet -# detect on an image -./build/opencv_zoo_object_detection_nanodet -i=/path/to/image -# get help messages -./build/opencv_zoo_object_detection_nanodet -h -``` - - -## Results - -Here are some of the sample results that were observed using the model, - -![test1_res.jpg](./example_outputs/1_res.jpg) -![test2_res.jpg](./example_outputs/2_res.jpg) - -Check [benchmark/download_data.py](../../benchmark/download_data.py) for the original images. - -Video inference result, -![WebCamR.gif](./example_outputs/WebCamR.gif) - -## Model metrics: - -The model is evaluated on [COCO 2017 val](https://cocodataset.org/#download). Results are showed below: - - - -
Average Precision Average Recall
- -| area | IoU | Average Precision(AP) | -|:-------|:------|:------------------------| -| all | 0.50:0.95 | 0.304 | -| all | 0.50 | 0.459 | -| all | 0.75 | 0.317 | -| small | 0.50:0.95 | 0.107 | -| medium | 0.50:0.95 | 0.322 | -| large | 0.50:0.95 | 0.478 | - - - - area | IoU | Average Recall | -|:-------|:------|:----------------| -| all | 0.50:0.95 | 0.278 | -| all | 0.50:0.95 | 0.434 | -| all | 0.50:0.95 | 0.462 | -| small | 0.50:0.95 | 0.198 | -| medium | 0.50:0.95 | 0.510 | -| large | 0.50:0.95 | 0.702 | -
- -| class | AP50 | mAP | class | AP50 | mAP | -|:--------------|:-------|:------|:---------------|:-------|:------| -| person | 67.5 | 41.8 | bicycle | 35.4 | 18.8 | -| car | 45.0 | 25.4 | motorcycle | 58.9 | 33.1 | -| airplane | 77.3 | 58.9 | bus | 68.8 | 56.4 | -| train | 81.1 | 60.5 | truck | 38.6 | 24.7 | -| boat | 35.5 | 16.7 | traffic light | 30.5 | 14.0 | -| fire hydrant | 69.8 | 54.5 | stop sign | 60.9 | 54.6 | -| parking meter | 55.1 | 38.5 | bench | 26.8 | 15.9 | -| bird | 38.3 | 23.6 | cat | 82.5 | 62.1 | -| dog | 67.0 | 51.4 | horse | 64.3 | 44.2 | -| sheep | 57.7 | 35.8 | cow | 61.2 | 39.9 | -| elephant | 79.9 | 56.2 | bear | 81.8 | 63.0 | -| zebra | 85.4 | 59.5 | giraffe | 84.1 | 59.9 | -| backpack | 12.4 | 5.9 | umbrella | 46.5 | 28.8 | -| handbag | 8.4 | 3.7 | tie | 35.2 | 19.6 | -| suitcase | 38.1 | 23.8 | frisbee | 60.7 | 43.9 | -| skis | 30.5 | 14.5 | snowboard | 32.3 | 18.2 | -| sports ball | 37.6 | 24.5 | kite | 51.1 | 30.4 | -| baseball bat | 28.9 | 13.6 | baseball glove | 40.1 | 21.6 | -| skateboard | 59.4 | 35.2 | surfboard | 47.9 | 26.6 | -| tennis racket | 55.2 | 30.5 | bottle | 34.7 | 20.2 | -| wine glass | 27.8 | 16.3 | cup | 35.5 | 23.7 | -| fork | 25.9 | 14.8 | knife | 10.9 | 5.6 | -| spoon | 8.7 | 4.1 | bowl | 42.8 | 29.4 | -| banana | 35.5 | 18.5 | apple | 19.4 | 12.9 | -| sandwich | 46.7 | 33.4 | orange | 35.2 | 25.9 | -| broccoli | 36.4 | 19.1 | carrot | 30.9 | 17.8 | -| hot dog | 42.7 | 29.3 | pizza | 61.0 | 44.9 | -| donut | 47.3 | 34.0 | cake | 39.9 | 24.4 | -| chair | 28.8 | 16.1 | couch | 60.5 | 42.6 | -| potted plant | 29.0 | 15.3 | bed | 63.3 | 46.0 | -| dining table | 39.6 | 27.5 | toilet | 71.3 | 55.3 | -| tv | 66.5 | 48.1 | laptop | 62.6 | 46.9 | -| mouse | 63.5 | 44.1 | remote | 19.8 | 10.3 | -| keyboard | 62.1 | 41.5 | cell phone | 33.7 | 22.8 | -| microwave | 54.9 | 39.6 | oven | 48.1 | 30.4 | -| toaster | 30.0 | 16.4 | sink | 44.5 | 27.8 | -| refrigerator | 63.2 | 46.1 | book | 18.4 | 7.3 | -| clock | 57.8 | 35.8 | 
vase | 33.7 | 22.1 | -| scissors | 27.8 | 17.8 | teddy bear | 54.1 | 35.4 | -| hair drier | 2.9 | 1.1 | toothbrush | 13.1 | 8.2 | - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). - -#### Contributor Details - -- Google Summer of Code'22 -- Contributor: Sri Siddarth Chakaravarthy -- Github Profile: https://github.com/Sidd1609 -- Organisation: OpenCV -- Project: Lightweight object detection models using OpenCV - -## Reference - -- Nanodet: https://zhuanlan.zhihu.com/p/306530300 -- Nanodet Plus: https://zhuanlan.zhihu.com/p/449912627 -- Nanodet weight and scripts for training: https://github.com/RangiLyu/nanodet diff --git a/models/object_detection_nanodet/demo.cpp b/models/object_detection_nanodet/demo.cpp deleted file mode 100644 index cce9165c..00000000 --- a/models/object_detection_nanodet/demo.cpp +++ /dev/null @@ -1,503 +0,0 @@ -#include -#include -#include - -#include - -using namespace std; -using namespace cv; -using namespace dnn; - -const auto backendTargetPairs = vector> -{ - {DNN_BACKEND_OPENCV, DNN_TARGET_CPU}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA_FP16}, - {DNN_BACKEND_TIMVX, DNN_TARGET_NPU}, - {DNN_BACKEND_CANN, DNN_TARGET_NPU} -}; - -const vector nanodetClassLabels = -{ - "person", "bicycle", "car", "motorcycle", "airplane", "bus", - "train", "truck", "boat", "traffic light", "fire hydrant", - "stop sign", "parking meter", "bench", "bird", "cat", "dog", - "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", - "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", - "skis", "snowboard", "sports ball", "kite", "baseball bat", - "baseball glove", "skateboard", "surfboard", "tennis racket", - "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", - "banana", "apple", "sandwich", "orange", "broccoli", "carrot", - "hot dog", "pizza", "donut", "cake", "chair", "couch", - "potted plant", "bed", "dining table", "toilet", "tv", "laptop", - 
"mouse", "remote", "keyboard", "cell phone", "microwave", - "oven", "toaster", "sink", "refrigerator", "book", "clock", - "vase", "scissors", "teddy bear", "hair drier", "toothbrush" -}; - -class NanoDet -{ -public: - NanoDet(const String& modelPath, const float probThresh = 0.35, const float iouThresh = 0.6, - const Backend bId = DNN_BACKEND_DEFAULT, const Target tId = DNN_TARGET_CPU) : - modelPath(modelPath), probThreshold(probThresh), - iouThreshold(iouThresh), backendId(bId), targetId(tId), - imageShape(416, 416), regMax(7) - { - this->strides = { 8, 16, 32, 64 }; - this->net = readNet(modelPath); - this->net.setPreferableBackend(bId); - this->net.setPreferableTarget(tId); - this->project = Mat::zeros(1, this->regMax + 1, CV_32F); - for (size_t i = 0; i <= this->regMax; ++i) - { - this->project.at(0, i) = static_cast(i); - } - this->mean = Scalar(103.53, 116.28, 123.675); - this->std = Scalar(1.0 / 57.375, 1.0 / 57.12, 1.0 / 58.395); - this->generateAnchors(); - } - - Mat preProcess(const Mat& inputImage) - { - Image2BlobParams paramNanodet; - paramNanodet.datalayout = DNN_LAYOUT_NCHW; - paramNanodet.ddepth = CV_32F; - paramNanodet.mean = this->mean; - paramNanodet.scalefactor = this->std; - paramNanodet.size = this->imageShape; - Mat blob; - blobFromImageWithParams(inputImage, blob, paramNanodet); - return blob; - } - - Mat infer(const Mat& sourceImage) - { - Mat blob = this->preProcess(sourceImage); - this->net.setInput(blob); - vector modelOutput; - this->net.forward(modelOutput, this->net.getUnconnectedOutLayersNames()); - Mat preds = this->postProcess(modelOutput); - return preds; - } - - Mat reshapeIfNeeded(const Mat& input) - { - if (input.dims == 3) - { - return input.reshape(0, input.size[1]); - } - return input; - } - - Mat softmaxActivation(const Mat& input) - { - Mat x_exp, x_sum, x_repeat_sum, result; - exp(input.reshape(0, input.total() / (this->regMax + 1)), x_exp); - reduce(x_exp, x_sum, 1, REDUCE_SUM, CV_32F); - repeat(x_sum, 1, this->regMax + 
1, x_repeat_sum); - divide(x_exp, x_repeat_sum, result); - return result; - } - - Mat applyProjection(Mat& input) - { - Mat repeat_project; - repeat(this->project, input.rows, 1, repeat_project); - multiply(input, repeat_project, input); - reduce(input, input, 1, REDUCE_SUM, CV_32F); - Mat projection = input.col(0).clone(); - return projection.reshape(0, projection.total() / 4); - } - - void preNMS(Mat& anchors, Mat& bbox_pred, Mat& cls_score, const int nms_pre = 1000) - { - Mat max_scores; - reduce(cls_score, max_scores, 1, REDUCE_MAX); - - Mat indices; - sortIdx(max_scores.t(), indices, SORT_DESCENDING); - - Mat indices_float = indices.colRange(0, nms_pre); - Mat selected_anchors, selected_bbox_pred, selected_cls_score; - for (int j = 0; j < indices_float.cols; ++j) - { - selected_anchors.push_back(anchors.row(indices_float.at(j))); - selected_bbox_pred.push_back(bbox_pred.row(indices_float.at(j))); - selected_cls_score.push_back(cls_score.row(indices_float.at(j))); - } - - anchors = selected_anchors; - bbox_pred = selected_bbox_pred; - cls_score = selected_cls_score; - } - - void clipBoundingBoxes(Mat& x1, Mat& y1, Mat& x2, Mat& y2) - { - Mat zeros = Mat::zeros(x1.size(), x1.type()); - x1 = min(max(x1, zeros), Scalar(this->imageShape.width - 1)); - y1 = min(max(y1, zeros), Scalar(this->imageShape.height - 1)); - x2 = min(max(x2, zeros), Scalar(this->imageShape.width - 1)); - y2 = min(max(y2, zeros), Scalar(this->imageShape.height - 1)); - } - - Mat calculateBoundingBoxes(const Mat& anchors, const Mat& bbox_pred) - { - Mat x1 = anchors.col(0) - bbox_pred.col(0); - Mat y1 = anchors.col(1) - bbox_pred.col(1); - Mat x2 = anchors.col(0) + bbox_pred.col(2); - Mat y2 = anchors.col(1) + bbox_pred.col(3); - - clipBoundingBoxes(x1, y1, x2, y2); - - Mat bboxes; - hconcat(vector{x1, y1, x2, y2}, bboxes); - - return bboxes; - } - - vector bboxMatToRect2d(const Mat& bboxes) - { - Mat bboxes_wh(bboxes.clone()); - bboxes_wh.colRange(2, 4) = bboxes_wh.colRange(2, 4) -= 
bboxes_wh.colRange(0, 2); - vector boxesXYXY; - for (size_t i = 0; i < bboxes_wh.rows; i++) - { - boxesXYXY.emplace_back(bboxes.at(i, 0), - bboxes.at(i, 1), - bboxes.at(i, 2), - bboxes.at(i, 3)); - } - return boxesXYXY; - } - - Mat postProcess(const vector& preds) - { - vector cls_scores, bbox_preds; - for (size_t i = 0; i < preds.size(); i += 2) - { - cls_scores.push_back(preds[i]); - bbox_preds.push_back(preds[i + 1]); - } - - vector bboxes_mlvl; - vector scores_mlvl; - - for (size_t i = 0; i < strides.size(); ++i) - { - if (i >= cls_scores.size() || i >= bbox_preds.size()) continue; - // Extract necessary data - int stride = strides[i]; - Mat cls_score = reshapeIfNeeded(cls_scores[i]); - Mat bbox_pred = reshapeIfNeeded(bbox_preds[i]); - Mat anchors = anchorsMlvl[i].t(); - - // Softmax activation, projection, and calculate bounding boxes - bbox_pred = softmaxActivation(bbox_pred); - bbox_pred = applyProjection(bbox_pred); - bbox_pred = stride * bbox_pred; - - const int nms_pre = 1000; - if (nms_pre > 0 && cls_score.rows > nms_pre) - { - preNMS(anchors, bbox_pred, cls_score, nms_pre); - } - - Mat bboxes = calculateBoundingBoxes(anchors, bbox_pred); - - - bboxes_mlvl.push_back(bboxes); - scores_mlvl.push_back(cls_score); - } - Mat bboxes; - Mat scores; - vconcat(bboxes_mlvl, bboxes); - vconcat(scores_mlvl, scores); - - vector boxesXYXY = bboxMatToRect2d(bboxes); - vector classIds; - vector confidences; - for (size_t i = 0; i < scores.rows; ++i) - { - Point maxLoc; - minMaxLoc(scores.row(i), nullptr, nullptr, nullptr, &maxLoc); - classIds.push_back(maxLoc.x); - confidences.push_back(scores.at(i, maxLoc.x)); - } - - vector indices; - NMSBoxesBatched(boxesXYXY, confidences, classIds, probThreshold, iouThreshold, indices); - Mat result(int(indices.size()), 6, CV_32FC1); - int row = 0; - for (auto idx : indices) - { - bboxes.rowRange(idx, idx + 1).copyTo(result(Rect(0, row, 4, 1))); - result.at(row, 4) = confidences[idx]; - result.at(row, 5) = 
static_cast(classIds[idx]); - row++; - } - if (indices.size() == 0) - { - return Mat(); - } - return result; - } - - void generateAnchors() - { - for (const int stride : strides) { - int feat_h = this->imageShape.height / stride; - int feat_w = this->imageShape.width / stride; - - vector anchors; - - for (int y = 0; y < feat_h; ++y) - { - for (int x = 0; x < feat_w; ++x) - { - float shift_x = x * stride; - float shift_y = y * stride; - float cx = shift_x + 0.5 * (stride - 1); - float cy = shift_y + 0.5 * (stride - 1); - Mat anchor_point = (Mat_(2, 1) << cx, cy); - anchors.push_back(anchor_point); - } - } - Mat anchors_mat; - hconcat(anchors, anchors_mat); - this->anchorsMlvl.push_back(anchors_mat); - } - } -private: - Net net; - String modelPath; - vector strides; - Size imageShape; - int regMax; - float probThreshold; - float iouThreshold; - Backend backendId; - Target targetId; - Mat project; - Scalar mean; - Scalar std; - vector anchorsMlvl; -}; - -// Function to resize and pad an image and return both the image and scale information -tuple> letterbox(const Mat& sourceImage, const Size& target_size = Size(416, 416)) -{ - Mat img = sourceImage.clone(); - - double top = 0, left = 0, newh = target_size.height, neww = target_size.width; - - if (img.rows != img.cols) - { - double hw_scale = static_cast(img.rows) / img.cols; - if (hw_scale > 1) - { - newh = target_size.height; - neww = static_cast(target_size.width / hw_scale); - resize(img, img, Size(neww, newh), 0, 0, INTER_AREA); - left = static_cast((target_size.width - neww) * 0.5); - copyMakeBorder(img, img, 0, 0, left, target_size.width - neww - left, BORDER_CONSTANT, Scalar(0)); - } - else - { - newh = static_cast(target_size.height * hw_scale); - neww = target_size.width; - resize(img, img, Size(neww, newh), 0, 0, INTER_AREA); - top = static_cast((target_size.height - newh) * 0.5); - copyMakeBorder(img, img, top, target_size.height - newh - top, 0, 0, BORDER_CONSTANT, Scalar(0)); - } - } - else - { - 
resize(img, img, target_size, 0, 0, INTER_AREA); - } - vector letterbox_scale = {top, left, newh, neww}; - - return make_tuple(img, letterbox_scale); -} - -// Function to scale bounding boxes back to original image coordinates -vector unletterbox(const Mat& bbox, const Size& original_image_shape, const vector& letterbox_scale) -{ - vector ret(bbox.cols); - - int h = original_image_shape.height; - int w = original_image_shape.width; - double top = letterbox_scale[0]; - double left = letterbox_scale[1]; - double newh = letterbox_scale[2]; - double neww = letterbox_scale[3]; - - if (h == w) - { - double ratio = static_cast(h) / newh; - for (int& val : ret) - { - val = static_cast(val * ratio); - } - return ret; - } - - double ratioh = static_cast(h) / newh; - double ratiow = static_cast(w) / neww; - ret[0] = max(static_cast((bbox.at(0) - left) * ratiow), 0); - ret[1] = max(static_cast((bbox.at(1) - top) * ratioh), 0); - ret[2] = min(static_cast((bbox.at(2) - left) * ratiow), w); - ret[3] = min(static_cast((bbox.at(3) - top) * ratioh), h); - - return ret; -} - -// Function to visualize predictions on an image -Mat visualize(const Mat& preds, const Mat& result_image, const vector& letterbox_scale, bool video, double fps = 0.0) -{ - Mat visualized_image = result_image.clone(); - - // Draw FPS if provided - if (fps > 0.0 && video) - { - std::ostringstream fps_stream; - fps_stream << "FPS: " << std::fixed << std::setprecision(2) << fps; - putText(visualized_image, fps_stream.str(), Point(10, 25), FONT_HERSHEY_SIMPLEX, 1, Scalar(0, 0, 255), 2); - } - - // Draw bounding boxes and labels for each prediction - for (size_t i = 0; i < preds.rows; i++) - { - Mat pred = preds.row(i); - Mat bbox = pred.colRange(0, 4); - double conf = pred.at(4); - int classid = static_cast(pred.at(5)); - - // Convert bbox coordinates back to original image space - vector unnormalized_bbox = unletterbox(bbox, visualized_image.size(), letterbox_scale); - - // Draw bounding box - 
rectangle(visualized_image, Point(unnormalized_bbox[0], unnormalized_bbox[1]), - Point(unnormalized_bbox[2], unnormalized_bbox[3]), Scalar(0, 255, 0), 2); - - // Draw label - stringstream label; - label << nanodetClassLabels[classid] << ": " << fixed << setprecision(2) << conf; - putText(visualized_image, label.str(), Point(unnormalized_bbox[0], unnormalized_bbox[1] - 10), - FONT_HERSHEY_SIMPLEX, 1, Scalar(0, 255, 0), 2); - } - - return visualized_image; -} - -void processImage(Mat& inputImage, NanoDet& nanodet, TickMeter& tm, bool save, bool vis, bool video) -{ - cvtColor(inputImage, inputImage, COLOR_BGR2RGB); - tuple> w = letterbox(inputImage); - Mat inputBlob = get<0>(w); - vector letterboxScale = get<1>(w); - - tm.start(); - Mat predictions = nanodet.infer(inputBlob); - tm.stop(); - if (!video) - { - cout << "Inference time: " << tm.getTimeMilli() << " ms\n"; - } - - Mat img = visualize(predictions, inputImage, letterboxScale, video, tm.getFPS()); - cvtColor(img, img, COLOR_BGR2RGB); - if (save) - { - static const string kOutputName = "result.jpg"; - imwrite(kOutputName, img); - if (!video) - { - cout << "Results saved to " + kOutputName << endl; - } - } - if (vis) - { - static const string kWinName = "model"; - imshow(kWinName, img); - } -} - - -const String keys = - "{ help h | | Print help message. }" - "{ model m | object_detection_nanodet_2022nov.onnx | Usage: Path to the model, defaults to object_detection_nanodet_2022nov.onnx }" - "{ input i | | Path to the input image. Omit for using the default camera.}" - "{ confidence | 0.35 | Class confidence }" - "{ nms | 0.6 | Enter nms IOU threshold }" - "{ save s | true | Specify to save results. This flag is invalid when using the camera. }" - "{ vis v | true | Specify to open a window for result visualization. This flag is invalid when using the camera. 
}" - "{ backend bt | 0 | Choose one of computation backends: " - "0: (default) OpenCV implementation + CPU, " - "1: CUDA + GPU (CUDA), " - "2: CUDA + GPU (CUDA FP16), " - "3: TIM-VX + NPU, " - "4: CANN + NPU}"; - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("Use this script to run Nanodet inference using OpenCV, a contribution by Sri Siddarth Chakaravarthy as part of GSOC_2022."); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - string model = parser.get("model"); - string inputPath = parser.get("input"); - float confThreshold = parser.get("confidence"); - float nmsThreshold = parser.get("nms"); - bool save = parser.get("save"); - bool vis = parser.get("vis"); - int backendTargetid = parser.get("backend"); - - if (model.empty()) - { - CV_Error(Error::StsError, "Model file " + model + " not found"); - } - - NanoDet nanodet(model, confThreshold, nmsThreshold, - backendTargetPairs[backendTargetid].first, backendTargetPairs[backendTargetid].second); - - TickMeter tm; - if (parser.has("input")) - { - Mat inputImage = imread(samples::findFile(inputPath)); - static const bool kNotVideo = false; - processImage(inputImage, nanodet, tm, save, vis, kNotVideo); - waitKey(0); - } - else - { - VideoCapture cap; - cap.open(0); - if (!cap.isOpened()) - { - CV_Error(Error::StsError, "Cannot open video or file"); - } - - Mat frame; - while (waitKey(1) < 0) - { - cap >> frame; - if (frame.empty()) - { - cout << "Frame is empty" << endl; - waitKey(); - break; - } - tm.reset(); - static const bool kIsVideo = true; - processImage(frame, nanodet, tm, save, vis, kIsVideo); - } - cap.release(); - } - return 0; -} diff --git a/models/object_detection_nanodet/demo.py b/models/object_detection_nanodet/demo.py deleted file mode 100644 index ab6e980a..00000000 --- a/models/object_detection_nanodet/demo.py +++ /dev/null @@ -1,182 +0,0 @@ -import numpy as np -import cv2 as cv -import argparse - -# Check OpenCV version 
-opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from nanodet import NanoDet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -classes = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', - 'train', 'truck', 'boat', 'traffic light', 'fire hydrant', - 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', - 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', - 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', - 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', - 'baseball glove', 'skateboard', 'surfboard', 'tennis racket', - 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', - 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', - 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', - 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', - 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave', - 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', - 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush') - -def letterbox(srcimg, target_size=(416, 416)): - img = srcimg.copy() - - top, left, newh, neww = 0, 0, target_size[0], target_size[1] - if img.shape[0] != img.shape[1]: - hw_scale = img.shape[0] / img.shape[1] - if hw_scale > 1: - newh, neww = target_size[0], int(target_size[1] / hw_scale) - img = cv.resize(img, (neww, newh), interpolation=cv.INTER_AREA) - left = int((target_size[1] - neww) * 0.5) - img = cv.copyMakeBorder(img, 0, 0, left, 
target_size[1] - neww - left, cv.BORDER_CONSTANT, value=0) # add border - else: - newh, neww = int(target_size[0] * hw_scale), target_size[1] - img = cv.resize(img, (neww, newh), interpolation=cv.INTER_AREA) - top = int((target_size[0] - newh) * 0.5) - img = cv.copyMakeBorder(img, top, target_size[0] - newh - top, 0, 0, cv.BORDER_CONSTANT, value=0) - else: - img = cv.resize(img, target_size, interpolation=cv.INTER_AREA) - - letterbox_scale = [top, left, newh, neww] - return img, letterbox_scale - -def unletterbox(bbox, original_image_shape, letterbox_scale): - ret = bbox.copy() - - h, w = original_image_shape - top, left, newh, neww = letterbox_scale - - if h == w: - ratio = h / newh - ret = ret * ratio - return ret - - ratioh, ratiow = h / newh, w / neww - ret[0] = max((ret[0] - left) * ratiow, 0) - ret[1] = max((ret[1] - top) * ratioh, 0) - ret[2] = min((ret[2] - left) * ratiow, w) - ret[3] = min((ret[3] - top) * ratioh, h) - - return ret.astype(np.int32) - -def vis(preds, res_img, letterbox_scale, fps=None): - ret = res_img.copy() - - # draw FPS - if fps is not None: - fps_label = "FPS: %.2f" % fps - cv.putText(ret, fps_label, (10, 25), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2) - - # draw bboxes and labels - for pred in preds: - bbox = pred[:4] - conf = pred[-2] - classid = pred[-1].astype(np.int32) - - # bbox - xmin, ymin, xmax, ymax = unletterbox(bbox, ret.shape[:2], letterbox_scale) - cv.rectangle(ret, (xmin, ymin), (xmax, ymax), (0, 255, 0), thickness=2) - - # label - label = "{:s}: {:.2f}".format(classes[classid], conf) - cv.putText(ret, label, (xmin, ymin - 10), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), thickness=2) - - return ret - -if __name__=='__main__': - parser = argparse.ArgumentParser(description='Nanodet inference using OpenCV an contribution by Sri Siddarth Chakaravarthy part of GSOC_2022') - parser.add_argument('--input', '-i', type=str, - help='Path to the input image. 
Omit for using default camera.') - parser.add_argument('--model', '-m', type=str, - default='object_detection_nanodet_2022nov.onnx', help="Path to the model") - parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) - parser.add_argument('--confidence', default=0.35, type=float, - help='Class confidence') - parser.add_argument('--nms', default=0.6, type=float, - help='Enter nms IOU threshold') - parser.add_argument('--save', '-s', action='store_true', - help='Specify to save results. This flag is invalid when using camera.') - parser.add_argument('--vis', '-v', action='store_true', - help='Specify to open a window for result visualization. This flag is invalid when using camera.') - args = parser.parse_args() - - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - model = NanoDet(modelPath= args.model, - prob_threshold=args.confidence, - iou_threshold=args.nms, - backend_id=backend_id, - target_id=target_id) - - tm = cv.TickMeter() - tm.reset() - if args.input is not None: - image = cv.imread(args.input) - input_blob = cv.cvtColor(image, cv.COLOR_BGR2RGB) - - # Letterbox transformation - input_blob, letterbox_scale = letterbox(input_blob) - - # Inference - tm.start() - preds = model.infer(input_blob) - tm.stop() - print("Inference time: {:.2f} ms".format(tm.getTimeMilli())) - - img = vis(preds, image, letterbox_scale) - - if args.save: - print('Results saved to result.jpg\n') - cv.imwrite('result.jpg', img) - - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, img) - cv.waitKey(0) - - else: - print("Press any key to stop video capture") - deviceId = 0 - cap = 
cv.VideoCapture(deviceId) - - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - input_blob = cv.cvtColor(frame, cv.COLOR_BGR2RGB) - input_blob, letterbox_scale = letterbox(input_blob) - # Inference - tm.start() - preds = model.infer(input_blob) - tm.stop() - - img = vis(preds, frame, letterbox_scale, fps=tm.getFPS()) - - cv.imshow("NanoDet Demo", img) - - tm.reset() diff --git a/models/object_detection_nanodet/example_outputs/1_res.jpg b/models/object_detection_nanodet/example_outputs/1_res.jpg deleted file mode 100644 index 642fd18c..00000000 --- a/models/object_detection_nanodet/example_outputs/1_res.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:920fb925f17720c68476fe77b396b87504081be0372662d33df0c0dcf9fc9562 -size 128531 diff --git a/models/object_detection_nanodet/example_outputs/2_res.jpg b/models/object_detection_nanodet/example_outputs/2_res.jpg deleted file mode 100644 index 1949805b..00000000 --- a/models/object_detection_nanodet/example_outputs/2_res.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f0138234ef80f63ecb51f51f651248a0157f5dda81d5b3fe390cbec42951bf99 -size 419826 diff --git a/models/object_detection_nanodet/example_outputs/3_res.jpg b/models/object_detection_nanodet/example_outputs/3_res.jpg deleted file mode 100644 index 675f320d..00000000 --- a/models/object_detection_nanodet/example_outputs/3_res.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cdf23ae1ceb2cde982c83763d74ea8317fceb3c5a091331cd5c7d39a08dda840 -size 114182 diff --git a/models/object_detection_nanodet/example_outputs/WebCamR.gif b/models/object_detection_nanodet/example_outputs/WebCamR.gif deleted file mode 100644 index 3b4dd944..00000000 --- a/models/object_detection_nanodet/example_outputs/WebCamR.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:169d6f224a20bef4c7a7c889d1b4e9168adf114da981edc3f9c087b7a3ec40ad -size 4814729 diff --git a/models/object_detection_nanodet/nanodet.py b/models/object_detection_nanodet/nanodet.py deleted file mode 100644 index 53af96b5..00000000 --- a/models/object_detection_nanodet/nanodet.py +++ /dev/null @@ -1,122 +0,0 @@ -import numpy as np -import cv2 - -class NanoDet: - def __init__(self, modelPath, prob_threshold=0.35, iou_threshold=0.6, backend_id=0, target_id=0): - self.strides = (8, 16, 32, 64) - self.image_shape = (416, 416) - self.reg_max = 7 - self.prob_threshold = prob_threshold - self.iou_threshold = iou_threshold - self.backend_id = backend_id - self.target_id = target_id - self.project = np.arange(self.reg_max + 1) - self.mean = np.array([103.53, 116.28, 123.675], dtype=np.float32).reshape(1, 1, 3) - self.std = np.array([57.375, 57.12, 58.395], dtype=np.float32).reshape(1, 1, 3) - self.net = cv2.dnn.readNet(modelPath) - self.net.setPreferableBackend(self.backend_id) - self.net.setPreferableTarget(self.target_id) - - self.anchors_mlvl = [] - for i in range(len(self.strides)): - featmap_size = (int(self.image_shape[0] / self.strides[i]), int(self.image_shape[1] / self.strides[i])) - stride = self.strides[i] - feat_h, feat_w = featmap_size - shift_x = np.arange(0, feat_w) * stride - shift_y = np.arange(0, feat_h) * stride - xv, yv = np.meshgrid(shift_x, shift_y) - xv = xv.flatten() - yv = yv.flatten() - cx = xv + 0.5 * (stride-1) - cy = yv + 0.5 * (stride - 1) - #anchors = np.stack((cx, cy), axis=-1) - anchors = np.column_stack((cx, cy)) - self.anchors_mlvl.append(anchors) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self.backend_id = backendId - self.target_id = targetId - self.net.setPreferableBackend(self.backend_id) - self.net.setPreferableTarget(self.target_id) - - def pre_process(self, img): - img = img.astype(np.float32) - img = (img - self.mean) / self.std - blob = 
cv2.dnn.blobFromImage(img) - return blob - - def infer(self, srcimg): - blob = self.pre_process(srcimg) - self.net.setInput(blob) - outs = self.net.forward(self.net.getUnconnectedOutLayersNames()) - preds = self.post_process(outs) - return preds - - def post_process(self, preds): - cls_scores, bbox_preds = preds[::2], preds[1::2] - rescale = False - scale_factor = 1 - bboxes_mlvl = [] - scores_mlvl = [] - for stride, cls_score, bbox_pred, anchors in zip(self.strides, cls_scores, bbox_preds, self.anchors_mlvl): - if cls_score.ndim==3: - cls_score = cls_score.squeeze(axis=0) - if bbox_pred.ndim==3: - bbox_pred = bbox_pred.squeeze(axis=0) - - x_exp = np.exp(bbox_pred.reshape(-1, self.reg_max + 1)) - x_sum = np.sum(x_exp, axis=1, keepdims=True) - bbox_pred = x_exp / x_sum - bbox_pred = np.dot(bbox_pred, self.project).reshape(-1,4) - bbox_pred *= stride - - nms_pre = 1000 - if nms_pre > 0 and cls_score.shape[0] > nms_pre: - max_scores = cls_score.max(axis=1) - topk_inds = max_scores.argsort()[::-1][0:nms_pre] - anchors = anchors[topk_inds, :] - bbox_pred = bbox_pred[topk_inds, :] - cls_score = cls_score[topk_inds, :] - - points = anchors - distance = bbox_pred - max_shape=self.image_shape - x1 = points[:, 0] - distance[:, 0] - y1 = points[:, 1] - distance[:, 1] - x2 = points[:, 0] + distance[:, 2] - y2 = points[:, 1] + distance[:, 3] - - if max_shape is not None: - x1 = np.clip(x1, 0, max_shape[1]) - y1 = np.clip(y1, 0, max_shape[0]) - x2 = np.clip(x2, 0, max_shape[1]) - y2 = np.clip(y2, 0, max_shape[0]) - - #bboxes = np.stack([x1, y1, x2, y2], axis=-1) - bboxes = np.column_stack([x1, y1, x2, y2]) - bboxes_mlvl.append(bboxes) - scores_mlvl.append(cls_score) - - bboxes_mlvl = np.concatenate(bboxes_mlvl, axis=0) - if rescale: - bboxes_mlvl /= scale_factor - scores_mlvl = np.concatenate(scores_mlvl, axis=0) - bboxes_wh = bboxes_mlvl.copy() - bboxes_wh[:, 2:4] = bboxes_wh[:, 2:4] - bboxes_wh[:, 0:2] - classIds = np.argmax(scores_mlvl, axis=1) - confidences = 
np.max(scores_mlvl, axis=1) - - indices = cv2.dnn.NMSBoxes(bboxes_wh.tolist(), confidences.tolist(), self.prob_threshold, self.iou_threshold) - - if len(indices)>0: - det_bboxes = bboxes_mlvl[indices] - det_conf = confidences[indices] - det_classid = classIds[indices] - - return np.concatenate([det_bboxes, det_conf.reshape(-1, 1), det_classid.reshape(-1, 1)], axis=1) - else: - return np.array([]) diff --git a/models/object_detection_nanodet/object_detection_nanodet_2022nov.onnx b/models/object_detection_nanodet/object_detection_nanodet_2022nov.onnx deleted file mode 100644 index 066cb860..00000000 --- a/models/object_detection_nanodet/object_detection_nanodet_2022nov.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4b82da9944b88577175ee23a459dce2e26e6e4be573def65b1055dc2d9720186 -size 3800954 diff --git a/models/object_detection_nanodet/object_detection_nanodet_2022nov_int8.onnx b/models/object_detection_nanodet/object_detection_nanodet_2022nov_int8.onnx deleted file mode 100644 index 430fd0b4..00000000 --- a/models/object_detection_nanodet/object_detection_nanodet_2022nov_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8dd32b85f2d273e9047f1d6b59e0b2fd008b1076338107bb547ac28942cdf90b -size 1031424 diff --git a/models/object_detection_nanodet/object_detection_nanodet_2022nov_int8bq.onnx b/models/object_detection_nanodet/object_detection_nanodet_2022nov_int8bq.onnx deleted file mode 100644 index 95880ff3..00000000 --- a/models/object_detection_nanodet/object_detection_nanodet_2022nov_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8a2c877cc6f09e7dfac7a9066e33ee5ae68de530b3b994f6ee9125cff6e34d3f -size 1123958 diff --git a/models/object_detection_yolox/CMakeLists.txt b/models/object_detection_yolox/CMakeLists.txt deleted file mode 100644 index 5c373e40..00000000 --- a/models/object_detection_yolox/CMakeLists.txt +++ /dev/null @@ 
-1,29 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(project_name "opencv_zoo_object_detection_yolox") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. -# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/object_detection_yolox/LICENSE b/models/object_detection_yolox/LICENSE deleted file mode 100644 index 1d4dc763..00000000 --- a/models/object_detection_yolox/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright (c) 2021-2022 Megvii Inc. All rights reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/object_detection_yolox/README.md b/models/object_detection_yolox/README.md index e902660c..f27b7f49 100644 --- a/models/object_detection_yolox/README.md +++ b/models/object_detection_yolox/README.md @@ -12,6 +12,9 @@ Key features of the YOLOX object detector - This version of YoloX: YoloX_s - `object_detection_yolox_2022nov_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. 
+### Download +Model file: [object_detection_yolox_2022nov.onnx](https://huggingface.co/opencv/opencv_zoo/resolve/main/models/object_detection_yolox/object_detection_yolox_2022nov.onnx) + ## Demo diff --git a/models/object_detection_yolox/demo.cpp b/models/object_detection_yolox/demo.cpp deleted file mode 100644 index 0239b1c7..00000000 --- a/models/object_detection_yolox/demo.cpp +++ /dev/null @@ -1,311 +0,0 @@ -#include -#include -#include - -#include - -using namespace std; -using namespace cv; -using namespace dnn; - -vector< pair > backendTargetPairs = { - std::make_pair(dnn::DNN_BACKEND_OPENCV, dnn::DNN_TARGET_CPU), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA_FP16), - std::make_pair(dnn::DNN_BACKEND_TIMVX, dnn::DNN_TARGET_NPU), - std::make_pair(dnn::DNN_BACKEND_CANN, dnn::DNN_TARGET_NPU) }; - -vector labelYolox = { - "person", "bicycle", "car", "motorcycle", "airplane", "bus", - "train", "truck", "boat", "traffic light", "fire hydrant", - "stop sign", "parking meter", "bench", "bird", "cat", "dog", - "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", - "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", - "skis", "snowboard", "sports ball", "kite", "baseball bat", - "baseball glove", "skateboard", "surfboard", "tennis racket", - "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", - "banana", "apple", "sandwich", "orange", "broccoli", "carrot", - "hot dog", "pizza", "donut", "cake", "chair", "couch", - "potted plant", "bed", "dining table", "toilet", "tv", "laptop", - "mouse", "remote", "keyboard", "cell phone", "microwave", - "oven", "toaster", "sink", "refrigerator", "book", "clock", - "vase", "scissors", "teddy bear", "hair drier", "toothbrush" }; - -class YoloX { -private: - Net net; - string modelPath; - Size inputSize; - float confThreshold; - float nmsThreshold; - float objThreshold; - dnn::Backend backendId; - dnn::Target targetId; - 
int num_classes; - vector strides; - Mat expandedStrides; - Mat grids; - -public: - YoloX(string modPath, float confThresh = 0.35, float nmsThresh = 0.5, float objThresh = 0.5, dnn::Backend bId = DNN_BACKEND_DEFAULT, dnn::Target tId = DNN_TARGET_CPU) : - modelPath(modPath), confThreshold(confThresh), - nmsThreshold(nmsThresh), objThreshold(objThresh), - backendId(bId), targetId(tId) - { - this->num_classes = int(labelYolox.size()); - this->net = readNet(modelPath); - this->inputSize = Size(640, 640); - this->strides = vector{ 8, 16, 32 }; - this->net.setPreferableBackend(this->backendId); - this->net.setPreferableTarget(this->targetId); - this->generateAnchors(); - } - - Mat preprocess(Mat img) - { - Mat blob; - Image2BlobParams paramYolox; - paramYolox.datalayout = DNN_LAYOUT_NCHW; - paramYolox.ddepth = CV_32F; - paramYolox.mean = Scalar::all(0); - paramYolox.scalefactor = Scalar::all(1); - paramYolox.size = Size(img.cols, img.rows); - paramYolox.swapRB = true; - - blob = blobFromImageWithParams(img, paramYolox); - return blob; - } - - Mat infer(Mat srcimg) - { - Mat inputBlob = this->preprocess(srcimg); - - this->net.setInput(inputBlob); - vector outs; - this->net.forward(outs, this->net.getUnconnectedOutLayersNames()); - - Mat predictions = this->postprocess(outs[0]); - return predictions; - } - - Mat postprocess(Mat outputs) - { - Mat dets = outputs.reshape(0,outputs.size[1]); - Mat col01; - add(dets.colRange(0, 2), this->grids, col01); - Mat col23; - exp(dets.colRange(2, 4), col23); - vector col = { col01, col23 }; - Mat boxes; - hconcat(col, boxes); - float* ptr = this->expandedStrides.ptr(0); - for (int r = 0; r < boxes.rows; r++, ptr++) - { - boxes.rowRange(r, r + 1) = *ptr * boxes.rowRange(r, r + 1); - } - // get boxes - Mat boxes_xywh(boxes.rows, boxes.cols, CV_32FC1, Scalar(1)); - Mat scores = dets.colRange(5, dets.cols).clone(); - vector maxScores(dets.rows); - vector maxScoreIdx(dets.rows); - vector boxesXYWH(dets.rows); - - for (int r = 0; r < 
boxes_xywh.rows; r++, ptr++) - { - boxes_xywh.at(r, 0) = boxes.at(r, 0) - boxes.at(r, 2) / 2.f; - boxes_xywh.at(r, 1) = boxes.at(r, 1) - boxes.at(r, 3) / 2.f; - boxes_xywh.at(r, 2) = boxes.at(r, 2); - boxes_xywh.at(r, 3) = boxes.at(r, 3); - // get scores and class indices - scores.rowRange(r, r + 1) = scores.rowRange(r, r + 1) * dets.at(r, 4); - double minVal, maxVal; - Point maxIdx; - minMaxLoc(scores.rowRange(r, r+1), &minVal, &maxVal, nullptr, &maxIdx); - maxScoreIdx[r] = maxIdx.x; - maxScores[r] = float(maxVal); - boxesXYWH[r].x = boxes_xywh.at(r, 0); - boxesXYWH[r].y = boxes_xywh.at(r, 1); - boxesXYWH[r].width = boxes_xywh.at(r, 2); - boxesXYWH[r].height = boxes_xywh.at(r, 3); - } - - vector keep; - NMSBoxesBatched(boxesXYWH, maxScores, maxScoreIdx, this->confThreshold, this->nmsThreshold, keep); - Mat candidates(int(keep.size()), 6, CV_32FC1); - int row = 0; - for (auto idx : keep) - { - boxes_xywh.rowRange(idx, idx + 1).copyTo(candidates(Rect(0, row, 4, 1))); - candidates.at(row, 4) = maxScores[idx]; - candidates.at(row, 5) = float(maxScoreIdx[idx]); - row++; - } - if (keep.size() == 0) - return Mat(); - return candidates; - - } - - - void generateAnchors() - { - vector< tuple > nb; - int total = 0; - - for (auto v : this->strides) - { - int w = this->inputSize.width / v; - int h = this->inputSize.height / v; - nb.push_back(tuple(w * h, w, v)); - total += w * h; - } - this->grids = Mat(total, 2, CV_32FC1); - this->expandedStrides = Mat(total, 1, CV_32FC1); - float* ptrGrids = this->grids.ptr(0); - float* ptrStrides = this->expandedStrides.ptr(0); - int pos = 0; - for (auto le : nb) - { - int r = get<1>(le); - for (int i = 0; i < get<0>(le); i++, pos++) - { - *ptrGrids++ = float(i % r); - *ptrGrids++ = float(i / r); - *ptrStrides++ = float((get<2>(le))); - } - } - } -}; - -std::string keys = -"{ help h | | Print help message. 
}" -"{ model m | object_detection_yolox_2022nov.onnx | Usage: Path to the model, defaults to object_detection_yolox_2022nov.onnx }" -"{ input i | | Path to input image or video file. Skip this argument to capture frames from a camera.}" -"{ confidence | 0.5 | Class confidence }" -"{ obj | 0.5 | Enter object threshold }" -"{ nms | 0.5 | Enter nms IOU threshold }" -"{ save s | true | Specify to save results. This flag is invalid when using camera. }" -"{ vis v | 1 | Specify to open a window for result visualization. This flag is invalid when using camera. }" -"{ backend bt | 0 | Choose one of computation backends: " -"0: (default) OpenCV implementation + CPU, " -"1: CUDA + GPU (CUDA), " -"2: CUDA + GPU (CUDA FP16), " -"3: TIM-VX + NPU, " -"4: CANN + NPU}"; - -pair letterBox(Mat srcimg, Size targetSize = Size(640, 640)) -{ - Mat paddedImg(targetSize.height, targetSize.width, CV_32FC3, Scalar::all(114.0)); - Mat resizeImg; - - double ratio = min(targetSize.height / double(srcimg.rows), targetSize.width / double(srcimg.cols)); - resize(srcimg, resizeImg, Size(int(srcimg.cols * ratio), int(srcimg.rows * ratio)), INTER_LINEAR); - resizeImg.copyTo(paddedImg(Rect(0, 0, int(srcimg.cols * ratio), int(srcimg.rows * ratio)))); - return pair(paddedImg, ratio); -} - -Mat unLetterBox(Mat bbox, double letterboxScale) -{ - return bbox / letterboxScale; -} - -Mat visualize(Mat dets, Mat srcimg, double letterbox_scale, double fps = -1) -{ - Mat resImg = srcimg.clone(); - - if (fps > 0) - putText(resImg, format("FPS: %.2f", fps), Size(10, 25), FONT_HERSHEY_SIMPLEX, 1, Scalar(0, 0, 255), 2); - - for (int row = 0; row < dets.rows; row++) - { - Mat boxF = unLetterBox(dets(Rect(0, row, 4, 1)), letterbox_scale); - Mat box; - boxF.convertTo(box, CV_32S); - float score = dets.at(row, 4); - int clsId = int(dets.at(row, 5)); - - int x0 = box.at(0, 0); - int y0 = box.at(0, 1); - int x1 = box.at(0, 2); - int y1 = box.at(0, 3); - - string text = format("%s : %f", labelYolox[clsId].c_str(), score * 
100); - int font = FONT_HERSHEY_SIMPLEX; - int baseLine = 0; - Size txtSize = getTextSize(text, font, 0.4, 1, &baseLine); - rectangle(resImg, Point(x0, y0), Point(x1, y1), Scalar(0, 255, 0), 2); - rectangle(resImg, Point(x0, y0 + 1), Point(x0 + txtSize.width + 1, y0 + int(1.5 * txtSize.height)), Scalar(255, 255, 255), -1); - putText(resImg, text, Point(x0, y0 + txtSize.height), font, 0.4, Scalar(0, 0, 0), 1); - } - - return resImg; -} - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("Use this script to run Yolox deep learning networks in opencv_zoo using OpenCV."); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - string model = parser.get("model"); - float confThreshold = parser.get("confidence"); - float objThreshold = parser.get("obj"); - float nmsThreshold = parser.get("nms"); - bool vis = parser.get("vis"); - bool save = parser.get("save"); - int backendTargetid = parser.get("backend"); - - if (model.empty()) - { - CV_Error(Error::StsError, "Model file " + model + " not found"); - } - - YoloX modelNet(model, confThreshold, nmsThreshold, objThreshold, - backendTargetPairs[backendTargetid].first, backendTargetPairs[backendTargetid].second); - //! 
[Open a video file or an image file or a camera stream] - VideoCapture cap; - if (parser.has("input")) - cap.open(samples::findFile(parser.get("input"))); - else - cap.open(0); - if (!cap.isOpened()) - CV_Error(Error::StsError, "Cannot open video or file"); - Mat frame, inputBlob; - double letterboxScale; - - static const std::string kWinName = model; - int nbInference = 0; - while (waitKey(1) < 0) - { - cap >> frame; - if (frame.empty()) - { - cout << "Frame is empty" << endl; - waitKey(); - break; - } - pair w = letterBox(frame); - inputBlob = get<0>(w); - letterboxScale = get<1>(w); - TickMeter tm; - tm.start(); - Mat predictions = modelNet.infer(inputBlob); - tm.stop(); - cout << "Inference time: " << tm.getTimeMilli() << " ms\n"; - Mat img = visualize(predictions, frame, letterboxScale, tm.getFPS()); - if (save && parser.has("input")) - { - imwrite("result.jpg", img); - } - if (vis) - { - imshow(kWinName, img); - } - } - return 0; -} diff --git a/models/object_detection_yolox/demo.py b/models/object_detection_yolox/demo.py deleted file mode 100644 index a05f777e..00000000 --- a/models/object_detection_yolox/demo.py +++ /dev/null @@ -1,155 +0,0 @@ -import numpy as np -import cv2 as cv -import argparse - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from yolox import YoloX - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -classes = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', - 'train', 'truck', 'boat', 
'traffic light', 'fire hydrant', - 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', - 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', - 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', - 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', - 'baseball glove', 'skateboard', 'surfboard', 'tennis racket', - 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', - 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', - 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', - 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', - 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave', - 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', - 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush') - -def letterbox(srcimg, target_size=(640, 640)): - padded_img = np.ones((target_size[0], target_size[1], 3)).astype(np.float32) * 114.0 - ratio = min(target_size[0] / srcimg.shape[0], target_size[1] / srcimg.shape[1]) - resized_img = cv.resize( - srcimg, (int(srcimg.shape[1] * ratio), int(srcimg.shape[0] * ratio)), interpolation=cv.INTER_LINEAR - ).astype(np.float32) - padded_img[: int(srcimg.shape[0] * ratio), : int(srcimg.shape[1] * ratio)] = resized_img - - return padded_img, ratio - -def unletterbox(bbox, letterbox_scale): - return bbox / letterbox_scale - -def vis(dets, srcimg, letterbox_scale, fps=None): - res_img = srcimg.copy() - - if fps is not None: - fps_label = "FPS: %.2f" % fps - cv.putText(res_img, fps_label, (10, 25), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2) - - for det in dets: - box = unletterbox(det[:4], letterbox_scale).astype(np.int32) - score = det[-2] - cls_id = int(det[-1]) - - x0, y0, w, h = box - - text = '{}:{:.1f}%'.format(classes[cls_id], score * 100) - font = cv.FONT_HERSHEY_SIMPLEX - txt_size = cv.getTextSize(text, font, 0.4, 1)[0] - cv.rectangle(res_img, (x0, y0 , w, h), (0, 255, 0), 2) - cv.rectangle(res_img, (x0, y0 + 1), (x0 + txt_size[0] + 1, 
y0 + int(1.5 * txt_size[1])), (255, 255, 255), -1) - cv.putText(res_img, text, (x0, y0 + txt_size[1]), font, 0.4, (0, 0, 0), thickness=1) - - return res_img - -if __name__=='__main__': - parser = argparse.ArgumentParser(description='Nanodet inference using OpenCV an contribution by Sri Siddarth Chakaravarthy part of GSOC_2022') - parser.add_argument('--input', '-i', type=str, - help='Path to the input image. Omit for using default camera.') - parser.add_argument('--model', '-m', type=str, default='object_detection_yolox_2022nov.onnx', - help="Path to the model") - parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) - parser.add_argument('--confidence', default=0.5, type=float, - help='Class confidence') - parser.add_argument('--nms', default=0.5, type=float, - help='Enter nms IOU threshold') - parser.add_argument('--obj', default=0.5, type=float, - help='Enter object threshold') - parser.add_argument('--save', '-s', action='store_true', - help='Specify to save results. This flag is invalid when using camera.') - parser.add_argument('--vis', '-v', action='store_true', - help='Specify to open a window for result visualization. 
This flag is invalid when using camera.') - args = parser.parse_args() - - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - model_net = YoloX(modelPath= args.model, - confThreshold=args.confidence, - nmsThreshold=args.nms, - objThreshold=args.obj, - backendId=backend_id, - targetId=target_id) - - tm = cv.TickMeter() - tm.reset() - if args.input is not None: - image = cv.imread(args.input) - input_blob = cv.cvtColor(image, cv.COLOR_BGR2RGB) - input_blob, letterbox_scale = letterbox(input_blob) - - # Inference - tm.start() - preds = model_net.infer(input_blob) - tm.stop() - print("Inference time: {:.2f} ms".format(tm.getTimeMilli())) - - img = vis(preds, image, letterbox_scale) - - if args.save: - print('Results saved to result.jpg\n') - cv.imwrite('result.jpg', img) - - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, img) - cv.waitKey(0) - - else: - print("Press any key to stop video capture") - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - input_blob = cv.cvtColor(frame, cv.COLOR_BGR2RGB) - input_blob, letterbox_scale = letterbox(input_blob) - - # Inference - tm.start() - preds = model_net.infer(input_blob) - tm.stop() - - img = vis(preds, frame, letterbox_scale, fps=tm.getFPS()) - - cv.imshow("YoloX Demo", img) - - tm.reset() diff --git a/models/object_detection_yolox/example_outputs/1_res.jpg b/models/object_detection_yolox/example_outputs/1_res.jpg deleted file mode 100644 index aab8c964..00000000 --- a/models/object_detection_yolox/example_outputs/1_res.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e03d69d6e1420bb84f3426b7d5f607cdfc86e522e33ee646cfc970e9ff53d9ea -size 124300 diff --git a/models/object_detection_yolox/example_outputs/2_res.jpg 
b/models/object_detection_yolox/example_outputs/2_res.jpg deleted file mode 100644 index a13b5f3e..00000000 --- a/models/object_detection_yolox/example_outputs/2_res.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4a54924474eeb1c2d8cbb4f2245e003c28aff8256eebfa1bf2653f4af41eaa66 -size 402200 diff --git a/models/object_detection_yolox/example_outputs/3_res.jpg b/models/object_detection_yolox/example_outputs/3_res.jpg deleted file mode 100644 index 42ac0a1f..00000000 --- a/models/object_detection_yolox/example_outputs/3_res.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e26d3b5cb7ac326fd2d431611f397ad710d210a22109fa6d876dacee262db63c -size 119379 diff --git a/models/object_detection_yolox/object_detection_yolox_2022nov.onnx b/models/object_detection_yolox/object_detection_yolox_2022nov.onnx deleted file mode 100644 index 0a22cdd5..00000000 --- a/models/object_detection_yolox/object_detection_yolox_2022nov.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c5c2d13e59ae883e6af3b45daea64af4833a4951c92d116ec270d9ddbe998063 -size 35858002 diff --git a/models/object_detection_yolox/object_detection_yolox_2022nov_int8.onnx b/models/object_detection_yolox/object_detection_yolox_2022nov_int8.onnx deleted file mode 100644 index af996081..00000000 --- a/models/object_detection_yolox/object_detection_yolox_2022nov_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:01a3b0f400b30bc1e45230e991b2e499ab42622485a330021947333fbaf03935 -size 9079452 diff --git a/models/object_detection_yolox/object_detection_yolox_2022nov_int8bq.onnx b/models/object_detection_yolox/object_detection_yolox_2022nov_int8bq.onnx deleted file mode 100644 index ef4f2d2d..00000000 --- a/models/object_detection_yolox/object_detection_yolox_2022nov_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:dcaae0aaa2fea4167f89235ee340eb869d3707b25712218d4c7ce921ac90e2ba -size 9744418 diff --git a/models/object_detection_yolox/yolox.py b/models/object_detection_yolox/yolox.py deleted file mode 100644 index 503b3fce..00000000 --- a/models/object_detection_yolox/yolox.py +++ /dev/null @@ -1,85 +0,0 @@ -import numpy as np -import cv2 - -class YoloX: - def __init__(self, modelPath, confThreshold=0.35, nmsThreshold=0.5, objThreshold=0.5, backendId=0, targetId=0): - self.num_classes = 80 - self.net = cv2.dnn.readNet(modelPath) - self.input_size = (640, 640) - self.mean = np.array([0.485, 0.456, 0.406], dtype=np.float32).reshape(1, 1, 3) - self.std = np.array([0.229, 0.224, 0.225], dtype=np.float32).reshape(1, 1, 3) - self.strides = [8, 16, 32] - self.confThreshold = confThreshold - self.nmsThreshold = nmsThreshold - self.objThreshold = objThreshold - self.backendId = backendId - self.targetId = targetId - self.net.setPreferableBackend(self.backendId) - self.net.setPreferableTarget(self.targetId) - - self.generateAnchors() - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self.backendId = backendId - self.targetId = targetId - self.net.setPreferableBackend(self.backendId) - self.net.setPreferableTarget(self.targetId) - - def preprocess(self, img): - blob = np.transpose(img, (2, 0, 1)) - return blob[np.newaxis, :, :, :] - - def infer(self, srcimg): - input_blob = self.preprocess(srcimg) - - self.net.setInput(input_blob) - outs = self.net.forward(self.net.getUnconnectedOutLayersNames()) - - predictions = self.postprocess(outs[0]) - return predictions - - def postprocess(self, outputs): - dets = outputs[0] - - dets[:, :2] = (dets[:, :2] + self.grids) * self.expanded_strides - dets[:, 2:4] = np.exp(dets[:, 2:4]) * self.expanded_strides - - # get boxes - boxes = dets[:, :4] - boxes_xywh = np.ones_like(boxes) - boxes_xywh[:, 0] = boxes[:, 0] - boxes[:, 2] / 2. 
- boxes_xywh[:, 1] = boxes[:, 1] - boxes[:, 3] / 2. - boxes_xywh[:, 2] = boxes[:, 2] - boxes_xywh[:, 3] = boxes[:, 3] - - # get scores and class indices - scores = dets[:, 4:5] * dets[:, 5:] - max_scores = np.amax(scores, axis=1) - max_scores_idx = np.argmax(scores, axis=1) - - keep = cv2.dnn.NMSBoxesBatched(boxes_xywh.tolist(), max_scores.tolist(), max_scores_idx.tolist(), self.confThreshold, self.nmsThreshold) - - candidates = np.concatenate([boxes_xywh, max_scores[:, None], max_scores_idx[:, None]], axis=1) - if len(keep) == 0: - return np.array([]) - return candidates[keep] - - def generateAnchors(self): - self.grids = [] - self.expanded_strides = [] - hsizes = [self.input_size[0] // stride for stride in self.strides] - wsizes = [self.input_size[1] // stride for stride in self.strides] - - for hsize, wsize, stride in zip(hsizes, wsizes, self.strides): - xv, yv = np.meshgrid(np.arange(hsize), np.arange(wsize)) - grid = np.stack((xv, yv), 2).reshape(1, -1, 2) - self.grids.append(grid) - shape = grid.shape[:2] - self.expanded_strides.append(np.full((*shape, 1), stride)) - - self.grids = np.concatenate(self.grids, 1) - self.expanded_strides = np.concatenate(self.expanded_strides, 1) diff --git a/models/object_tracking_vittrack/CMakeLists.txt b/models/object_tracking_vittrack/CMakeLists.txt deleted file mode 100644 index 51644e83..00000000 --- a/models/object_tracking_vittrack/CMakeLists.txt +++ /dev/null @@ -1,32 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(project_name "opencv_zoo_object_tracking_vittrack") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been 
found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. -# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Set C++ compilation standard to C++11 -set(CMAKE_CXX_STANDARD 11) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/object_tracking_vittrack/LICENSE b/models/object_tracking_vittrack/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/models/object_tracking_vittrack/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/object_tracking_vittrack/README.md b/models/object_tracking_vittrack/README.md index 0da588ad..14a08482 100644 --- a/models/object_tracking_vittrack/README.md +++ b/models/object_tracking_vittrack/README.md @@ -12,6 +12,10 @@ This model is contributed by [Pengyu Liu](https://github.com/lpylpy0514) in GSoC - OpenCV > 4.8.0 is required. Build from source with instructions from https://opencv.org/get-started/.** - `object_tracking_vittrack_2023sep_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. 
+### Download +Model file: [object_tracking_vittrack_2023sep.onnx](https://huggingface.co/opencv/opencv_zoo/resolve/main/models/object_tracking_vittrack/object_tracking_vittrack_2023sep.onnx) + + # Demo ## Python diff --git a/models/object_tracking_vittrack/demo.cpp b/models/object_tracking_vittrack/demo.cpp deleted file mode 100644 index c8ccb26b..00000000 --- a/models/object_tracking_vittrack/demo.cpp +++ /dev/null @@ -1,210 +0,0 @@ -#include -#include - -using namespace std; -using namespace cv; -using namespace dnn; - -struct TrackingResult -{ - bool isLocated; - Rect bbox; - float score; -}; - -class VitTrack -{ -public: - - VitTrack(const string& model_path, int backend_id = 0, int target_id = 0) - { - params.net = model_path; - params.backend = backend_id; - params.target = target_id; - model = TrackerVit::create(params); - } - - void init(const Mat& image, const Rect& roi) - { - model->init(image, roi); - } - - TrackingResult infer(const Mat& image) - { - TrackingResult result; - result.isLocated = model->update(image, result.bbox); - result.score = model->getTrackingScore(); - return result; - } - -private: - TrackerVit::Params params; - Ptr model; -}; - -Mat visualize(const Mat& image, const Rect& bbox, float score, bool isLocated, double fps = -1.0, - const Scalar& box_color = Scalar(0, 255, 0), const Scalar& text_color = Scalar(0, 255, 0), - double fontScale = 1.0, int fontSize = 1) -{ - Mat output = image.clone(); - int h = output.rows; - int w = output.cols; - - if (fps >= 0) - { - putText(output, "FPS: " + to_string(fps), Point(0, 30), FONT_HERSHEY_DUPLEX, fontScale, text_color, fontSize); - } - - if (isLocated && score >= 0.3) - { - rectangle(output, bbox, box_color, 2); - putText(output, format("%.2f", score), Point(bbox.x, bbox.y + 25), - FONT_HERSHEY_DUPLEX, fontScale, text_color, fontSize); - } - else - { - Size text_size = getTextSize("Target lost!", FONT_HERSHEY_DUPLEX, fontScale, fontSize, nullptr); - int text_x = (w - text_size.width) / 2; - 
int text_y = (h - text_size.height) / 2; - putText(output, "Target lost!", Point(text_x, text_y), FONT_HERSHEY_DUPLEX, fontScale, Scalar(0, 0, 255), fontSize); - } - - return output; -} - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, - "{help h | | Print help message. }" - "{input i | |Set path to the input video. Omit for using default camera.}" - "{model_path |object_tracking_vittrack_2023sep.onnx |Set model path}" - "{backend_target bt |0 |Choose backend-target pair: 0 - OpenCV implementation + CPU, 1 - CUDA + GPU (CUDA), 2 - CUDA + GPU (CUDA FP16), 3 - TIM-VX + NPU, 4 - CANN + NPU}" - "{save s |false |Specify to save a file with results.}" - "{vis v |true |Specify to open a new window to show results.}"); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - string input = parser.get("input"); - string model_path = parser.get("model_path"); - int backend_target = parser.get("backend_target"); - bool save = parser.get("save"); - bool vis = parser.get("vis"); - - vector> backend_target_pairs = - { - {DNN_BACKEND_OPENCV, DNN_TARGET_CPU}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA}, - {DNN_BACKEND_CUDA, DNN_TARGET_CUDA_FP16}, - {DNN_BACKEND_TIMVX, DNN_TARGET_NPU}, - {DNN_BACKEND_CANN, DNN_TARGET_NPU} - }; - - int backend_id = backend_target_pairs[backend_target][0]; - int target_id = backend_target_pairs[backend_target][1]; - - // Create VitTrack tracker - VitTrack tracker(model_path, backend_id, target_id); - - // Open video capture - VideoCapture video; - if (input.empty()) - { - video.open(0); // Default camera - } - else - { - video.open(input); - } - - if (!video.isOpened()) - { - cerr << "Error: Could not open video source" << endl; - return -1; - } - - // Select an object - Mat first_frame; - video >> first_frame; - - if (first_frame.empty()) - { - cerr << "No frames grabbed!" << endl; - return -1; - } - - Mat first_frame_copy = first_frame.clone(); - putText(first_frame_copy, "1. 
Drag a bounding box to track.", Point(0, 25), FONT_HERSHEY_SIMPLEX, 1, Scalar(0, 255, 0)); - putText(first_frame_copy, "2. Press ENTER to confirm", Point(0, 50), FONT_HERSHEY_SIMPLEX, 1, Scalar(0, 255, 0)); - Rect roi = selectROI("VitTrack Demo", first_frame_copy); - - if (roi.area() == 0) - { - cerr << "No ROI is selected! Exiting..." << endl; - return -1; - } - else - { - cout << "Selected ROI: " << roi << endl; - } - - // Create VideoWriter if save option is specified - VideoWriter output_video; - if (save) - { - Size frame_size = first_frame.size(); - output_video.open("output.mp4", VideoWriter::fourcc('m', 'p', '4', 'v'), video.get(CAP_PROP_FPS), frame_size); - if (!output_video.isOpened()) - { - cerr << "Error: Could not create output video stream" << endl; - return -1; - } - } - - // Initialize tracker with ROI - tracker.init(first_frame, roi); - - // Track frame by frame - TickMeter tm; - while (waitKey(1) < 0) - { - video >> first_frame; - if (first_frame.empty()) - { - cout << "End of video" << endl; - break; - } - - // Inference - tm.start(); - TrackingResult result = tracker.infer(first_frame); - tm.stop(); - - // Visualize - Mat frame = first_frame.clone(); - frame = visualize(frame, result.bbox, result.score, result.isLocated, tm.getFPS()); - - if (save) - { - output_video.write(frame); - } - - if (vis) - { - imshow("VitTrack Demo", frame); - } - tm.reset(); - } - - if (save) - { - output_video.release(); - } - - video.release(); - destroyAllWindows(); - - return 0; -} diff --git a/models/object_tracking_vittrack/demo.py b/models/object_tracking_vittrack/demo.py deleted file mode 100644 index 21d65878..00000000 --- a/models/object_tracking_vittrack/demo.py +++ /dev/null @@ -1,125 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. 
- -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from vittrack import VitTrack - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser( - description="VIT track opencv API") -parser.add_argument('--input', '-i', type=str, - help='Usage: Set path to the input video. Omit for using default camera.') -parser.add_argument('--model_path', type=str, default='object_tracking_vittrack_2023sep.onnx', - help='Usage: Set model path') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--save', '-s', action='store_true', default=False, - help='Usage: Specify to save a file with results.') -parser.add_argument('--vis', '-v', action='store_true', default=True, - help='Usage: Specify to open a new window to show results.') -args = parser.parse_args() -def visualize(image, bbox, score, isLocated, fps=None, box_color=(0, 255, 0),text_color=(0, 255, 0), fontScale = 1, fontSize = 1): - output = image.copy() - h, w, _ = output.shape - - if fps is not None: - cv.putText(output, 'FPS: {:.2f}'.format(fps), (0, 30), cv.FONT_HERSHEY_DUPLEX, fontScale, 
text_color, fontSize) - - if isLocated and score >= 0.3: - # bbox: Tuple of length 4 - x, y, w, h = bbox - cv.rectangle(output, (x, y), (x+w, y+h), box_color, 2) - cv.putText(output, '{:.2f}'.format(score), (x, y+25), cv.FONT_HERSHEY_DUPLEX, fontScale, text_color, fontSize) - else: - text_size, baseline = cv.getTextSize('Target lost!', cv.FONT_HERSHEY_DUPLEX, fontScale, fontSize) - text_x = int((w - text_size[0]) / 2) - text_y = int((h - text_size[1]) / 2) - cv.putText(output, 'Target lost!', (text_x, text_y), cv.FONT_HERSHEY_DUPLEX, fontScale, (0, 0, 255), fontSize) - - return output - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - model = VitTrack( - model_path=args.model_path, - backend_id=backend_id, - target_id=target_id) - - # Read from args.input - _input = 0 if args.input is None else args.input - video = cv.VideoCapture(_input) - - # Select an object - has_frame, first_frame = video.read() - if not has_frame: - print('No frames grabbed!') - exit() - first_frame_copy = first_frame.copy() - cv.putText(first_frame_copy, "1. Drag a bounding box to track.", (0, 25), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0)) - cv.putText(first_frame_copy, "2. Press ENTER to confirm", (0, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0)) - roi = cv.selectROI('VitTrack Demo', first_frame_copy) - - if np.all(np.array(roi) == 0): - print("No ROI is selected! 
Exiting ...") - exit() - else: - print("Selected ROI: {}".format(roi)) - - if args.save: - fps = video.get(cv.CAP_PROP_FPS) - frame_size = (first_frame.shape[1], first_frame.shape[0]) - output_video = cv.VideoWriter('output.mp4', cv.VideoWriter_fourcc(*'mp4v'), fps, frame_size) - - # Init tracker with ROI - model.init(first_frame, roi) - - # Track frame by frame - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - has_frame, frame = video.read() - if not has_frame: - print('End of video') - break - # Inference - tm.start() - isLocated, bbox, score = model.infer(frame) - tm.stop() - # Visualize - frame = visualize(frame, bbox, score, isLocated, fps=tm.getFPS()) - if args.save: - output_video.write(frame) - - if args.vis: - cv.imshow('VitTrack Demo', frame) - tm.reset() - - if args.save: - output_video.release() - - video.release() - cv.destroyAllWindows() diff --git a/models/object_tracking_vittrack/example_outputs/vittrack_demo.gif b/models/object_tracking_vittrack/example_outputs/vittrack_demo.gif deleted file mode 100644 index 3aa482c3..00000000 --- a/models/object_tracking_vittrack/example_outputs/vittrack_demo.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:57e5a2497186f8171809fccc3cd84473aefc6fcd2ce2f0d5b92a1382e92a7768 -size 615598 diff --git a/models/object_tracking_vittrack/object_tracking_vittrack_2023sep.onnx b/models/object_tracking_vittrack/object_tracking_vittrack_2023sep.onnx deleted file mode 100644 index 7df76db6..00000000 --- a/models/object_tracking_vittrack/object_tracking_vittrack_2023sep.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2990f0b7cd44d92afa48cd97db6de7be113fc1d9594fddb74e2725c10478e91d -size 714726 diff --git a/models/object_tracking_vittrack/object_tracking_vittrack_2023sep_int8bq.onnx b/models/object_tracking_vittrack/object_tracking_vittrack_2023sep_int8bq.onnx deleted file mode 100644 index 23b52694..00000000 --- 
a/models/object_tracking_vittrack/object_tracking_vittrack_2023sep_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:54e8d58892a49de71fadf6673ba10193f7899324a4c1b8fe8c2f2d8d5d661fb4 -size 271327 diff --git a/models/object_tracking_vittrack/vittrack.py b/models/object_tracking_vittrack/vittrack.py deleted file mode 100644 index e710e995..00000000 --- a/models/object_tracking_vittrack/vittrack.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. - -import numpy as np -import cv2 as cv - -class VitTrack: - def __init__(self, model_path, backend_id=0, target_id=0): - self.model_path = model_path - self.backend_id = backend_id - self.target_id = target_id - - self.params = cv.TrackerVit_Params() - self.params.net = self.model_path - self.params.backend = self.backend_id - self.params.target = self.target_id - - self.model = cv.TrackerVit_create(self.params) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backend_id, target_id): - self.backend_id = backend_id - self.target_id = target_id - - self.params.backend = self.backend_id - self.params.target = self.target_id - - self.model = cv.TrackerVit_create(self.params) - - def init(self, image, roi): - self.model.init(image, roi) - - def infer(self, image): - is_located, bbox = self.model.update(image) - score = self.model.getTrackingScore() - return is_located, bbox, score diff --git a/models/optical_flow_estimation_raft/BSD-3-LICENSE.txt b/models/optical_flow_estimation_raft/BSD-3-LICENSE.txt deleted file mode 100644 index ed13d840..00000000 --- a/models/optical_flow_estimation_raft/BSD-3-LICENSE.txt +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2020, princeton-vl -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/models/optical_flow_estimation_raft/MITLICENSE.txt b/models/optical_flow_estimation_raft/MITLICENSE.txt deleted file mode 100644 index 2aa0e4e0..00000000 --- a/models/optical_flow_estimation_raft/MITLICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Jeong-gi Kwak - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/models/optical_flow_estimation_raft/README.md b/models/optical_flow_estimation_raft/README.md deleted file mode 100644 index bc4f8406..00000000 --- a/models/optical_flow_estimation_raft/README.md +++ /dev/null @@ -1,70 +0,0 @@ -# RAFT -This model is originally created by Zachary Teed and Jia Deng of Princeton University. The source code for the model is at [their repository on GitHub](https://github.com/princeton-vl/RAFT), and the original [research paper](https://arxiv.org/abs/2003.12039) is published on [Arxiv](https://arxiv.org/abs/2003.12039). 
The model was converted to ONNX by [PINTO0309](https://github.com/PINTO0309) in his [model zoo](https://github.com/PINTO0309/PINTO_model_zoo/tree/main/252_RAFT). The ONNX model has several variations depending on the training dataset and input dimesnions. The model used in this demo is trained on Sintel dataset with input size of 360 $\times$ 480. - -**Note**: -- `optical_flow_estimation_raft_2023aug_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -## Demo - -Run any of the following commands to try the demo: - -```shell -# run on camera input -python demo.py - -# run on two images and visualize result -python demo.py --input1 /path/to/image1 --input2 /path/to/image2 -vis - -# run on two images and save result -python demo.py --input1 /path/to/image1 --input2 /path/to/image2 -s - -# run on two images and both save and visualize result -python demo.py --input1 /path/to/image1 --input2 /path/to/image2 -s -vis - -# run on one video and visualize result -python demo.py --video /path/to/video -vis - -# run on one video and save result -python demo.py --video /path/to/video -s - -# run on one video and both save and visualize result -python demo.py --video /path/to/video -s -vis - -# get help regarding various parameters -python demo.py --help -``` - -While running on video, you can press q anytime to stop. The model demo runs on camera input, video input, or takes two images to compute optical flow across frames. The save and vis arguments of the shell command are only valid in the case of using video or two images as input. To run a different variation of the model, such as a model trained on a different dataset or with a different input size, refer to [RAFT ONNX in PINTO Model Zoo](https://github.com/PINTO0309/PINTO_model_zoo/tree/main/252_RAFT) to download your chosen model. 
And if your chosen model has different input shape from 360 $\times$ 480, **change the input shape in raft.py line 15 to the new input shape**. Then, add the model path to the --model argument of the shell command, such as in the following example commands: - -```shell -# run on camera input -python demo.py --model /path/to/model -# run on two images -python demo.py --input1 /path/to/image1 --input2 /path/to/image2 --model /path/to/model -# run on video -python demo.py --video /path/to/video --model /path/to/model -``` - -### Example outputs -The visualization argument displays both image inputs as well as out result. - -![Visualization example](./example_outputs/vis.png) - -The save argument saves the result only. - -![Output example](./example_outputs/result.jpg) - - - -## License - -The original RAFT model is under [BSD-3-Clause license](./BSD-3-LICENSE.txt).
-The conversion of the RAFT model to the ONNX format by [PINTO0309](https://github.com/PINTO0309/PINTO_model_zoo/tree/main/252_RAFT) is under [MIT License](./MITLICENSE.txt).
-Some of the code in demo.py and raft.py is adapted from [ibaiGorordo's repository](https://github.com/ibaiGorordo/ONNX-RAFT-Optical-Flow-Estimation/tree/main) under [BSD-3-Clause license](./BSD-3-LICENSE.txt).
- -## Reference - -- https://arxiv.org/abs/2003.12039 -- https://github.com/princeton-vl/RAFT -- https://github.com/ibaiGorordo/ONNX-RAFT-Optical-Flow-Estimation/tree/main -- https://github.com/PINTO0309/PINTO_model_zoo/tree/main/252_RAFT diff --git a/models/optical_flow_estimation_raft/demo.py b/models/optical_flow_estimation_raft/demo.py deleted file mode 100644 index 821be26c..00000000 --- a/models/optical_flow_estimation_raft/demo.py +++ /dev/null @@ -1,315 +0,0 @@ -import argparse - -import cv2 as cv -import numpy as np - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from raft import Raft - -parser = argparse.ArgumentParser(description='RAFT (https://github.com/princeton-vl/RAFT)') -parser.add_argument('--input1', '-i1', type=str, - help='Usage: Set input1 path to first image, omit if using camera or video.') -parser.add_argument('--input2', '-i2', type=str, - help='Usage: Set input2 path to second image, omit if using camera or video.') -parser.add_argument('--video', '-vid', type=str, - help='Usage: Set video path to desired input video, omit if using camera or two image inputs.') -parser.add_argument('--model', '-m', type=str, default='optical_flow_estimation_raft_2023aug.onnx', - help='Usage: Set model path, defaults to optical_flow_estimation_raft_2023aug.onnx.') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save a file with results. Invalid in case of camera input.') -parser.add_argument('--visual', '-vis', action='store_true', - help='Usage: Specify to open a new window to show results. Invalid in case of camera input.') -args = parser.parse_args() - -UNKNOWN_FLOW_THRESH = 1e7 - -def make_color_wheel(): - """ Generate color wheel according Middlebury color code. 
- - Returns: - Color wheel(numpy.ndarray): Color wheel - """ - RY = 15 - YG = 6 - GC = 4 - CB = 11 - BM = 13 - MR = 6 - - ncols = RY + YG + GC + CB + BM + MR - - colorwheel = np.zeros([ncols, 3]) - - col = 0 - - # RY - colorwheel[0:RY, 0] = 255 - colorwheel[0:RY, 1] = np.transpose(np.floor(255*np.arange(0, RY) / RY)) - col += RY - - # YG - colorwheel[col:col+YG, 0] = 255 - np.transpose(np.floor(255*np.arange(0, YG) / YG)) - colorwheel[col:col+YG, 1] = 255 - col += YG - - # GC - colorwheel[col:col+GC, 1] = 255 - colorwheel[col:col+GC, 2] = np.transpose(np.floor(255*np.arange(0, GC) / GC)) - col += GC - - # CB - colorwheel[col:col+CB, 1] = 255 - np.transpose(np.floor(255*np.arange(0, CB) / CB)) - colorwheel[col:col+CB, 2] = 255 - col += CB - - # BM - colorwheel[col:col+BM, 2] = 255 - colorwheel[col:col+BM, 0] = np.transpose(np.floor(255*np.arange(0, BM) / BM)) - col += + BM - - # MR - colorwheel[col:col+MR, 2] = 255 - np.transpose(np.floor(255 * np.arange(0, MR) / MR)) - colorwheel[col:col+MR, 0] = 255 - - return colorwheel - -colorwheel = make_color_wheel() - -def compute_color(u, v): - """ Compute optical flow color map - - Args: - u(numpy.ndarray): Optical flow horizontal map - v(numpy.ndarray): Optical flow vertical map - - Returns: - img (numpy.ndarray): Optical flow in color code - """ - [h, w] = u.shape - img = np.zeros([h, w, 3]) - nanIdx = np.isnan(u) | np.isnan(v) - u[nanIdx] = 0 - v[nanIdx] = 0 - - ncols = np.size(colorwheel, 0) - - rad = np.sqrt(u**2+v**2) - - a = np.arctan2(-v, -u) / np.pi - - fk = (a+1) / 2 * (ncols - 1) + 1 - - k0 = np.floor(fk).astype(int) - - k1 = k0 + 1 - k1[k1 == ncols+1] = 1 - f = fk - k0 - - for i in range(0, np.size(colorwheel,1)): - tmp = colorwheel[:, i] - col0 = tmp[k0-1] / 255 - col1 = tmp[k1-1] / 255 - col = (1-f) * col0 + f * col1 - - idx = rad <= 1 - col[idx] = 1-rad[idx]*(1-col[idx]) - notidx = np.logical_not(idx) - - col[notidx] *= 0.75 - img[:, :, i] = np.uint8(np.floor(255 * col*(1-nanIdx))) - - return img - -def 
flow_to_image(flow): - """Convert flow into middlebury color code image - - Args: - flow (np.ndarray): The computed flow map - - Returns: - (np.ndarray): Image corresponding to the flow map. - """ - u = flow[:, :, 0] - v = flow[:, :, 1] - - maxu = -999. - maxv = -999. - minu = 999. - minv = 999. - - idxUnknow = (abs(u) > UNKNOWN_FLOW_THRESH) | (abs(v) > UNKNOWN_FLOW_THRESH) - u[idxUnknow] = 0 - v[idxUnknow] = 0 - - maxu = max(maxu, np.max(u)) - minu = min(minu, np.min(u)) - - maxv = max(maxv, np.max(v)) - minv = min(minv, np.min(v)) - - rad = np.sqrt(u ** 2 + v ** 2) - maxrad = max(-1, np.max(rad)) - - u = u/(maxrad + np.finfo(float).eps) - v = v/(maxrad + np.finfo(float).eps) - - img = compute_color(u, v) - - idx = np.repeat(idxUnknow[:, :, np.newaxis], 3, axis=2) - img[idx] = 0 - - return np.uint8(img) - - -def draw_flow(flow_map, img_width, img_height): - """Convert flow map to image - - Args: - flow_map (np.ndarray): The computed flow map - img_width (int): The width of the first input photo - img_height (int): The height of the first input photo - - Returns: - (np.ndarray): Image corresponding to the flow map. - """ - # Convert flow to image - flow_img = flow_to_image(flow_map) - # Convert to BGR - flow_img = cv.cvtColor(flow_img, cv.COLOR_RGB2BGR) - # Resize the depth map to match the input image shape - return cv.resize(flow_img, (img_width, img_height)) - - -def visualize(image1, image2, flow_img): - """ - Combine two input images with resulting flow img and display them together - - Args: - image1 (np.ndarray): The first input image. - imag2 (np.ndarray): The second input image. - flow_img (np.ndarray): The output flow map drawn as an image - - Returns: - combined_img (np.ndarray): The visualized result. 
- """ - combined_img = np.hstack((image1, image2, flow_img)) - cv.namedWindow("Estimated flow", cv.WINDOW_NORMAL) - cv.imshow("Estimated flow", combined_img) - cv.waitKey(0) - return combined_img - - -if __name__ == '__main__': - # Instantiate RAFT - model = Raft(modelPath=args.model) - - if args.input1 is not None and args.input2 is not None: - # Read image - image1 = cv.imread(args.input1) - image2 = cv.imread(args.input2) - img_height, img_width, img_channels = image1.shape - - # Inference - result = model.infer(image1, image2) - - # Create flow image based on the result flow map - flow_image = draw_flow(result, img_width, img_height) - - # Save results if save is true - if args.save: - print('Results saved to result.jpg\n') - cv.imwrite('result.jpg', flow_image) - - # Visualize results in a new window - if args.visual: - input_output_visualization = visualize(image1, image2, flow_image) - - - elif args.video is not None: - cap = cv.VideoCapture(args.video) - FLOW_FRAME_OFFSET = 3 # Number of frame difference to estimate the optical flow - - if args.visual: - cv.namedWindow("Estimated flow", cv.WINDOW_NORMAL) - - frame_list = [] - img_array = [] - frame_num = 0 - while cap.isOpened(): - try: - # Read frame from the video - ret, prev_frame = cap.read() - frame_list.append(prev_frame) - if not ret: - break - except: - continue - - frame_num += 1 - if frame_num <= FLOW_FRAME_OFFSET: - continue - else: - frame_num = 0 - - result = model.infer(frame_list[0], frame_list[-1]) - img_height, img_width, img_channels = frame_list[0].shape - flow_img = draw_flow(result, img_width, img_height) - - alpha = 0.6 - combined_img = cv.addWeighted(frame_list[0], alpha, flow_img, (1-alpha),0) - - if args.visual: - cv.imshow("Estimated flow", combined_img) - img_array.append(combined_img) - # Remove the oldest frame - frame_list.pop(0) - - # Press key q to stop - if cv.waitKey(1) == ord('q'): - break - - cap.release() - - if args.save: - fourcc = cv.VideoWriter_fourcc(*'mp4v') - 
height,width,layers= img_array[0].shape - video = cv.VideoWriter('result.mp4', fourcc, 30.0, (width, height), isColor=True) - for img in img_array: - video.write(img) - video.release() - - cv.destroyAllWindows() - - - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - w = int(cap.get(cv.CAP_PROP_FRAME_WIDTH)) - h = int(cap.get(cv.CAP_PROP_FRAME_HEIGHT)) - - tm = cv.TickMeter() - while cv.waitKey(30) < 0: - hasFrame1, frame1 = cap.read() - hasFrame2, frame2 = cap.read() - if not hasFrame1: - print('First frame was not grabbed!') - break - - if not hasFrame2: - print('Second frame was not grabbed!') - break - - # Inference - tm.start() - result = model.infer(frame1, frame2) - tm.stop() - result = draw_flow(result, w, h) - - # Draw results on the input image - frame = visualize(frame1, frame2, result) - - tm.reset() diff --git a/models/optical_flow_estimation_raft/example_outputs/result.jpg b/models/optical_flow_estimation_raft/example_outputs/result.jpg deleted file mode 100644 index 893c7b15..00000000 --- a/models/optical_flow_estimation_raft/example_outputs/result.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b5b83f6dfeefaffc7e181f343fe134168f713ad01a74b34afb7fecccfa67268d -size 35139 diff --git a/models/optical_flow_estimation_raft/example_outputs/vis.png b/models/optical_flow_estimation_raft/example_outputs/vis.png deleted file mode 100644 index a9860bc2..00000000 --- a/models/optical_flow_estimation_raft/example_outputs/vis.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ecc4e01e285eeaddd195c14095d71a7a89c1c1c41fa45764f702931b62bcaa5d -size 1827108 diff --git a/models/optical_flow_estimation_raft/optical_flow_estimation_raft_2023aug.onnx b/models/optical_flow_estimation_raft/optical_flow_estimation_raft_2023aug.onnx deleted file mode 100644 index b084b822..00000000 --- 
a/models/optical_flow_estimation_raft/optical_flow_estimation_raft_2023aug.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3b479e0447894245b822bf0b7392155d31ccd22bb9b0ee55503066bf6f6f869a -size 64119337 diff --git a/models/optical_flow_estimation_raft/optical_flow_estimation_raft_2023aug_int8bq.onnx b/models/optical_flow_estimation_raft/optical_flow_estimation_raft_2023aug_int8bq.onnx deleted file mode 100644 index f16aaed3..00000000 --- a/models/optical_flow_estimation_raft/optical_flow_estimation_raft_2023aug_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d54cf6e44be6da7efa8843b696b24cf24a8cf03e877dd5207ae5556cfa6f11ec -size 48845109 diff --git a/models/optical_flow_estimation_raft/raft.py b/models/optical_flow_estimation_raft/raft.py deleted file mode 100644 index e0557cce..00000000 --- a/models/optical_flow_estimation_raft/raft.py +++ /dev/null @@ -1,53 +0,0 @@ -# This file is part of OpenCV Zoo project. 
- -import cv2 as cv -import numpy as np - - -class Raft: - def __init__(self, modelPath): - self._modelPath = modelPath - self.model = cv.dnn.readNet(self._modelPath) - - self.input_names = ['0', '1'] - self.first_input_name = self.input_names[0] - self.second_input_name = self.input_names[1] - self.input_shape = [360, 480] # change if going to use different model with different input shape - self.input_height = self.input_shape[0] - self.input_width = self.input_shape[1] - - @property - def name(self): - return self.__class__.__name__ - - def _preprocess(self, image): - - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - img_input = cv.resize(image, (self.input_width,self.input_height)) - img_input = img_input.transpose(2, 0, 1) - img_input = img_input[np.newaxis,:,:,:] - img_input = img_input.astype(np.float32) - return img_input - - def infer(self, image1, image2): - - # Preprocess - input_1 = self._preprocess(image1) - input_2 = self._preprocess(image2) - - # Forward - self.model.setInput(input_1, self.first_input_name) - self.model.setInput(input_2, self.second_input_name) - layer_names = self.model.getLayerNames() - outputlayers = [layer_names[i-1] for i in self.model.getUnconnectedOutLayers()] - output = self.model.forward(outputlayers) - - # Postprocess - results = self._postprocess(output) - - return results - - def _postprocess(self, output): - - flow_map = output[1][0].transpose(1, 2, 0) - return flow_map \ No newline at end of file diff --git a/models/palm_detection_mediapipe/CMakeLists.txt b/models/palm_detection_mediapipe/CMakeLists.txt deleted file mode 100644 index 823d7e17..00000000 --- a/models/palm_detection_mediapipe/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.24.0) -project(opencv_zoo_qrcode_wechatqrcode) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") - -# Find OpenCV -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS 
${OPENCV_INSTALLATION_PATH}) - -add_executable(demo demo.cpp) -target_link_libraries(demo ${OpenCV_LIBS}) diff --git a/models/palm_detection_mediapipe/LICENSE b/models/palm_detection_mediapipe/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/models/palm_detection_mediapipe/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/palm_detection_mediapipe/README.md b/models/palm_detection_mediapipe/README.md deleted file mode 100644 index ff889c13..00000000 --- a/models/palm_detection_mediapipe/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# Palm detector from MediaPipe Handpose - -This model detects palm bounding boxes and palm landmarks, and is converted from TFLite to ONNX using following tools: - -- TFLite model to ONNX: https://github.com/onnx/tensorflow-onnx -- simplified by [onnx-simplifier](https://github.com/daquexian/onnx-simplifier) - -SSD Anchors are generated from [GenMediaPipePalmDectionSSDAnchors](https://github.com/VimalMollyn/GenMediaPipePalmDectionSSDAnchors) - -**Note**: -- Visit https://github.com/google/mediapipe/blob/master/docs/solutions/models.md#hands for models of larger scale. -- `palm_detection_mediapipe_2023feb_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. 
- -## Demo - -### Python - -Run the following commands to try the demo: - -```bash -# detect on camera input -python demo.py -# detect on an image -python demo.py -i /path/to/image -v - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -Install latest OpenCV (with opencv_contrib) and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/demo -# detect on an image -./build/demo -i=/path/to/image -v -# get help messages -./build/demo -h -``` - -### Example outputs - -![webcam demo](./example_outputs/mppalmdet_demo.gif) - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). - -## Reference - -- MediaPipe Handpose: https://developers.google.com/mediapipe/solutions/vision/hand_landmarker -- MediaPipe hands model and model card: https://github.com/google/mediapipe/blob/master/docs/solutions/models.md#hands -- Handpose TFJS:https://github.com/tensorflow/tfjs-models/tree/master/handpose -- Int8 model quantized with rgb evaluation set of FreiHAND: https://lmb.informatik.uni-freiburg.de/resources/datasets/FreihandDataset.en.html \ No newline at end of file diff --git a/models/palm_detection_mediapipe/demo.cpp b/models/palm_detection_mediapipe/demo.cpp deleted file mode 100644 index 96bd07ee..00000000 --- a/models/palm_detection_mediapipe/demo.cpp +++ /dev/null @@ -1,2379 +0,0 @@ -#include -#include -#include -#include -#include - -const std::vector> - backend_target_pairs = { - {cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_CPU}, - {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA}, - {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA_FP16}, - {cv::dnn::DNN_BACKEND_TIMVX, cv::dnn::DNN_TARGET_NPU}, - {cv::dnn::DNN_BACKEND_CANN, cv::dnn::DNN_TARGET_NPU}}; - -class MPPalmDet { - private: - std::string 
model_path; - float nms_threshold; - float score_threshold; - int topK; - int backend_id; - int target_id; - cv::Size input_size; - cv::dnn::Net model; - std::vector anchors; - - std::vector loadAnchors(); - - std::pair preprocess(const cv::Mat& image) { - cv::Point2i pad_bias(0, 0); - float ratio = - std::min(static_cast(input_size.width) / image.cols, - static_cast(input_size.height) / image.rows); - - cv::Mat processed_image; - if (image.rows != input_size.height || image.cols != input_size.width) { - cv::Size ratio_size(static_cast(image.cols * ratio), - static_cast(image.rows * ratio)); - cv::resize(image, processed_image, ratio_size); - - int pad_h = input_size.height - ratio_size.height; - int pad_w = input_size.width - ratio_size.width; - pad_bias.x = pad_w / 2; - pad_bias.y = pad_h / 2; - - cv::copyMakeBorder(processed_image, processed_image, pad_bias.y, - pad_h - pad_bias.y, pad_bias.x, - pad_w - pad_bias.x, cv::BORDER_CONSTANT, - cv::Scalar(0, 0, 0)); - } else { - processed_image = image.clone(); - } - - // Create blob with correct parameters - cv::Mat blob; - cv::dnn::Image2BlobParams params; - params.datalayout = cv::dnn::DNN_LAYOUT_NHWC; - params.ddepth = CV_32F; - params.mean = cv::Scalar::all(0); - params.scalefactor = cv::Scalar::all(1.0 / 255.0); - params.size = input_size; - params.swapRB = true; - params.paddingmode = cv::dnn::DNN_PMODE_NULL; - - blob = cv::dnn::blobFromImageWithParams(processed_image, params); - - pad_bias.x = static_cast(pad_bias.x / ratio); - pad_bias.y = static_cast(pad_bias.y / ratio); - - return {blob, pad_bias}; - } - - std::vector> postprocess( - const std::vector& output_blobs, const cv::Size& original_size, - const cv::Point2i& pad_bias) { - cv::Mat scores = - output_blobs[1].reshape(1, output_blobs[1].total() / 1); - cv::Mat boxes = - output_blobs[0].reshape(1, output_blobs[0].total() / 18); - - std::vector score_vec; - std::vector boxes_vec; - std::vector> landmarks_vec; - - float scale = 
std::max(original_size.height, original_size.width); - - // Process all detections first - for (int i = 0; i < scores.rows; i++) { - float score = 1.0f / (1.0f + std::exp(-scores.at(i, 0))); - - // Extract box and landmark deltas - cv::Mat box_delta = boxes.row(i).colRange(0, 4); - cv::Mat landmark_delta = boxes.row(i).colRange(4, 18); - cv::Point2f anchor = anchors[i]; - - // Normalize box deltas by input size - cv::Point2f cxy_delta(box_delta.at(0) / input_size.width, - box_delta.at(1) / input_size.height); - cv::Point2f wh_delta(box_delta.at(2) / input_size.width, - box_delta.at(3) / input_size.height); - - // Calculate box coordinates (scale first, then subtract pad_bias) - cv::Point2f xy1( - (cxy_delta.x - wh_delta.x / 2 + anchor.x) * scale - pad_bias.x, - (cxy_delta.y - wh_delta.y / 2 + anchor.y) * scale - pad_bias.y); - cv::Point2f xy2( - (cxy_delta.x + wh_delta.x / 2 + anchor.x) * scale - pad_bias.x, - (cxy_delta.y + wh_delta.y / 2 + anchor.y) * scale - pad_bias.y); - - if (score > score_threshold) { - score_vec.push_back(score); - boxes_vec.push_back( - cv::Rect2f(xy1.x, xy1.y, xy2.x - xy1.x, xy2.y - xy1.y)); - - // Process landmarks - std::vector landmarks; - for (int j = 0; j < 7; j++) { - // Normalize by input size - float dx = - landmark_delta.at(j * 2) / input_size.width; - float dy = - landmark_delta.at(j * 2 + 1) / input_size.height; - - // Add anchor - dx += anchor.x; - dy += anchor.y; - - // Scale and subtract pad_bias in one step - dx = dx * scale - pad_bias.x; - dy = dy * scale - pad_bias.y; - - landmarks.push_back(cv::Point2f(dx, dy)); - } - landmarks_vec.push_back(landmarks); - } - } - - // Perform NMS - std::vector indices; - std::vector boxes_int; - for (const auto& box : boxes_vec) { - boxes_int.push_back(cv::Rect( - static_cast(box.x), static_cast(box.y), - static_cast(box.width), static_cast(box.height))); - } - cv::dnn::NMSBoxes(boxes_int, score_vec, score_threshold, nms_threshold, - indices); - - // Prepare results - std::vector> 
results; - for (int idx : indices) { - std::vector result; - result.push_back(boxes_vec[idx].x); - result.push_back(boxes_vec[idx].y); - result.push_back(boxes_vec[idx].x + boxes_vec[idx].width); - result.push_back(boxes_vec[idx].y + boxes_vec[idx].height); - - for (const auto& point : landmarks_vec[idx]) { - result.push_back(point.x); - result.push_back(point.y); - } - result.push_back(score_vec[idx]); - results.push_back(result); - } - - return results; - } - - public: - MPPalmDet(const std::string& modelPath, float nmsThreshold = 0.3f, - float scoreThreshold = 0.5f, int topK = 5000, - int backendId = cv::dnn::DNN_BACKEND_DEFAULT, - int targetId = cv::dnn::DNN_TARGET_CPU) - : model_path(modelPath), - nms_threshold(nmsThreshold), - score_threshold(scoreThreshold), - topK(topK), - backend_id(backendId), - target_id(targetId), - input_size(192, 192) { - model = cv::dnn::readNet(model_path); - model.setPreferableBackend(backend_id); - model.setPreferableTarget(target_id); - anchors = loadAnchors(); - } - - void setBackendAndTarget(int backendId, int targetId) { - backend_id = backendId; - target_id = targetId; - model.setPreferableBackend(backend_id); - model.setPreferableTarget(target_id); - } - - std::vector> infer(const cv::Mat& image) { - std::pair preprocess_result = preprocess(image); - cv::Mat preprocessed_image = preprocess_result.first; - cv::Point2i pad_bias = preprocess_result.second; - model.setInput(preprocessed_image); - std::vector outputs; - model.forward(outputs, model.getUnconnectedOutLayersNames()); - return postprocess(outputs, image.size(), pad_bias); - } -}; - -cv::Mat visualize(const cv::Mat& image, - const std::vector>& results, - bool print_results = false, float fps = 0.0f) { - cv::Mat output = image.clone(); - - if (fps > 0) { - cv::putText(output, cv::format("FPS: %.2f", fps), cv::Point(0, 15), - cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255)); - } - - for (size_t i = 0; i < results.size(); i++) { - const std::vector& result = 
results[i]; - float score = result.back(); - - // Draw box - using direct coordinates - cv::rectangle( - output, - cv::Point(static_cast(result[0]), static_cast(result[1])), - cv::Point(static_cast(result[2]), static_cast(result[3])), - cv::Scalar(0, 255, 0), 2); - - // Put score - using first coordinate of box - cv::putText(output, cv::format("%.4f", score), - cv::Point(static_cast(result[0]), - static_cast(result[1]) + 12), - cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(0, 255, 0)); - - // Draw landmarks - for (size_t j = 0; j < 7; j++) { - cv::Point point(static_cast(result[4 + j * 2]), - static_cast(result[4 + j * 2 + 1])); - cv::circle(output, point, 2, cv::Scalar(0, 0, 255), 2); - } - - if (print_results) { - std::cout << "-----------palm " << i + 1 << "-----------\n"; - std::cout << "score: " << score << "\n"; - std::cout << "palm box: [" << result[0] << ", " << result[1] << ", " - << result[2] << ", " << result[3] << "]\n"; - std::cout << "palm landmarks:\n"; - for (size_t j = 0; j < 7; j++) { - std::cout << "\t(" << result[4 + j * 2] << ", " - << result[4 + j * 2 + 1] << ")\n"; - } - } - } - - return output; -} - -int main(int argc, char** argv) { - cv::CommandLineParser parser( - argc, argv, - "{help h usage ? 
| | print this message }" - "{input i | | path to input image }" - "{model m | palm_detection_mediapipe_2023feb.onnx | path to " - "model file }" - "{backend_target bt | 0 | backend-target pair (0:OpenCV CPU, 1:CUDA, " - "2:CUDA FP16, 3:TIM-VX NPU, 4:CANN NPU) }" - "{score_threshold | 0.8 | minimum confidence threshold }" - "{nms_threshold | 0.3 | NMS threshold }" - "{save s | | save results to file }" - "{vis v | | visualize results }"); - - if (parser.has("help")) { - parser.printMessage(); - return 0; - } - - int backend_target = parser.get("backend_target"); - if (backend_target < 0 || backend_target >= backend_target_pairs.size()) { - std::cerr << "Error: Invalid backend_target value" << std::endl; - return -1; - } - - int backend_id = backend_target_pairs[backend_target].first; - int target_id = backend_target_pairs[backend_target].second; - - // Create detector directly - MPPalmDet detector(parser.get("model"), - parser.get("nms_threshold"), - parser.get("score_threshold"), - 5000, // topK - backend_id, target_id); - - // Process image if input is provided - if (parser.has("input")) { - cv::Mat image = cv::imread(parser.get("input")); - if (image.empty()) { - std::cerr << "Error: Could not read image: " - << parser.get("input") << std::endl; - return -1; - } - - std::vector> results = detector.infer(image); - if (results.empty()) { - std::cout << "Hand not detected" << std::endl; - } - - cv::Mat output = visualize(image, results, true); - - if (parser.has("save")) { - cv::imwrite("result.jpg", output); - std::cout << "Results saved to result.jpg\n" << std::endl; - } - - if (parser.has("vis")) { - cv::namedWindow(parser.get("input"), - cv::WINDOW_AUTOSIZE); - cv::imshow(parser.get("input"), output); - cv::waitKey(0); - } - } - // Process camera feed if no input image - else { - cv::VideoCapture cap(0); - if (!cap.isOpened()) { - std::cerr << "Error: Could not open camera" << std::endl; - return -1; - } - - std::chrono::steady_clock::time_point start_time = - 
std::chrono::steady_clock::now(); - int frame_count = 0; - - while (true) { - cv::Mat frame; - cap >> frame; - if (frame.empty()) { - std::cout << "No frames grabbed!" << std::endl; - break; - } - - std::vector> results = detector.infer(frame); - frame_count++; - - std::chrono::steady_clock::time_point current_time = - std::chrono::steady_clock::now(); - float fps = - frame_count / (std::chrono::duration_cast( - current_time - start_time) - .count() + - 1); - - cv::Mat output = visualize(frame, results, false, fps); - cv::imshow("MPPalmDet Demo", output); - - if (cv::waitKey(1) >= 0) break; - } - } - - return 0; -} - -std::vector MPPalmDet::loadAnchors() { - std::vector anchors = { -{0.02083333f, 0.02083333f}, - {0.02083333f, 0.02083333f}, - {0.06250000f, 0.02083333f}, - {0.06250000f, 0.02083333f}, - {0.10416666f, 0.02083333f}, - {0.10416666f, 0.02083333f}, - {0.14583333f, 0.02083333f}, - {0.14583333f, 0.02083333f}, - {0.18750000f, 0.02083333f}, - {0.18750000f, 0.02083333f}, - {0.22916667f, 0.02083333f}, - {0.22916667f, 0.02083333f}, - {0.27083334f, 0.02083333f}, - {0.27083334f, 0.02083333f}, - {0.31250000f, 0.02083333f}, - {0.31250000f, 0.02083333f}, - {0.35416666f, 0.02083333f}, - {0.35416666f, 0.02083333f}, - {0.39583334f, 0.02083333f}, - {0.39583334f, 0.02083333f}, - {0.43750000f, 0.02083333f}, - {0.43750000f, 0.02083333f}, - {0.47916666f, 0.02083333f}, - {0.47916666f, 0.02083333f}, - {0.52083330f, 0.02083333f}, - {0.52083330f, 0.02083333f}, - {0.56250000f, 0.02083333f}, - {0.56250000f, 0.02083333f}, - {0.60416670f, 0.02083333f}, - {0.60416670f, 0.02083333f}, - {0.64583330f, 0.02083333f}, - {0.64583330f, 0.02083333f}, - {0.68750000f, 0.02083333f}, - {0.68750000f, 0.02083333f}, - {0.72916670f, 0.02083333f}, - {0.72916670f, 0.02083333f}, - {0.77083330f, 0.02083333f}, - {0.77083330f, 0.02083333f}, - {0.81250000f, 0.02083333f}, - {0.81250000f, 0.02083333f}, - {0.85416670f, 0.02083333f}, - {0.85416670f, 0.02083333f}, - {0.89583330f, 0.02083333f}, - {0.89583330f, 
0.02083333f}, - {0.93750000f, 0.02083333f}, - {0.93750000f, 0.02083333f}, - {0.97916670f, 0.02083333f}, - {0.97916670f, 0.02083333f}, - {0.02083333f, 0.06250000f}, - {0.02083333f, 0.06250000f}, - {0.06250000f, 0.06250000f}, - {0.06250000f, 0.06250000f}, - {0.10416666f, 0.06250000f}, - {0.10416666f, 0.06250000f}, - {0.14583333f, 0.06250000f}, - {0.14583333f, 0.06250000f}, - {0.18750000f, 0.06250000f}, - {0.18750000f, 0.06250000f}, - {0.22916667f, 0.06250000f}, - {0.22916667f, 0.06250000f}, - {0.27083334f, 0.06250000f}, - {0.27083334f, 0.06250000f}, - {0.31250000f, 0.06250000f}, - {0.31250000f, 0.06250000f}, - {0.35416666f, 0.06250000f}, - {0.35416666f, 0.06250000f}, - {0.39583334f, 0.06250000f}, - {0.39583334f, 0.06250000f}, - {0.43750000f, 0.06250000f}, - {0.43750000f, 0.06250000f}, - {0.47916666f, 0.06250000f}, - {0.47916666f, 0.06250000f}, - {0.52083330f, 0.06250000f}, - {0.52083330f, 0.06250000f}, - {0.56250000f, 0.06250000f}, - {0.56250000f, 0.06250000f}, - {0.60416670f, 0.06250000f}, - {0.60416670f, 0.06250000f}, - {0.64583330f, 0.06250000f}, - {0.64583330f, 0.06250000f}, - {0.68750000f, 0.06250000f}, - {0.68750000f, 0.06250000f}, - {0.72916670f, 0.06250000f}, - {0.72916670f, 0.06250000f}, - {0.77083330f, 0.06250000f}, - {0.77083330f, 0.06250000f}, - {0.81250000f, 0.06250000f}, - {0.81250000f, 0.06250000f}, - {0.85416670f, 0.06250000f}, - {0.85416670f, 0.06250000f}, - {0.89583330f, 0.06250000f}, - {0.89583330f, 0.06250000f}, - {0.93750000f, 0.06250000f}, - {0.93750000f, 0.06250000f}, - {0.97916670f, 0.06250000f}, - {0.97916670f, 0.06250000f}, - {0.02083333f, 0.10416666f}, - {0.02083333f, 0.10416666f}, - {0.06250000f, 0.10416666f}, - {0.06250000f, 0.10416666f}, - {0.10416666f, 0.10416666f}, - {0.10416666f, 0.10416666f}, - {0.14583333f, 0.10416666f}, - {0.14583333f, 0.10416666f}, - {0.18750000f, 0.10416666f}, - {0.18750000f, 0.10416666f}, - {0.22916667f, 0.10416666f}, - {0.22916667f, 0.10416666f}, - {0.27083334f, 0.10416666f}, - {0.27083334f, 0.10416666f}, - 
{0.31250000f, 0.10416666f}, - {0.31250000f, 0.10416666f}, - {0.35416666f, 0.10416666f}, - {0.35416666f, 0.10416666f}, - {0.39583334f, 0.10416666f}, - {0.39583334f, 0.10416666f}, - {0.43750000f, 0.10416666f}, - {0.43750000f, 0.10416666f}, - {0.47916666f, 0.10416666f}, - {0.47916666f, 0.10416666f}, - {0.52083330f, 0.10416666f}, - {0.52083330f, 0.10416666f}, - {0.56250000f, 0.10416666f}, - {0.56250000f, 0.10416666f}, - {0.60416670f, 0.10416666f}, - {0.60416670f, 0.10416666f}, - {0.64583330f, 0.10416666f}, - {0.64583330f, 0.10416666f}, - {0.68750000f, 0.10416666f}, - {0.68750000f, 0.10416666f}, - {0.72916670f, 0.10416666f}, - {0.72916670f, 0.10416666f}, - {0.77083330f, 0.10416666f}, - {0.77083330f, 0.10416666f}, - {0.81250000f, 0.10416666f}, - {0.81250000f, 0.10416666f}, - {0.85416670f, 0.10416666f}, - {0.85416670f, 0.10416666f}, - {0.89583330f, 0.10416666f}, - {0.89583330f, 0.10416666f}, - {0.93750000f, 0.10416666f}, - {0.93750000f, 0.10416666f}, - {0.97916670f, 0.10416666f}, - {0.97916670f, 0.10416666f}, - {0.02083333f, 0.14583333f}, - {0.02083333f, 0.14583333f}, - {0.06250000f, 0.14583333f}, - {0.06250000f, 0.14583333f}, - {0.10416666f, 0.14583333f}, - {0.10416666f, 0.14583333f}, - {0.14583333f, 0.14583333f}, - {0.14583333f, 0.14583333f}, - {0.18750000f, 0.14583333f}, - {0.18750000f, 0.14583333f}, - {0.22916667f, 0.14583333f}, - {0.22916667f, 0.14583333f}, - {0.27083334f, 0.14583333f}, - {0.27083334f, 0.14583333f}, - {0.31250000f, 0.14583333f}, - {0.31250000f, 0.14583333f}, - {0.35416666f, 0.14583333f}, - {0.35416666f, 0.14583333f}, - {0.39583334f, 0.14583333f}, - {0.39583334f, 0.14583333f}, - {0.43750000f, 0.14583333f}, - {0.43750000f, 0.14583333f}, - {0.47916666f, 0.14583333f}, - {0.47916666f, 0.14583333f}, - {0.52083330f, 0.14583333f}, - {0.52083330f, 0.14583333f}, - {0.56250000f, 0.14583333f}, - {0.56250000f, 0.14583333f}, - {0.60416670f, 0.14583333f}, - {0.60416670f, 0.14583333f}, - {0.64583330f, 0.14583333f}, - {0.64583330f, 0.14583333f}, - {0.68750000f, 
0.14583333f}, - {0.68750000f, 0.14583333f}, - {0.72916670f, 0.14583333f}, - {0.72916670f, 0.14583333f}, - {0.77083330f, 0.14583333f}, - {0.77083330f, 0.14583333f}, - {0.81250000f, 0.14583333f}, - {0.81250000f, 0.14583333f}, - {0.85416670f, 0.14583333f}, - {0.85416670f, 0.14583333f}, - {0.89583330f, 0.14583333f}, - {0.89583330f, 0.14583333f}, - {0.93750000f, 0.14583333f}, - {0.93750000f, 0.14583333f}, - {0.97916670f, 0.14583333f}, - {0.97916670f, 0.14583333f}, - {0.02083333f, 0.18750000f}, - {0.02083333f, 0.18750000f}, - {0.06250000f, 0.18750000f}, - {0.06250000f, 0.18750000f}, - {0.10416666f, 0.18750000f}, - {0.10416666f, 0.18750000f}, - {0.14583333f, 0.18750000f}, - {0.14583333f, 0.18750000f}, - {0.18750000f, 0.18750000f}, - {0.18750000f, 0.18750000f}, - {0.22916667f, 0.18750000f}, - {0.22916667f, 0.18750000f}, - {0.27083334f, 0.18750000f}, - {0.27083334f, 0.18750000f}, - {0.31250000f, 0.18750000f}, - {0.31250000f, 0.18750000f}, - {0.35416666f, 0.18750000f}, - {0.35416666f, 0.18750000f}, - {0.39583334f, 0.18750000f}, - {0.39583334f, 0.18750000f}, - {0.43750000f, 0.18750000f}, - {0.43750000f, 0.18750000f}, - {0.47916666f, 0.18750000f}, - {0.47916666f, 0.18750000f}, - {0.52083330f, 0.18750000f}, - {0.52083330f, 0.18750000f}, - {0.56250000f, 0.18750000f}, - {0.56250000f, 0.18750000f}, - {0.60416670f, 0.18750000f}, - {0.60416670f, 0.18750000f}, - {0.64583330f, 0.18750000f}, - {0.64583330f, 0.18750000f}, - {0.68750000f, 0.18750000f}, - {0.68750000f, 0.18750000f}, - {0.72916670f, 0.18750000f}, - {0.72916670f, 0.18750000f}, - {0.77083330f, 0.18750000f}, - {0.77083330f, 0.18750000f}, - {0.81250000f, 0.18750000f}, - {0.81250000f, 0.18750000f}, - {0.85416670f, 0.18750000f}, - {0.85416670f, 0.18750000f}, - {0.89583330f, 0.18750000f}, - {0.89583330f, 0.18750000f}, - {0.93750000f, 0.18750000f}, - {0.93750000f, 0.18750000f}, - {0.97916670f, 0.18750000f}, - {0.97916670f, 0.18750000f}, - {0.02083333f, 0.22916667f}, - {0.02083333f, 0.22916667f}, - {0.06250000f, 0.22916667f}, - 
{0.06250000f, 0.22916667f}, - {0.10416666f, 0.22916667f}, - {0.10416666f, 0.22916667f}, - {0.14583333f, 0.22916667f}, - {0.14583333f, 0.22916667f}, - {0.18750000f, 0.22916667f}, - {0.18750000f, 0.22916667f}, - {0.22916667f, 0.22916667f}, - {0.22916667f, 0.22916667f}, - {0.27083334f, 0.22916667f}, - {0.27083334f, 0.22916667f}, - {0.31250000f, 0.22916667f}, - {0.31250000f, 0.22916667f}, - {0.35416666f, 0.22916667f}, - {0.35416666f, 0.22916667f}, - {0.39583334f, 0.22916667f}, - {0.39583334f, 0.22916667f}, - {0.43750000f, 0.22916667f}, - {0.43750000f, 0.22916667f}, - {0.47916666f, 0.22916667f}, - {0.47916666f, 0.22916667f}, - {0.52083330f, 0.22916667f}, - {0.52083330f, 0.22916667f}, - {0.56250000f, 0.22916667f}, - {0.56250000f, 0.22916667f}, - {0.60416670f, 0.22916667f}, - {0.60416670f, 0.22916667f}, - {0.64583330f, 0.22916667f}, - {0.64583330f, 0.22916667f}, - {0.68750000f, 0.22916667f}, - {0.68750000f, 0.22916667f}, - {0.72916670f, 0.22916667f}, - {0.72916670f, 0.22916667f}, - {0.77083330f, 0.22916667f}, - {0.77083330f, 0.22916667f}, - {0.81250000f, 0.22916667f}, - {0.81250000f, 0.22916667f}, - {0.85416670f, 0.22916667f}, - {0.85416670f, 0.22916667f}, - {0.89583330f, 0.22916667f}, - {0.89583330f, 0.22916667f}, - {0.93750000f, 0.22916667f}, - {0.93750000f, 0.22916667f}, - {0.97916670f, 0.22916667f}, - {0.97916670f, 0.22916667f}, - {0.02083333f, 0.27083334f}, - {0.02083333f, 0.27083334f}, - {0.06250000f, 0.27083334f}, - {0.06250000f, 0.27083334f}, - {0.10416666f, 0.27083334f}, - {0.10416666f, 0.27083334f}, - {0.14583333f, 0.27083334f}, - {0.14583333f, 0.27083334f}, - {0.18750000f, 0.27083334f}, - {0.18750000f, 0.27083334f}, - {0.22916667f, 0.27083334f}, - {0.22916667f, 0.27083334f}, - {0.27083334f, 0.27083334f}, - {0.27083334f, 0.27083334f}, - {0.31250000f, 0.27083334f}, - {0.31250000f, 0.27083334f}, - {0.35416666f, 0.27083334f}, - {0.35416666f, 0.27083334f}, - {0.39583334f, 0.27083334f}, - {0.39583334f, 0.27083334f}, - {0.43750000f, 0.27083334f}, - {0.43750000f, 
0.27083334f}, - {0.47916666f, 0.27083334f}, - {0.47916666f, 0.27083334f}, - {0.52083330f, 0.27083334f}, - {0.52083330f, 0.27083334f}, - {0.56250000f, 0.27083334f}, - {0.56250000f, 0.27083334f}, - {0.60416670f, 0.27083334f}, - {0.60416670f, 0.27083334f}, - {0.64583330f, 0.27083334f}, - {0.64583330f, 0.27083334f}, - {0.68750000f, 0.27083334f}, - {0.68750000f, 0.27083334f}, - {0.72916670f, 0.27083334f}, - {0.72916670f, 0.27083334f}, - {0.77083330f, 0.27083334f}, - {0.77083330f, 0.27083334f}, - {0.81250000f, 0.27083334f}, - {0.81250000f, 0.27083334f}, - {0.85416670f, 0.27083334f}, - {0.85416670f, 0.27083334f}, - {0.89583330f, 0.27083334f}, - {0.89583330f, 0.27083334f}, - {0.93750000f, 0.27083334f}, - {0.93750000f, 0.27083334f}, - {0.97916670f, 0.27083334f}, - {0.97916670f, 0.27083334f}, - {0.02083333f, 0.31250000f}, - {0.02083333f, 0.31250000f}, - {0.06250000f, 0.31250000f}, - {0.06250000f, 0.31250000f}, - {0.10416666f, 0.31250000f}, - {0.10416666f, 0.31250000f}, - {0.14583333f, 0.31250000f}, - {0.14583333f, 0.31250000f}, - {0.18750000f, 0.31250000f}, - {0.18750000f, 0.31250000f}, - {0.22916667f, 0.31250000f}, - {0.22916667f, 0.31250000f}, - {0.27083334f, 0.31250000f}, - {0.27083334f, 0.31250000f}, - {0.31250000f, 0.31250000f}, - {0.31250000f, 0.31250000f}, - {0.35416666f, 0.31250000f}, - {0.35416666f, 0.31250000f}, - {0.39583334f, 0.31250000f}, - {0.39583334f, 0.31250000f}, - {0.43750000f, 0.31250000f}, - {0.43750000f, 0.31250000f}, - {0.47916666f, 0.31250000f}, - {0.47916666f, 0.31250000f}, - {0.52083330f, 0.31250000f}, - {0.52083330f, 0.31250000f}, - {0.56250000f, 0.31250000f}, - {0.56250000f, 0.31250000f}, - {0.60416670f, 0.31250000f}, - {0.60416670f, 0.31250000f}, - {0.64583330f, 0.31250000f}, - {0.64583330f, 0.31250000f}, - {0.68750000f, 0.31250000f}, - {0.68750000f, 0.31250000f}, - {0.72916670f, 0.31250000f}, - {0.72916670f, 0.31250000f}, - {0.77083330f, 0.31250000f}, - {0.77083330f, 0.31250000f}, - {0.81250000f, 0.31250000f}, - {0.81250000f, 0.31250000f}, - 
{0.85416670f, 0.31250000f}, - {0.85416670f, 0.31250000f}, - {0.89583330f, 0.31250000f}, - {0.89583330f, 0.31250000f}, - {0.93750000f, 0.31250000f}, - {0.93750000f, 0.31250000f}, - {0.97916670f, 0.31250000f}, - {0.97916670f, 0.31250000f}, - {0.02083333f, 0.35416666f}, - {0.02083333f, 0.35416666f}, - {0.06250000f, 0.35416666f}, - {0.06250000f, 0.35416666f}, - {0.10416666f, 0.35416666f}, - {0.10416666f, 0.35416666f}, - {0.14583333f, 0.35416666f}, - {0.14583333f, 0.35416666f}, - {0.18750000f, 0.35416666f}, - {0.18750000f, 0.35416666f}, - {0.22916667f, 0.35416666f}, - {0.22916667f, 0.35416666f}, - {0.27083334f, 0.35416666f}, - {0.27083334f, 0.35416666f}, - {0.31250000f, 0.35416666f}, - {0.31250000f, 0.35416666f}, - {0.35416666f, 0.35416666f}, - {0.35416666f, 0.35416666f}, - {0.39583334f, 0.35416666f}, - {0.39583334f, 0.35416666f}, - {0.43750000f, 0.35416666f}, - {0.43750000f, 0.35416666f}, - {0.47916666f, 0.35416666f}, - {0.47916666f, 0.35416666f}, - {0.52083330f, 0.35416666f}, - {0.52083330f, 0.35416666f}, - {0.56250000f, 0.35416666f}, - {0.56250000f, 0.35416666f}, - {0.60416670f, 0.35416666f}, - {0.60416670f, 0.35416666f}, - {0.64583330f, 0.35416666f}, - {0.64583330f, 0.35416666f}, - {0.68750000f, 0.35416666f}, - {0.68750000f, 0.35416666f}, - {0.72916670f, 0.35416666f}, - {0.72916670f, 0.35416666f}, - {0.77083330f, 0.35416666f}, - {0.77083330f, 0.35416666f}, - {0.81250000f, 0.35416666f}, - {0.81250000f, 0.35416666f}, - {0.85416670f, 0.35416666f}, - {0.85416670f, 0.35416666f}, - {0.89583330f, 0.35416666f}, - {0.89583330f, 0.35416666f}, - {0.93750000f, 0.35416666f}, - {0.93750000f, 0.35416666f}, - {0.97916670f, 0.35416666f}, - {0.97916670f, 0.35416666f}, - {0.02083333f, 0.39583334f}, - {0.02083333f, 0.39583334f}, - {0.06250000f, 0.39583334f}, - {0.06250000f, 0.39583334f}, - {0.10416666f, 0.39583334f}, - {0.10416666f, 0.39583334f}, - {0.14583333f, 0.39583334f}, - {0.14583333f, 0.39583334f}, - {0.18750000f, 0.39583334f}, - {0.18750000f, 0.39583334f}, - {0.22916667f, 
0.39583334f}, - {0.22916667f, 0.39583334f}, - {0.27083334f, 0.39583334f}, - {0.27083334f, 0.39583334f}, - {0.31250000f, 0.39583334f}, - {0.31250000f, 0.39583334f}, - {0.35416666f, 0.39583334f}, - {0.35416666f, 0.39583334f}, - {0.39583334f, 0.39583334f}, - {0.39583334f, 0.39583334f}, - {0.43750000f, 0.39583334f}, - {0.43750000f, 0.39583334f}, - {0.47916666f, 0.39583334f}, - {0.47916666f, 0.39583334f}, - {0.52083330f, 0.39583334f}, - {0.52083330f, 0.39583334f}, - {0.56250000f, 0.39583334f}, - {0.56250000f, 0.39583334f}, - {0.60416670f, 0.39583334f}, - {0.60416670f, 0.39583334f}, - {0.64583330f, 0.39583334f}, - {0.64583330f, 0.39583334f}, - {0.68750000f, 0.39583334f}, - {0.68750000f, 0.39583334f}, - {0.72916670f, 0.39583334f}, - {0.72916670f, 0.39583334f}, - {0.77083330f, 0.39583334f}, - {0.77083330f, 0.39583334f}, - {0.81250000f, 0.39583334f}, - {0.81250000f, 0.39583334f}, - {0.85416670f, 0.39583334f}, - {0.85416670f, 0.39583334f}, - {0.89583330f, 0.39583334f}, - {0.89583330f, 0.39583334f}, - {0.93750000f, 0.39583334f}, - {0.93750000f, 0.39583334f}, - {0.97916670f, 0.39583334f}, - {0.97916670f, 0.39583334f}, - {0.02083333f, 0.43750000f}, - {0.02083333f, 0.43750000f}, - {0.06250000f, 0.43750000f}, - {0.06250000f, 0.43750000f}, - {0.10416666f, 0.43750000f}, - {0.10416666f, 0.43750000f}, - {0.14583333f, 0.43750000f}, - {0.14583333f, 0.43750000f}, - {0.18750000f, 0.43750000f}, - {0.18750000f, 0.43750000f}, - {0.22916667f, 0.43750000f}, - {0.22916667f, 0.43750000f}, - {0.27083334f, 0.43750000f}, - {0.27083334f, 0.43750000f}, - {0.31250000f, 0.43750000f}, - {0.31250000f, 0.43750000f}, - {0.35416666f, 0.43750000f}, - {0.35416666f, 0.43750000f}, - {0.39583334f, 0.43750000f}, - {0.39583334f, 0.43750000f}, - {0.43750000f, 0.43750000f}, - {0.43750000f, 0.43750000f}, - {0.47916666f, 0.43750000f}, - {0.47916666f, 0.43750000f}, - {0.52083330f, 0.43750000f}, - {0.52083330f, 0.43750000f}, - {0.56250000f, 0.43750000f}, - {0.56250000f, 0.43750000f}, - {0.60416670f, 0.43750000f}, - 
{0.60416670f, 0.43750000f}, - {0.64583330f, 0.43750000f}, - {0.64583330f, 0.43750000f}, - {0.68750000f, 0.43750000f}, - {0.68750000f, 0.43750000f}, - {0.72916670f, 0.43750000f}, - {0.72916670f, 0.43750000f}, - {0.77083330f, 0.43750000f}, - {0.77083330f, 0.43750000f}, - {0.81250000f, 0.43750000f}, - {0.81250000f, 0.43750000f}, - {0.85416670f, 0.43750000f}, - {0.85416670f, 0.43750000f}, - {0.89583330f, 0.43750000f}, - {0.89583330f, 0.43750000f}, - {0.93750000f, 0.43750000f}, - {0.93750000f, 0.43750000f}, - {0.97916670f, 0.43750000f}, - {0.97916670f, 0.43750000f}, - {0.02083333f, 0.47916666f}, - {0.02083333f, 0.47916666f}, - {0.06250000f, 0.47916666f}, - {0.06250000f, 0.47916666f}, - {0.10416666f, 0.47916666f}, - {0.10416666f, 0.47916666f}, - {0.14583333f, 0.47916666f}, - {0.14583333f, 0.47916666f}, - {0.18750000f, 0.47916666f}, - {0.18750000f, 0.47916666f}, - {0.22916667f, 0.47916666f}, - {0.22916667f, 0.47916666f}, - {0.27083334f, 0.47916666f}, - {0.27083334f, 0.47916666f}, - {0.31250000f, 0.47916666f}, - {0.31250000f, 0.47916666f}, - {0.35416666f, 0.47916666f}, - {0.35416666f, 0.47916666f}, - {0.39583334f, 0.47916666f}, - {0.39583334f, 0.47916666f}, - {0.43750000f, 0.47916666f}, - {0.43750000f, 0.47916666f}, - {0.47916666f, 0.47916666f}, - {0.47916666f, 0.47916666f}, - {0.52083330f, 0.47916666f}, - {0.52083330f, 0.47916666f}, - {0.56250000f, 0.47916666f}, - {0.56250000f, 0.47916666f}, - {0.60416670f, 0.47916666f}, - {0.60416670f, 0.47916666f}, - {0.64583330f, 0.47916666f}, - {0.64583330f, 0.47916666f}, - {0.68750000f, 0.47916666f}, - {0.68750000f, 0.47916666f}, - {0.72916670f, 0.47916666f}, - {0.72916670f, 0.47916666f}, - {0.77083330f, 0.47916666f}, - {0.77083330f, 0.47916666f}, - {0.81250000f, 0.47916666f}, - {0.81250000f, 0.47916666f}, - {0.85416670f, 0.47916666f}, - {0.85416670f, 0.47916666f}, - {0.89583330f, 0.47916666f}, - {0.89583330f, 0.47916666f}, - {0.93750000f, 0.47916666f}, - {0.93750000f, 0.47916666f}, - {0.97916670f, 0.47916666f}, - {0.97916670f, 
0.47916666f}, - {0.02083333f, 0.52083330f}, - {0.02083333f, 0.52083330f}, - {0.06250000f, 0.52083330f}, - {0.06250000f, 0.52083330f}, - {0.10416666f, 0.52083330f}, - {0.10416666f, 0.52083330f}, - {0.14583333f, 0.52083330f}, - {0.14583333f, 0.52083330f}, - {0.18750000f, 0.52083330f}, - {0.18750000f, 0.52083330f}, - {0.22916667f, 0.52083330f}, - {0.22916667f, 0.52083330f}, - {0.27083334f, 0.52083330f}, - {0.27083334f, 0.52083330f}, - {0.31250000f, 0.52083330f}, - {0.31250000f, 0.52083330f}, - {0.35416666f, 0.52083330f}, - {0.35416666f, 0.52083330f}, - {0.39583334f, 0.52083330f}, - {0.39583334f, 0.52083330f}, - {0.43750000f, 0.52083330f}, - {0.43750000f, 0.52083330f}, - {0.47916666f, 0.52083330f}, - {0.47916666f, 0.52083330f}, - {0.52083330f, 0.52083330f}, - {0.52083330f, 0.52083330f}, - {0.56250000f, 0.52083330f}, - {0.56250000f, 0.52083330f}, - {0.60416670f, 0.52083330f}, - {0.60416670f, 0.52083330f}, - {0.64583330f, 0.52083330f}, - {0.64583330f, 0.52083330f}, - {0.68750000f, 0.52083330f}, - {0.68750000f, 0.52083330f}, - {0.72916670f, 0.52083330f}, - {0.72916670f, 0.52083330f}, - {0.77083330f, 0.52083330f}, - {0.77083330f, 0.52083330f}, - {0.81250000f, 0.52083330f}, - {0.81250000f, 0.52083330f}, - {0.85416670f, 0.52083330f}, - {0.85416670f, 0.52083330f}, - {0.89583330f, 0.52083330f}, - {0.89583330f, 0.52083330f}, - {0.93750000f, 0.52083330f}, - {0.93750000f, 0.52083330f}, - {0.97916670f, 0.52083330f}, - {0.97916670f, 0.52083330f}, - {0.02083333f, 0.56250000f}, - {0.02083333f, 0.56250000f}, - {0.06250000f, 0.56250000f}, - {0.06250000f, 0.56250000f}, - {0.10416666f, 0.56250000f}, - {0.10416666f, 0.56250000f}, - {0.14583333f, 0.56250000f}, - {0.14583333f, 0.56250000f}, - {0.18750000f, 0.56250000f}, - {0.18750000f, 0.56250000f}, - {0.22916667f, 0.56250000f}, - {0.22916667f, 0.56250000f}, - {0.27083334f, 0.56250000f}, - {0.27083334f, 0.56250000f}, - {0.31250000f, 0.56250000f}, - {0.31250000f, 0.56250000f}, - {0.35416666f, 0.56250000f}, - {0.35416666f, 0.56250000f}, - 
{0.39583334f, 0.56250000f}, - {0.39583334f, 0.56250000f}, - {0.43750000f, 0.56250000f}, - {0.43750000f, 0.56250000f}, - {0.47916666f, 0.56250000f}, - {0.47916666f, 0.56250000f}, - {0.52083330f, 0.56250000f}, - {0.52083330f, 0.56250000f}, - {0.56250000f, 0.56250000f}, - {0.56250000f, 0.56250000f}, - {0.60416670f, 0.56250000f}, - {0.60416670f, 0.56250000f}, - {0.64583330f, 0.56250000f}, - {0.64583330f, 0.56250000f}, - {0.68750000f, 0.56250000f}, - {0.68750000f, 0.56250000f}, - {0.72916670f, 0.56250000f}, - {0.72916670f, 0.56250000f}, - {0.77083330f, 0.56250000f}, - {0.77083330f, 0.56250000f}, - {0.81250000f, 0.56250000f}, - {0.81250000f, 0.56250000f}, - {0.85416670f, 0.56250000f}, - {0.85416670f, 0.56250000f}, - {0.89583330f, 0.56250000f}, - {0.89583330f, 0.56250000f}, - {0.93750000f, 0.56250000f}, - {0.93750000f, 0.56250000f}, - {0.97916670f, 0.56250000f}, - {0.97916670f, 0.56250000f}, - {0.02083333f, 0.60416670f}, - {0.02083333f, 0.60416670f}, - {0.06250000f, 0.60416670f}, - {0.06250000f, 0.60416670f}, - {0.10416666f, 0.60416670f}, - {0.10416666f, 0.60416670f}, - {0.14583333f, 0.60416670f}, - {0.14583333f, 0.60416670f}, - {0.18750000f, 0.60416670f}, - {0.18750000f, 0.60416670f}, - {0.22916667f, 0.60416670f}, - {0.22916667f, 0.60416670f}, - {0.27083334f, 0.60416670f}, - {0.27083334f, 0.60416670f}, - {0.31250000f, 0.60416670f}, - {0.31250000f, 0.60416670f}, - {0.35416666f, 0.60416670f}, - {0.35416666f, 0.60416670f}, - {0.39583334f, 0.60416670f}, - {0.39583334f, 0.60416670f}, - {0.43750000f, 0.60416670f}, - {0.43750000f, 0.60416670f}, - {0.47916666f, 0.60416670f}, - {0.47916666f, 0.60416670f}, - {0.52083330f, 0.60416670f}, - {0.52083330f, 0.60416670f}, - {0.56250000f, 0.60416670f}, - {0.56250000f, 0.60416670f}, - {0.60416670f, 0.60416670f}, - {0.60416670f, 0.60416670f}, - {0.64583330f, 0.60416670f}, - {0.64583330f, 0.60416670f}, - {0.68750000f, 0.60416670f}, - {0.68750000f, 0.60416670f}, - {0.72916670f, 0.60416670f}, - {0.72916670f, 0.60416670f}, - {0.77083330f, 
0.60416670f}, - {0.77083330f, 0.60416670f}, - {0.81250000f, 0.60416670f}, - {0.81250000f, 0.60416670f}, - {0.85416670f, 0.60416670f}, - {0.85416670f, 0.60416670f}, - {0.89583330f, 0.60416670f}, - {0.89583330f, 0.60416670f}, - {0.93750000f, 0.60416670f}, - {0.93750000f, 0.60416670f}, - {0.97916670f, 0.60416670f}, - {0.97916670f, 0.60416670f}, - {0.02083333f, 0.64583330f}, - {0.02083333f, 0.64583330f}, - {0.06250000f, 0.64583330f}, - {0.06250000f, 0.64583330f}, - {0.10416666f, 0.64583330f}, - {0.10416666f, 0.64583330f}, - {0.14583333f, 0.64583330f}, - {0.14583333f, 0.64583330f}, - {0.18750000f, 0.64583330f}, - {0.18750000f, 0.64583330f}, - {0.22916667f, 0.64583330f}, - {0.22916667f, 0.64583330f}, - {0.27083334f, 0.64583330f}, - {0.27083334f, 0.64583330f}, - {0.31250000f, 0.64583330f}, - {0.31250000f, 0.64583330f}, - {0.35416666f, 0.64583330f}, - {0.35416666f, 0.64583330f}, - {0.39583334f, 0.64583330f}, - {0.39583334f, 0.64583330f}, - {0.43750000f, 0.64583330f}, - {0.43750000f, 0.64583330f}, - {0.47916666f, 0.64583330f}, - {0.47916666f, 0.64583330f}, - {0.52083330f, 0.64583330f}, - {0.52083330f, 0.64583330f}, - {0.56250000f, 0.64583330f}, - {0.56250000f, 0.64583330f}, - {0.60416670f, 0.64583330f}, - {0.60416670f, 0.64583330f}, - {0.64583330f, 0.64583330f}, - {0.64583330f, 0.64583330f}, - {0.68750000f, 0.64583330f}, - {0.68750000f, 0.64583330f}, - {0.72916670f, 0.64583330f}, - {0.72916670f, 0.64583330f}, - {0.77083330f, 0.64583330f}, - {0.77083330f, 0.64583330f}, - {0.81250000f, 0.64583330f}, - {0.81250000f, 0.64583330f}, - {0.85416670f, 0.64583330f}, - {0.85416670f, 0.64583330f}, - {0.89583330f, 0.64583330f}, - {0.89583330f, 0.64583330f}, - {0.93750000f, 0.64583330f}, - {0.93750000f, 0.64583330f}, - {0.97916670f, 0.64583330f}, - {0.97916670f, 0.64583330f}, - {0.02083333f, 0.68750000f}, - {0.02083333f, 0.68750000f}, - {0.06250000f, 0.68750000f}, - {0.06250000f, 0.68750000f}, - {0.10416666f, 0.68750000f}, - {0.10416666f, 0.68750000f}, - {0.14583333f, 0.68750000f}, - 
{0.14583333f, 0.68750000f}, - {0.18750000f, 0.68750000f}, - {0.18750000f, 0.68750000f}, - {0.22916667f, 0.68750000f}, - {0.22916667f, 0.68750000f}, - {0.27083334f, 0.68750000f}, - {0.27083334f, 0.68750000f}, - {0.31250000f, 0.68750000f}, - {0.31250000f, 0.68750000f}, - {0.35416666f, 0.68750000f}, - {0.35416666f, 0.68750000f}, - {0.39583334f, 0.68750000f}, - {0.39583334f, 0.68750000f}, - {0.43750000f, 0.68750000f}, - {0.43750000f, 0.68750000f}, - {0.47916666f, 0.68750000f}, - {0.47916666f, 0.68750000f}, - {0.52083330f, 0.68750000f}, - {0.52083330f, 0.68750000f}, - {0.56250000f, 0.68750000f}, - {0.56250000f, 0.68750000f}, - {0.60416670f, 0.68750000f}, - {0.60416670f, 0.68750000f}, - {0.64583330f, 0.68750000f}, - {0.64583330f, 0.68750000f}, - {0.68750000f, 0.68750000f}, - {0.68750000f, 0.68750000f}, - {0.72916670f, 0.68750000f}, - {0.72916670f, 0.68750000f}, - {0.77083330f, 0.68750000f}, - {0.77083330f, 0.68750000f}, - {0.81250000f, 0.68750000f}, - {0.81250000f, 0.68750000f}, - {0.85416670f, 0.68750000f}, - {0.85416670f, 0.68750000f}, - {0.89583330f, 0.68750000f}, - {0.89583330f, 0.68750000f}, - {0.93750000f, 0.68750000f}, - {0.93750000f, 0.68750000f}, - {0.97916670f, 0.68750000f}, - {0.97916670f, 0.68750000f}, - {0.02083333f, 0.72916670f}, - {0.02083333f, 0.72916670f}, - {0.06250000f, 0.72916670f}, - {0.06250000f, 0.72916670f}, - {0.10416666f, 0.72916670f}, - {0.10416666f, 0.72916670f}, - {0.14583333f, 0.72916670f}, - {0.14583333f, 0.72916670f}, - {0.18750000f, 0.72916670f}, - {0.18750000f, 0.72916670f}, - {0.22916667f, 0.72916670f}, - {0.22916667f, 0.72916670f}, - {0.27083334f, 0.72916670f}, - {0.27083334f, 0.72916670f}, - {0.31250000f, 0.72916670f}, - {0.31250000f, 0.72916670f}, - {0.35416666f, 0.72916670f}, - {0.35416666f, 0.72916670f}, - {0.39583334f, 0.72916670f}, - {0.39583334f, 0.72916670f}, - {0.43750000f, 0.72916670f}, - {0.43750000f, 0.72916670f}, - {0.47916666f, 0.72916670f}, - {0.47916666f, 0.72916670f}, - {0.52083330f, 0.72916670f}, - {0.52083330f, 
0.72916670f}, - {0.56250000f, 0.72916670f}, - {0.56250000f, 0.72916670f}, - {0.60416670f, 0.72916670f}, - {0.60416670f, 0.72916670f}, - {0.64583330f, 0.72916670f}, - {0.64583330f, 0.72916670f}, - {0.68750000f, 0.72916670f}, - {0.68750000f, 0.72916670f}, - {0.72916670f, 0.72916670f}, - {0.72916670f, 0.72916670f}, - {0.77083330f, 0.72916670f}, - {0.77083330f, 0.72916670f}, - {0.81250000f, 0.72916670f}, - {0.81250000f, 0.72916670f}, - {0.85416670f, 0.72916670f}, - {0.85416670f, 0.72916670f}, - {0.89583330f, 0.72916670f}, - {0.89583330f, 0.72916670f}, - {0.93750000f, 0.72916670f}, - {0.93750000f, 0.72916670f}, - {0.97916670f, 0.72916670f}, - {0.97916670f, 0.72916670f}, - {0.02083333f, 0.77083330f}, - {0.02083333f, 0.77083330f}, - {0.06250000f, 0.77083330f}, - {0.06250000f, 0.77083330f}, - {0.10416666f, 0.77083330f}, - {0.10416666f, 0.77083330f}, - {0.14583333f, 0.77083330f}, - {0.14583333f, 0.77083330f}, - {0.18750000f, 0.77083330f}, - {0.18750000f, 0.77083330f}, - {0.22916667f, 0.77083330f}, - {0.22916667f, 0.77083330f}, - {0.27083334f, 0.77083330f}, - {0.27083334f, 0.77083330f}, - {0.31250000f, 0.77083330f}, - {0.31250000f, 0.77083330f}, - {0.35416666f, 0.77083330f}, - {0.35416666f, 0.77083330f}, - {0.39583334f, 0.77083330f}, - {0.39583334f, 0.77083330f}, - {0.43750000f, 0.77083330f}, - {0.43750000f, 0.77083330f}, - {0.47916666f, 0.77083330f}, - {0.47916666f, 0.77083330f}, - {0.52083330f, 0.77083330f}, - {0.52083330f, 0.77083330f}, - {0.56250000f, 0.77083330f}, - {0.56250000f, 0.77083330f}, - {0.60416670f, 0.77083330f}, - {0.60416670f, 0.77083330f}, - {0.64583330f, 0.77083330f}, - {0.64583330f, 0.77083330f}, - {0.68750000f, 0.77083330f}, - {0.68750000f, 0.77083330f}, - {0.72916670f, 0.77083330f}, - {0.72916670f, 0.77083330f}, - {0.77083330f, 0.77083330f}, - {0.77083330f, 0.77083330f}, - {0.81250000f, 0.77083330f}, - {0.81250000f, 0.77083330f}, - {0.85416670f, 0.77083330f}, - {0.85416670f, 0.77083330f}, - {0.89583330f, 0.77083330f}, - {0.89583330f, 0.77083330f}, - 
{0.93750000f, 0.77083330f}, - {0.93750000f, 0.77083330f}, - {0.97916670f, 0.77083330f}, - {0.97916670f, 0.77083330f}, - {0.02083333f, 0.81250000f}, - {0.02083333f, 0.81250000f}, - {0.06250000f, 0.81250000f}, - {0.06250000f, 0.81250000f}, - {0.10416666f, 0.81250000f}, - {0.10416666f, 0.81250000f}, - {0.14583333f, 0.81250000f}, - {0.14583333f, 0.81250000f}, - {0.18750000f, 0.81250000f}, - {0.18750000f, 0.81250000f}, - {0.22916667f, 0.81250000f}, - {0.22916667f, 0.81250000f}, - {0.27083334f, 0.81250000f}, - {0.27083334f, 0.81250000f}, - {0.31250000f, 0.81250000f}, - {0.31250000f, 0.81250000f}, - {0.35416666f, 0.81250000f}, - {0.35416666f, 0.81250000f}, - {0.39583334f, 0.81250000f}, - {0.39583334f, 0.81250000f}, - {0.43750000f, 0.81250000f}, - {0.43750000f, 0.81250000f}, - {0.47916666f, 0.81250000f}, - {0.47916666f, 0.81250000f}, - {0.52083330f, 0.81250000f}, - {0.52083330f, 0.81250000f}, - {0.56250000f, 0.81250000f}, - {0.56250000f, 0.81250000f}, - {0.60416670f, 0.81250000f}, - {0.60416670f, 0.81250000f}, - {0.64583330f, 0.81250000f}, - {0.64583330f, 0.81250000f}, - {0.68750000f, 0.81250000f}, - {0.68750000f, 0.81250000f}, - {0.72916670f, 0.81250000f}, - {0.72916670f, 0.81250000f}, - {0.77083330f, 0.81250000f}, - {0.77083330f, 0.81250000f}, - {0.81250000f, 0.81250000f}, - {0.81250000f, 0.81250000f}, - {0.85416670f, 0.81250000f}, - {0.85416670f, 0.81250000f}, - {0.89583330f, 0.81250000f}, - {0.89583330f, 0.81250000f}, - {0.93750000f, 0.81250000f}, - {0.93750000f, 0.81250000f}, - {0.97916670f, 0.81250000f}, - {0.97916670f, 0.81250000f}, - {0.02083333f, 0.85416670f}, - {0.02083333f, 0.85416670f}, - {0.06250000f, 0.85416670f}, - {0.06250000f, 0.85416670f}, - {0.10416666f, 0.85416670f}, - {0.10416666f, 0.85416670f}, - {0.14583333f, 0.85416670f}, - {0.14583333f, 0.85416670f}, - {0.18750000f, 0.85416670f}, - {0.18750000f, 0.85416670f}, - {0.22916667f, 0.85416670f}, - {0.22916667f, 0.85416670f}, - {0.27083334f, 0.85416670f}, - {0.27083334f, 0.85416670f}, - {0.31250000f, 
0.85416670f}, - {0.31250000f, 0.85416670f}, - {0.35416666f, 0.85416670f}, - {0.35416666f, 0.85416670f}, - {0.39583334f, 0.85416670f}, - {0.39583334f, 0.85416670f}, - {0.43750000f, 0.85416670f}, - {0.43750000f, 0.85416670f}, - {0.47916666f, 0.85416670f}, - {0.47916666f, 0.85416670f}, - {0.52083330f, 0.85416670f}, - {0.52083330f, 0.85416670f}, - {0.56250000f, 0.85416670f}, - {0.56250000f, 0.85416670f}, - {0.60416670f, 0.85416670f}, - {0.60416670f, 0.85416670f}, - {0.64583330f, 0.85416670f}, - {0.64583330f, 0.85416670f}, - {0.68750000f, 0.85416670f}, - {0.68750000f, 0.85416670f}, - {0.72916670f, 0.85416670f}, - {0.72916670f, 0.85416670f}, - {0.77083330f, 0.85416670f}, - {0.77083330f, 0.85416670f}, - {0.81250000f, 0.85416670f}, - {0.81250000f, 0.85416670f}, - {0.85416670f, 0.85416670f}, - {0.85416670f, 0.85416670f}, - {0.89583330f, 0.85416670f}, - {0.89583330f, 0.85416670f}, - {0.93750000f, 0.85416670f}, - {0.93750000f, 0.85416670f}, - {0.97916670f, 0.85416670f}, - {0.97916670f, 0.85416670f}, - {0.02083333f, 0.89583330f}, - {0.02083333f, 0.89583330f}, - {0.06250000f, 0.89583330f}, - {0.06250000f, 0.89583330f}, - {0.10416666f, 0.89583330f}, - {0.10416666f, 0.89583330f}, - {0.14583333f, 0.89583330f}, - {0.14583333f, 0.89583330f}, - {0.18750000f, 0.89583330f}, - {0.18750000f, 0.89583330f}, - {0.22916667f, 0.89583330f}, - {0.22916667f, 0.89583330f}, - {0.27083334f, 0.89583330f}, - {0.27083334f, 0.89583330f}, - {0.31250000f, 0.89583330f}, - {0.31250000f, 0.89583330f}, - {0.35416666f, 0.89583330f}, - {0.35416666f, 0.89583330f}, - {0.39583334f, 0.89583330f}, - {0.39583334f, 0.89583330f}, - {0.43750000f, 0.89583330f}, - {0.43750000f, 0.89583330f}, - {0.47916666f, 0.89583330f}, - {0.47916666f, 0.89583330f}, - {0.52083330f, 0.89583330f}, - {0.52083330f, 0.89583330f}, - {0.56250000f, 0.89583330f}, - {0.56250000f, 0.89583330f}, - {0.60416670f, 0.89583330f}, - {0.60416670f, 0.89583330f}, - {0.64583330f, 0.89583330f}, - {0.64583330f, 0.89583330f}, - {0.68750000f, 0.89583330f}, - 
{0.68750000f, 0.89583330f}, - {0.72916670f, 0.89583330f}, - {0.72916670f, 0.89583330f}, - {0.77083330f, 0.89583330f}, - {0.77083330f, 0.89583330f}, - {0.81250000f, 0.89583330f}, - {0.81250000f, 0.89583330f}, - {0.85416670f, 0.89583330f}, - {0.85416670f, 0.89583330f}, - {0.89583330f, 0.89583330f}, - {0.89583330f, 0.89583330f}, - {0.93750000f, 0.89583330f}, - {0.93750000f, 0.89583330f}, - {0.97916670f, 0.89583330f}, - {0.97916670f, 0.89583330f}, - {0.02083333f, 0.93750000f}, - {0.02083333f, 0.93750000f}, - {0.06250000f, 0.93750000f}, - {0.06250000f, 0.93750000f}, - {0.10416666f, 0.93750000f}, - {0.10416666f, 0.93750000f}, - {0.14583333f, 0.93750000f}, - {0.14583333f, 0.93750000f}, - {0.18750000f, 0.93750000f}, - {0.18750000f, 0.93750000f}, - {0.22916667f, 0.93750000f}, - {0.22916667f, 0.93750000f}, - {0.27083334f, 0.93750000f}, - {0.27083334f, 0.93750000f}, - {0.31250000f, 0.93750000f}, - {0.31250000f, 0.93750000f}, - {0.35416666f, 0.93750000f}, - {0.35416666f, 0.93750000f}, - {0.39583334f, 0.93750000f}, - {0.39583334f, 0.93750000f}, - {0.43750000f, 0.93750000f}, - {0.43750000f, 0.93750000f}, - {0.47916666f, 0.93750000f}, - {0.47916666f, 0.93750000f}, - {0.52083330f, 0.93750000f}, - {0.52083330f, 0.93750000f}, - {0.56250000f, 0.93750000f}, - {0.56250000f, 0.93750000f}, - {0.60416670f, 0.93750000f}, - {0.60416670f, 0.93750000f}, - {0.64583330f, 0.93750000f}, - {0.64583330f, 0.93750000f}, - {0.68750000f, 0.93750000f}, - {0.68750000f, 0.93750000f}, - {0.72916670f, 0.93750000f}, - {0.72916670f, 0.93750000f}, - {0.77083330f, 0.93750000f}, - {0.77083330f, 0.93750000f}, - {0.81250000f, 0.93750000f}, - {0.81250000f, 0.93750000f}, - {0.85416670f, 0.93750000f}, - {0.85416670f, 0.93750000f}, - {0.89583330f, 0.93750000f}, - {0.89583330f, 0.93750000f}, - {0.93750000f, 0.93750000f}, - {0.93750000f, 0.93750000f}, - {0.97916670f, 0.93750000f}, - {0.97916670f, 0.93750000f}, - {0.02083333f, 0.97916670f}, - {0.02083333f, 0.97916670f}, - {0.06250000f, 0.97916670f}, - {0.06250000f, 
0.97916670f}, - {0.10416666f, 0.97916670f}, - {0.10416666f, 0.97916670f}, - {0.14583333f, 0.97916670f}, - {0.14583333f, 0.97916670f}, - {0.18750000f, 0.97916670f}, - {0.18750000f, 0.97916670f}, - {0.22916667f, 0.97916670f}, - {0.22916667f, 0.97916670f}, - {0.27083334f, 0.97916670f}, - {0.27083334f, 0.97916670f}, - {0.31250000f, 0.97916670f}, - {0.31250000f, 0.97916670f}, - {0.35416666f, 0.97916670f}, - {0.35416666f, 0.97916670f}, - {0.39583334f, 0.97916670f}, - {0.39583334f, 0.97916670f}, - {0.43750000f, 0.97916670f}, - {0.43750000f, 0.97916670f}, - {0.47916666f, 0.97916670f}, - {0.47916666f, 0.97916670f}, - {0.52083330f, 0.97916670f}, - {0.52083330f, 0.97916670f}, - {0.56250000f, 0.97916670f}, - {0.56250000f, 0.97916670f}, - {0.60416670f, 0.97916670f}, - {0.60416670f, 0.97916670f}, - {0.64583330f, 0.97916670f}, - {0.64583330f, 0.97916670f}, - {0.68750000f, 0.97916670f}, - {0.68750000f, 0.97916670f}, - {0.72916670f, 0.97916670f}, - {0.72916670f, 0.97916670f}, - {0.77083330f, 0.97916670f}, - {0.77083330f, 0.97916670f}, - {0.81250000f, 0.97916670f}, - {0.81250000f, 0.97916670f}, - {0.85416670f, 0.97916670f}, - {0.85416670f, 0.97916670f}, - {0.89583330f, 0.97916670f}, - {0.89583330f, 0.97916670f}, - {0.93750000f, 0.97916670f}, - {0.93750000f, 0.97916670f}, - {0.97916670f, 0.97916670f}, - {0.97916670f, 0.97916670f}, - {0.04166667f, 0.04166667f}, - {0.04166667f, 0.04166667f}, - {0.04166667f, 0.04166667f}, - {0.04166667f, 0.04166667f}, - {0.04166667f, 0.04166667f}, - {0.04166667f, 0.04166667f}, - {0.12500000f, 0.04166667f}, - {0.12500000f, 0.04166667f}, - {0.12500000f, 0.04166667f}, - {0.12500000f, 0.04166667f}, - {0.12500000f, 0.04166667f}, - {0.12500000f, 0.04166667f}, - {0.20833333f, 0.04166667f}, - {0.20833333f, 0.04166667f}, - {0.20833333f, 0.04166667f}, - {0.20833333f, 0.04166667f}, - {0.20833333f, 0.04166667f}, - {0.20833333f, 0.04166667f}, - {0.29166666f, 0.04166667f}, - {0.29166666f, 0.04166667f}, - {0.29166666f, 0.04166667f}, - {0.29166666f, 0.04166667f}, - 
{0.29166666f, 0.04166667f}, - {0.29166666f, 0.04166667f}, - {0.37500000f, 0.04166667f}, - {0.37500000f, 0.04166667f}, - {0.37500000f, 0.04166667f}, - {0.37500000f, 0.04166667f}, - {0.37500000f, 0.04166667f}, - {0.37500000f, 0.04166667f}, - {0.45833334f, 0.04166667f}, - {0.45833334f, 0.04166667f}, - {0.45833334f, 0.04166667f}, - {0.45833334f, 0.04166667f}, - {0.45833334f, 0.04166667f}, - {0.45833334f, 0.04166667f}, - {0.54166670f, 0.04166667f}, - {0.54166670f, 0.04166667f}, - {0.54166670f, 0.04166667f}, - {0.54166670f, 0.04166667f}, - {0.54166670f, 0.04166667f}, - {0.54166670f, 0.04166667f}, - {0.62500000f, 0.04166667f}, - {0.62500000f, 0.04166667f}, - {0.62500000f, 0.04166667f}, - {0.62500000f, 0.04166667f}, - {0.62500000f, 0.04166667f}, - {0.62500000f, 0.04166667f}, - {0.70833330f, 0.04166667f}, - {0.70833330f, 0.04166667f}, - {0.70833330f, 0.04166667f}, - {0.70833330f, 0.04166667f}, - {0.70833330f, 0.04166667f}, - {0.70833330f, 0.04166667f}, - {0.79166670f, 0.04166667f}, - {0.79166670f, 0.04166667f}, - {0.79166670f, 0.04166667f}, - {0.79166670f, 0.04166667f}, - {0.79166670f, 0.04166667f}, - {0.79166670f, 0.04166667f}, - {0.87500000f, 0.04166667f}, - {0.87500000f, 0.04166667f}, - {0.87500000f, 0.04166667f}, - {0.87500000f, 0.04166667f}, - {0.87500000f, 0.04166667f}, - {0.87500000f, 0.04166667f}, - {0.95833330f, 0.04166667f}, - {0.95833330f, 0.04166667f}, - {0.95833330f, 0.04166667f}, - {0.95833330f, 0.04166667f}, - {0.95833330f, 0.04166667f}, - {0.95833330f, 0.04166667f}, - {0.04166667f, 0.12500000f}, - {0.04166667f, 0.12500000f}, - {0.04166667f, 0.12500000f}, - {0.04166667f, 0.12500000f}, - {0.04166667f, 0.12500000f}, - {0.04166667f, 0.12500000f}, - {0.12500000f, 0.12500000f}, - {0.12500000f, 0.12500000f}, - {0.12500000f, 0.12500000f}, - {0.12500000f, 0.12500000f}, - {0.12500000f, 0.12500000f}, - {0.12500000f, 0.12500000f}, - {0.20833333f, 0.12500000f}, - {0.20833333f, 0.12500000f}, - {0.20833333f, 0.12500000f}, - {0.20833333f, 0.12500000f}, - {0.20833333f, 
0.12500000f}, - {0.20833333f, 0.12500000f}, - {0.29166666f, 0.12500000f}, - {0.29166666f, 0.12500000f}, - {0.29166666f, 0.12500000f}, - {0.29166666f, 0.12500000f}, - {0.29166666f, 0.12500000f}, - {0.29166666f, 0.12500000f}, - {0.37500000f, 0.12500000f}, - {0.37500000f, 0.12500000f}, - {0.37500000f, 0.12500000f}, - {0.37500000f, 0.12500000f}, - {0.37500000f, 0.12500000f}, - {0.37500000f, 0.12500000f}, - {0.45833334f, 0.12500000f}, - {0.45833334f, 0.12500000f}, - {0.45833334f, 0.12500000f}, - {0.45833334f, 0.12500000f}, - {0.45833334f, 0.12500000f}, - {0.45833334f, 0.12500000f}, - {0.54166670f, 0.12500000f}, - {0.54166670f, 0.12500000f}, - {0.54166670f, 0.12500000f}, - {0.54166670f, 0.12500000f}, - {0.54166670f, 0.12500000f}, - {0.54166670f, 0.12500000f}, - {0.62500000f, 0.12500000f}, - {0.62500000f, 0.12500000f}, - {0.62500000f, 0.12500000f}, - {0.62500000f, 0.12500000f}, - {0.62500000f, 0.12500000f}, - {0.62500000f, 0.12500000f}, - {0.70833330f, 0.12500000f}, - {0.70833330f, 0.12500000f}, - {0.70833330f, 0.12500000f}, - {0.70833330f, 0.12500000f}, - {0.70833330f, 0.12500000f}, - {0.70833330f, 0.12500000f}, - {0.79166670f, 0.12500000f}, - {0.79166670f, 0.12500000f}, - {0.79166670f, 0.12500000f}, - {0.79166670f, 0.12500000f}, - {0.79166670f, 0.12500000f}, - {0.79166670f, 0.12500000f}, - {0.87500000f, 0.12500000f}, - {0.87500000f, 0.12500000f}, - {0.87500000f, 0.12500000f}, - {0.87500000f, 0.12500000f}, - {0.87500000f, 0.12500000f}, - {0.87500000f, 0.12500000f}, - {0.95833330f, 0.12500000f}, - {0.95833330f, 0.12500000f}, - {0.95833330f, 0.12500000f}, - {0.95833330f, 0.12500000f}, - {0.95833330f, 0.12500000f}, - {0.95833330f, 0.12500000f}, - {0.04166667f, 0.20833333f}, - {0.04166667f, 0.20833333f}, - {0.04166667f, 0.20833333f}, - {0.04166667f, 0.20833333f}, - {0.04166667f, 0.20833333f}, - {0.04166667f, 0.20833333f}, - {0.12500000f, 0.20833333f}, - {0.12500000f, 0.20833333f}, - {0.12500000f, 0.20833333f}, - {0.12500000f, 0.20833333f}, - {0.12500000f, 0.20833333f}, - 
{0.12500000f, 0.20833333f}, - {0.20833333f, 0.20833333f}, - {0.20833333f, 0.20833333f}, - {0.20833333f, 0.20833333f}, - {0.20833333f, 0.20833333f}, - {0.20833333f, 0.20833333f}, - {0.20833333f, 0.20833333f}, - {0.29166666f, 0.20833333f}, - {0.29166666f, 0.20833333f}, - {0.29166666f, 0.20833333f}, - {0.29166666f, 0.20833333f}, - {0.29166666f, 0.20833333f}, - {0.29166666f, 0.20833333f}, - {0.37500000f, 0.20833333f}, - {0.37500000f, 0.20833333f}, - {0.37500000f, 0.20833333f}, - {0.37500000f, 0.20833333f}, - {0.37500000f, 0.20833333f}, - {0.37500000f, 0.20833333f}, - {0.45833334f, 0.20833333f}, - {0.45833334f, 0.20833333f}, - {0.45833334f, 0.20833333f}, - {0.45833334f, 0.20833333f}, - {0.45833334f, 0.20833333f}, - {0.45833334f, 0.20833333f}, - {0.54166670f, 0.20833333f}, - {0.54166670f, 0.20833333f}, - {0.54166670f, 0.20833333f}, - {0.54166670f, 0.20833333f}, - {0.54166670f, 0.20833333f}, - {0.54166670f, 0.20833333f}, - {0.62500000f, 0.20833333f}, - {0.62500000f, 0.20833333f}, - {0.62500000f, 0.20833333f}, - {0.62500000f, 0.20833333f}, - {0.62500000f, 0.20833333f}, - {0.62500000f, 0.20833333f}, - {0.70833330f, 0.20833333f}, - {0.70833330f, 0.20833333f}, - {0.70833330f, 0.20833333f}, - {0.70833330f, 0.20833333f}, - {0.70833330f, 0.20833333f}, - {0.70833330f, 0.20833333f}, - {0.79166670f, 0.20833333f}, - {0.79166670f, 0.20833333f}, - {0.79166670f, 0.20833333f}, - {0.79166670f, 0.20833333f}, - {0.79166670f, 0.20833333f}, - {0.79166670f, 0.20833333f}, - {0.87500000f, 0.20833333f}, - {0.87500000f, 0.20833333f}, - {0.87500000f, 0.20833333f}, - {0.87500000f, 0.20833333f}, - {0.87500000f, 0.20833333f}, - {0.87500000f, 0.20833333f}, - {0.95833330f, 0.20833333f}, - {0.95833330f, 0.20833333f}, - {0.95833330f, 0.20833333f}, - {0.95833330f, 0.20833333f}, - {0.95833330f, 0.20833333f}, - {0.95833330f, 0.20833333f}, - {0.04166667f, 0.29166666f}, - {0.04166667f, 0.29166666f}, - {0.04166667f, 0.29166666f}, - {0.04166667f, 0.29166666f}, - {0.04166667f, 0.29166666f}, - {0.04166667f, 
0.29166666f}, - {0.12500000f, 0.29166666f}, - {0.12500000f, 0.29166666f}, - {0.12500000f, 0.29166666f}, - {0.12500000f, 0.29166666f}, - {0.12500000f, 0.29166666f}, - {0.12500000f, 0.29166666f}, - {0.20833333f, 0.29166666f}, - {0.20833333f, 0.29166666f}, - {0.20833333f, 0.29166666f}, - {0.20833333f, 0.29166666f}, - {0.20833333f, 0.29166666f}, - {0.20833333f, 0.29166666f}, - {0.29166666f, 0.29166666f}, - {0.29166666f, 0.29166666f}, - {0.29166666f, 0.29166666f}, - {0.29166666f, 0.29166666f}, - {0.29166666f, 0.29166666f}, - {0.29166666f, 0.29166666f}, - {0.37500000f, 0.29166666f}, - {0.37500000f, 0.29166666f}, - {0.37500000f, 0.29166666f}, - {0.37500000f, 0.29166666f}, - {0.37500000f, 0.29166666f}, - {0.37500000f, 0.29166666f}, - {0.45833334f, 0.29166666f}, - {0.45833334f, 0.29166666f}, - {0.45833334f, 0.29166666f}, - {0.45833334f, 0.29166666f}, - {0.45833334f, 0.29166666f}, - {0.45833334f, 0.29166666f}, - {0.54166670f, 0.29166666f}, - {0.54166670f, 0.29166666f}, - {0.54166670f, 0.29166666f}, - {0.54166670f, 0.29166666f}, - {0.54166670f, 0.29166666f}, - {0.54166670f, 0.29166666f}, - {0.62500000f, 0.29166666f}, - {0.62500000f, 0.29166666f}, - {0.62500000f, 0.29166666f}, - {0.62500000f, 0.29166666f}, - {0.62500000f, 0.29166666f}, - {0.62500000f, 0.29166666f}, - {0.70833330f, 0.29166666f}, - {0.70833330f, 0.29166666f}, - {0.70833330f, 0.29166666f}, - {0.70833330f, 0.29166666f}, - {0.70833330f, 0.29166666f}, - {0.70833330f, 0.29166666f}, - {0.79166670f, 0.29166666f}, - {0.79166670f, 0.29166666f}, - {0.79166670f, 0.29166666f}, - {0.79166670f, 0.29166666f}, - {0.79166670f, 0.29166666f}, - {0.79166670f, 0.29166666f}, - {0.87500000f, 0.29166666f}, - {0.87500000f, 0.29166666f}, - {0.87500000f, 0.29166666f}, - {0.87500000f, 0.29166666f}, - {0.87500000f, 0.29166666f}, - {0.87500000f, 0.29166666f}, - {0.95833330f, 0.29166666f}, - {0.95833330f, 0.29166666f}, - {0.95833330f, 0.29166666f}, - {0.95833330f, 0.29166666f}, - {0.95833330f, 0.29166666f}, - {0.95833330f, 0.29166666f}, - 
{0.04166667f, 0.37500000f}, - {0.04166667f, 0.37500000f}, - {0.04166667f, 0.37500000f}, - {0.04166667f, 0.37500000f}, - {0.04166667f, 0.37500000f}, - {0.04166667f, 0.37500000f}, - {0.12500000f, 0.37500000f}, - {0.12500000f, 0.37500000f}, - {0.12500000f, 0.37500000f}, - {0.12500000f, 0.37500000f}, - {0.12500000f, 0.37500000f}, - {0.12500000f, 0.37500000f}, - {0.20833333f, 0.37500000f}, - {0.20833333f, 0.37500000f}, - {0.20833333f, 0.37500000f}, - {0.20833333f, 0.37500000f}, - {0.20833333f, 0.37500000f}, - {0.20833333f, 0.37500000f}, - {0.29166666f, 0.37500000f}, - {0.29166666f, 0.37500000f}, - {0.29166666f, 0.37500000f}, - {0.29166666f, 0.37500000f}, - {0.29166666f, 0.37500000f}, - {0.29166666f, 0.37500000f}, - {0.37500000f, 0.37500000f}, - {0.37500000f, 0.37500000f}, - {0.37500000f, 0.37500000f}, - {0.37500000f, 0.37500000f}, - {0.37500000f, 0.37500000f}, - {0.37500000f, 0.37500000f}, - {0.45833334f, 0.37500000f}, - {0.45833334f, 0.37500000f}, - {0.45833334f, 0.37500000f}, - {0.45833334f, 0.37500000f}, - {0.45833334f, 0.37500000f}, - {0.45833334f, 0.37500000f}, - {0.54166670f, 0.37500000f}, - {0.54166670f, 0.37500000f}, - {0.54166670f, 0.37500000f}, - {0.54166670f, 0.37500000f}, - {0.54166670f, 0.37500000f}, - {0.54166670f, 0.37500000f}, - {0.62500000f, 0.37500000f}, - {0.62500000f, 0.37500000f}, - {0.62500000f, 0.37500000f}, - {0.62500000f, 0.37500000f}, - {0.62500000f, 0.37500000f}, - {0.62500000f, 0.37500000f}, - {0.70833330f, 0.37500000f}, - {0.70833330f, 0.37500000f}, - {0.70833330f, 0.37500000f}, - {0.70833330f, 0.37500000f}, - {0.70833330f, 0.37500000f}, - {0.70833330f, 0.37500000f}, - {0.79166670f, 0.37500000f}, - {0.79166670f, 0.37500000f}, - {0.79166670f, 0.37500000f}, - {0.79166670f, 0.37500000f}, - {0.79166670f, 0.37500000f}, - {0.79166670f, 0.37500000f}, - {0.87500000f, 0.37500000f}, - {0.87500000f, 0.37500000f}, - {0.87500000f, 0.37500000f}, - {0.87500000f, 0.37500000f}, - {0.87500000f, 0.37500000f}, - {0.87500000f, 0.37500000f}, - {0.95833330f, 
0.37500000f}, - {0.95833330f, 0.37500000f}, - {0.95833330f, 0.37500000f}, - {0.95833330f, 0.37500000f}, - {0.95833330f, 0.37500000f}, - {0.95833330f, 0.37500000f}, - {0.04166667f, 0.45833334f}, - {0.04166667f, 0.45833334f}, - {0.04166667f, 0.45833334f}, - {0.04166667f, 0.45833334f}, - {0.04166667f, 0.45833334f}, - {0.04166667f, 0.45833334f}, - {0.12500000f, 0.45833334f}, - {0.12500000f, 0.45833334f}, - {0.12500000f, 0.45833334f}, - {0.12500000f, 0.45833334f}, - {0.12500000f, 0.45833334f}, - {0.12500000f, 0.45833334f}, - {0.20833333f, 0.45833334f}, - {0.20833333f, 0.45833334f}, - {0.20833333f, 0.45833334f}, - {0.20833333f, 0.45833334f}, - {0.20833333f, 0.45833334f}, - {0.20833333f, 0.45833334f}, - {0.29166666f, 0.45833334f}, - {0.29166666f, 0.45833334f}, - {0.29166666f, 0.45833334f}, - {0.29166666f, 0.45833334f}, - {0.29166666f, 0.45833334f}, - {0.29166666f, 0.45833334f}, - {0.37500000f, 0.45833334f}, - {0.37500000f, 0.45833334f}, - {0.37500000f, 0.45833334f}, - {0.37500000f, 0.45833334f}, - {0.37500000f, 0.45833334f}, - {0.37500000f, 0.45833334f}, - {0.45833334f, 0.45833334f}, - {0.45833334f, 0.45833334f}, - {0.45833334f, 0.45833334f}, - {0.45833334f, 0.45833334f}, - {0.45833334f, 0.45833334f}, - {0.45833334f, 0.45833334f}, - {0.54166670f, 0.45833334f}, - {0.54166670f, 0.45833334f}, - {0.54166670f, 0.45833334f}, - {0.54166670f, 0.45833334f}, - {0.54166670f, 0.45833334f}, - {0.54166670f, 0.45833334f}, - {0.62500000f, 0.45833334f}, - {0.62500000f, 0.45833334f}, - {0.62500000f, 0.45833334f}, - {0.62500000f, 0.45833334f}, - {0.62500000f, 0.45833334f}, - {0.62500000f, 0.45833334f}, - {0.70833330f, 0.45833334f}, - {0.70833330f, 0.45833334f}, - {0.70833330f, 0.45833334f}, - {0.70833330f, 0.45833334f}, - {0.70833330f, 0.45833334f}, - {0.70833330f, 0.45833334f}, - {0.79166670f, 0.45833334f}, - {0.79166670f, 0.45833334f}, - {0.79166670f, 0.45833334f}, - {0.79166670f, 0.45833334f}, - {0.79166670f, 0.45833334f}, - {0.79166670f, 0.45833334f}, - {0.87500000f, 0.45833334f}, - 
{0.87500000f, 0.45833334f}, - {0.87500000f, 0.45833334f}, - {0.87500000f, 0.45833334f}, - {0.87500000f, 0.45833334f}, - {0.87500000f, 0.45833334f}, - {0.95833330f, 0.45833334f}, - {0.95833330f, 0.45833334f}, - {0.95833330f, 0.45833334f}, - {0.95833330f, 0.45833334f}, - {0.95833330f, 0.45833334f}, - {0.95833330f, 0.45833334f}, - {0.04166667f, 0.54166670f}, - {0.04166667f, 0.54166670f}, - {0.04166667f, 0.54166670f}, - {0.04166667f, 0.54166670f}, - {0.04166667f, 0.54166670f}, - {0.04166667f, 0.54166670f}, - {0.12500000f, 0.54166670f}, - {0.12500000f, 0.54166670f}, - {0.12500000f, 0.54166670f}, - {0.12500000f, 0.54166670f}, - {0.12500000f, 0.54166670f}, - {0.12500000f, 0.54166670f}, - {0.20833333f, 0.54166670f}, - {0.20833333f, 0.54166670f}, - {0.20833333f, 0.54166670f}, - {0.20833333f, 0.54166670f}, - {0.20833333f, 0.54166670f}, - {0.20833333f, 0.54166670f}, - {0.29166666f, 0.54166670f}, - {0.29166666f, 0.54166670f}, - {0.29166666f, 0.54166670f}, - {0.29166666f, 0.54166670f}, - {0.29166666f, 0.54166670f}, - {0.29166666f, 0.54166670f}, - {0.37500000f, 0.54166670f}, - {0.37500000f, 0.54166670f}, - {0.37500000f, 0.54166670f}, - {0.37500000f, 0.54166670f}, - {0.37500000f, 0.54166670f}, - {0.37500000f, 0.54166670f}, - {0.45833334f, 0.54166670f}, - {0.45833334f, 0.54166670f}, - {0.45833334f, 0.54166670f}, - {0.45833334f, 0.54166670f}, - {0.45833334f, 0.54166670f}, - {0.45833334f, 0.54166670f}, - {0.54166670f, 0.54166670f}, - {0.54166670f, 0.54166670f}, - {0.54166670f, 0.54166670f}, - {0.54166670f, 0.54166670f}, - {0.54166670f, 0.54166670f}, - {0.54166670f, 0.54166670f}, - {0.62500000f, 0.54166670f}, - {0.62500000f, 0.54166670f}, - {0.62500000f, 0.54166670f}, - {0.62500000f, 0.54166670f}, - {0.62500000f, 0.54166670f}, - {0.62500000f, 0.54166670f}, - {0.70833330f, 0.54166670f}, - {0.70833330f, 0.54166670f}, - {0.70833330f, 0.54166670f}, - {0.70833330f, 0.54166670f}, - {0.70833330f, 0.54166670f}, - {0.70833330f, 0.54166670f}, - {0.79166670f, 0.54166670f}, - {0.79166670f, 
0.54166670f}, - {0.79166670f, 0.54166670f}, - {0.79166670f, 0.54166670f}, - {0.79166670f, 0.54166670f}, - {0.79166670f, 0.54166670f}, - {0.87500000f, 0.54166670f}, - {0.87500000f, 0.54166670f}, - {0.87500000f, 0.54166670f}, - {0.87500000f, 0.54166670f}, - {0.87500000f, 0.54166670f}, - {0.87500000f, 0.54166670f}, - {0.95833330f, 0.54166670f}, - {0.95833330f, 0.54166670f}, - {0.95833330f, 0.54166670f}, - {0.95833330f, 0.54166670f}, - {0.95833330f, 0.54166670f}, - {0.95833330f, 0.54166670f}, - {0.04166667f, 0.62500000f}, - {0.04166667f, 0.62500000f}, - {0.04166667f, 0.62500000f}, - {0.04166667f, 0.62500000f}, - {0.04166667f, 0.62500000f}, - {0.04166667f, 0.62500000f}, - {0.12500000f, 0.62500000f}, - {0.12500000f, 0.62500000f}, - {0.12500000f, 0.62500000f}, - {0.12500000f, 0.62500000f}, - {0.12500000f, 0.62500000f}, - {0.12500000f, 0.62500000f}, - {0.20833333f, 0.62500000f}, - {0.20833333f, 0.62500000f}, - {0.20833333f, 0.62500000f}, - {0.20833333f, 0.62500000f}, - {0.20833333f, 0.62500000f}, - {0.20833333f, 0.62500000f}, - {0.29166666f, 0.62500000f}, - {0.29166666f, 0.62500000f}, - {0.29166666f, 0.62500000f}, - {0.29166666f, 0.62500000f}, - {0.29166666f, 0.62500000f}, - {0.29166666f, 0.62500000f}, - {0.37500000f, 0.62500000f}, - {0.37500000f, 0.62500000f}, - {0.37500000f, 0.62500000f}, - {0.37500000f, 0.62500000f}, - {0.37500000f, 0.62500000f}, - {0.37500000f, 0.62500000f}, - {0.45833334f, 0.62500000f}, - {0.45833334f, 0.62500000f}, - {0.45833334f, 0.62500000f}, - {0.45833334f, 0.62500000f}, - {0.45833334f, 0.62500000f}, - {0.45833334f, 0.62500000f}, - {0.54166670f, 0.62500000f}, - {0.54166670f, 0.62500000f}, - {0.54166670f, 0.62500000f}, - {0.54166670f, 0.62500000f}, - {0.54166670f, 0.62500000f}, - {0.54166670f, 0.62500000f}, - {0.62500000f, 0.62500000f}, - {0.62500000f, 0.62500000f}, - {0.62500000f, 0.62500000f}, - {0.62500000f, 0.62500000f}, - {0.62500000f, 0.62500000f}, - {0.62500000f, 0.62500000f}, - {0.70833330f, 0.62500000f}, - {0.70833330f, 0.62500000f}, - 
{0.70833330f, 0.62500000f}, - {0.70833330f, 0.62500000f}, - {0.70833330f, 0.62500000f}, - {0.70833330f, 0.62500000f}, - {0.79166670f, 0.62500000f}, - {0.79166670f, 0.62500000f}, - {0.79166670f, 0.62500000f}, - {0.79166670f, 0.62500000f}, - {0.79166670f, 0.62500000f}, - {0.79166670f, 0.62500000f}, - {0.87500000f, 0.62500000f}, - {0.87500000f, 0.62500000f}, - {0.87500000f, 0.62500000f}, - {0.87500000f, 0.62500000f}, - {0.87500000f, 0.62500000f}, - {0.87500000f, 0.62500000f}, - {0.95833330f, 0.62500000f}, - {0.95833330f, 0.62500000f}, - {0.95833330f, 0.62500000f}, - {0.95833330f, 0.62500000f}, - {0.95833330f, 0.62500000f}, - {0.95833330f, 0.62500000f}, - {0.04166667f, 0.70833330f}, - {0.04166667f, 0.70833330f}, - {0.04166667f, 0.70833330f}, - {0.04166667f, 0.70833330f}, - {0.04166667f, 0.70833330f}, - {0.04166667f, 0.70833330f}, - {0.12500000f, 0.70833330f}, - {0.12500000f, 0.70833330f}, - {0.12500000f, 0.70833330f}, - {0.12500000f, 0.70833330f}, - {0.12500000f, 0.70833330f}, - {0.12500000f, 0.70833330f}, - {0.20833333f, 0.70833330f}, - {0.20833333f, 0.70833330f}, - {0.20833333f, 0.70833330f}, - {0.20833333f, 0.70833330f}, - {0.20833333f, 0.70833330f}, - {0.20833333f, 0.70833330f}, - {0.29166666f, 0.70833330f}, - {0.29166666f, 0.70833330f}, - {0.29166666f, 0.70833330f}, - {0.29166666f, 0.70833330f}, - {0.29166666f, 0.70833330f}, - {0.29166666f, 0.70833330f}, - {0.37500000f, 0.70833330f}, - {0.37500000f, 0.70833330f}, - {0.37500000f, 0.70833330f}, - {0.37500000f, 0.70833330f}, - {0.37500000f, 0.70833330f}, - {0.37500000f, 0.70833330f}, - {0.45833334f, 0.70833330f}, - {0.45833334f, 0.70833330f}, - {0.45833334f, 0.70833330f}, - {0.45833334f, 0.70833330f}, - {0.45833334f, 0.70833330f}, - {0.45833334f, 0.70833330f}, - {0.54166670f, 0.70833330f}, - {0.54166670f, 0.70833330f}, - {0.54166670f, 0.70833330f}, - {0.54166670f, 0.70833330f}, - {0.54166670f, 0.70833330f}, - {0.54166670f, 0.70833330f}, - {0.62500000f, 0.70833330f}, - {0.62500000f, 0.70833330f}, - {0.62500000f, 
0.70833330f}, - {0.62500000f, 0.70833330f}, - {0.62500000f, 0.70833330f}, - {0.62500000f, 0.70833330f}, - {0.70833330f, 0.70833330f}, - {0.70833330f, 0.70833330f}, - {0.70833330f, 0.70833330f}, - {0.70833330f, 0.70833330f}, - {0.70833330f, 0.70833330f}, - {0.70833330f, 0.70833330f}, - {0.79166670f, 0.70833330f}, - {0.79166670f, 0.70833330f}, - {0.79166670f, 0.70833330f}, - {0.79166670f, 0.70833330f}, - {0.79166670f, 0.70833330f}, - {0.79166670f, 0.70833330f}, - {0.87500000f, 0.70833330f}, - {0.87500000f, 0.70833330f}, - {0.87500000f, 0.70833330f}, - {0.87500000f, 0.70833330f}, - {0.87500000f, 0.70833330f}, - {0.87500000f, 0.70833330f}, - {0.95833330f, 0.70833330f}, - {0.95833330f, 0.70833330f}, - {0.95833330f, 0.70833330f}, - {0.95833330f, 0.70833330f}, - {0.95833330f, 0.70833330f}, - {0.95833330f, 0.70833330f}, - {0.04166667f, 0.79166670f}, - {0.04166667f, 0.79166670f}, - {0.04166667f, 0.79166670f}, - {0.04166667f, 0.79166670f}, - {0.04166667f, 0.79166670f}, - {0.04166667f, 0.79166670f}, - {0.12500000f, 0.79166670f}, - {0.12500000f, 0.79166670f}, - {0.12500000f, 0.79166670f}, - {0.12500000f, 0.79166670f}, - {0.12500000f, 0.79166670f}, - {0.12500000f, 0.79166670f}, - {0.20833333f, 0.79166670f}, - {0.20833333f, 0.79166670f}, - {0.20833333f, 0.79166670f}, - {0.20833333f, 0.79166670f}, - {0.20833333f, 0.79166670f}, - {0.20833333f, 0.79166670f}, - {0.29166666f, 0.79166670f}, - {0.29166666f, 0.79166670f}, - {0.29166666f, 0.79166670f}, - {0.29166666f, 0.79166670f}, - {0.29166666f, 0.79166670f}, - {0.29166666f, 0.79166670f}, - {0.37500000f, 0.79166670f}, - {0.37500000f, 0.79166670f}, - {0.37500000f, 0.79166670f}, - {0.37500000f, 0.79166670f}, - {0.37500000f, 0.79166670f}, - {0.37500000f, 0.79166670f}, - {0.45833334f, 0.79166670f}, - {0.45833334f, 0.79166670f}, - {0.45833334f, 0.79166670f}, - {0.45833334f, 0.79166670f}, - {0.45833334f, 0.79166670f}, - {0.45833334f, 0.79166670f}, - {0.54166670f, 0.79166670f}, - {0.54166670f, 0.79166670f}, - {0.54166670f, 0.79166670f}, - 
{0.54166670f, 0.79166670f}, - {0.54166670f, 0.79166670f}, - {0.54166670f, 0.79166670f}, - {0.62500000f, 0.79166670f}, - {0.62500000f, 0.79166670f}, - {0.62500000f, 0.79166670f}, - {0.62500000f, 0.79166670f}, - {0.62500000f, 0.79166670f}, - {0.62500000f, 0.79166670f}, - {0.70833330f, 0.79166670f}, - {0.70833330f, 0.79166670f}, - {0.70833330f, 0.79166670f}, - {0.70833330f, 0.79166670f}, - {0.70833330f, 0.79166670f}, - {0.70833330f, 0.79166670f}, - {0.79166670f, 0.79166670f}, - {0.79166670f, 0.79166670f}, - {0.79166670f, 0.79166670f}, - {0.79166670f, 0.79166670f}, - {0.79166670f, 0.79166670f}, - {0.79166670f, 0.79166670f}, - {0.87500000f, 0.79166670f}, - {0.87500000f, 0.79166670f}, - {0.87500000f, 0.79166670f}, - {0.87500000f, 0.79166670f}, - {0.87500000f, 0.79166670f}, - {0.87500000f, 0.79166670f}, - {0.95833330f, 0.79166670f}, - {0.95833330f, 0.79166670f}, - {0.95833330f, 0.79166670f}, - {0.95833330f, 0.79166670f}, - {0.95833330f, 0.79166670f}, - {0.95833330f, 0.79166670f}, - {0.04166667f, 0.87500000f}, - {0.04166667f, 0.87500000f}, - {0.04166667f, 0.87500000f}, - {0.04166667f, 0.87500000f}, - {0.04166667f, 0.87500000f}, - {0.04166667f, 0.87500000f}, - {0.12500000f, 0.87500000f}, - {0.12500000f, 0.87500000f}, - {0.12500000f, 0.87500000f}, - {0.12500000f, 0.87500000f}, - {0.12500000f, 0.87500000f}, - {0.12500000f, 0.87500000f}, - {0.20833333f, 0.87500000f}, - {0.20833333f, 0.87500000f}, - {0.20833333f, 0.87500000f}, - {0.20833333f, 0.87500000f}, - {0.20833333f, 0.87500000f}, - {0.20833333f, 0.87500000f}, - {0.29166666f, 0.87500000f}, - {0.29166666f, 0.87500000f}, - {0.29166666f, 0.87500000f}, - {0.29166666f, 0.87500000f}, - {0.29166666f, 0.87500000f}, - {0.29166666f, 0.87500000f}, - {0.37500000f, 0.87500000f}, - {0.37500000f, 0.87500000f}, - {0.37500000f, 0.87500000f}, - {0.37500000f, 0.87500000f}, - {0.37500000f, 0.87500000f}, - {0.37500000f, 0.87500000f}, - {0.45833334f, 0.87500000f}, - {0.45833334f, 0.87500000f}, - {0.45833334f, 0.87500000f}, - {0.45833334f, 
0.87500000f}, - {0.45833334f, 0.87500000f}, - {0.45833334f, 0.87500000f}, - {0.54166670f, 0.87500000f}, - {0.54166670f, 0.87500000f}, - {0.54166670f, 0.87500000f}, - {0.54166670f, 0.87500000f}, - {0.54166670f, 0.87500000f}, - {0.54166670f, 0.87500000f}, - {0.62500000f, 0.87500000f}, - {0.62500000f, 0.87500000f}, - {0.62500000f, 0.87500000f}, - {0.62500000f, 0.87500000f}, - {0.62500000f, 0.87500000f}, - {0.62500000f, 0.87500000f}, - {0.70833330f, 0.87500000f}, - {0.70833330f, 0.87500000f}, - {0.70833330f, 0.87500000f}, - {0.70833330f, 0.87500000f}, - {0.70833330f, 0.87500000f}, - {0.70833330f, 0.87500000f}, - {0.79166670f, 0.87500000f}, - {0.79166670f, 0.87500000f}, - {0.79166670f, 0.87500000f}, - {0.79166670f, 0.87500000f}, - {0.79166670f, 0.87500000f}, - {0.79166670f, 0.87500000f}, - {0.87500000f, 0.87500000f}, - {0.87500000f, 0.87500000f}, - {0.87500000f, 0.87500000f}, - {0.87500000f, 0.87500000f}, - {0.87500000f, 0.87500000f}, - {0.87500000f, 0.87500000f}, - {0.95833330f, 0.87500000f}, - {0.95833330f, 0.87500000f}, - {0.95833330f, 0.87500000f}, - {0.95833330f, 0.87500000f}, - {0.95833330f, 0.87500000f}, - {0.95833330f, 0.87500000f}, - {0.04166667f, 0.95833330f}, - {0.04166667f, 0.95833330f}, - {0.04166667f, 0.95833330f}, - {0.04166667f, 0.95833330f}, - {0.04166667f, 0.95833330f}, - {0.04166667f, 0.95833330f}, - {0.12500000f, 0.95833330f}, - {0.12500000f, 0.95833330f}, - {0.12500000f, 0.95833330f}, - {0.12500000f, 0.95833330f}, - {0.12500000f, 0.95833330f}, - {0.12500000f, 0.95833330f}, - {0.20833333f, 0.95833330f}, - {0.20833333f, 0.95833330f}, - {0.20833333f, 0.95833330f}, - {0.20833333f, 0.95833330f}, - {0.20833333f, 0.95833330f}, - {0.20833333f, 0.95833330f}, - {0.29166666f, 0.95833330f}, - {0.29166666f, 0.95833330f}, - {0.29166666f, 0.95833330f}, - {0.29166666f, 0.95833330f}, - {0.29166666f, 0.95833330f}, - {0.29166666f, 0.95833330f}, - {0.37500000f, 0.95833330f}, - {0.37500000f, 0.95833330f}, - {0.37500000f, 0.95833330f}, - {0.37500000f, 0.95833330f}, - 
{0.37500000f, 0.95833330f}, - {0.37500000f, 0.95833330f}, - {0.45833334f, 0.95833330f}, - {0.45833334f, 0.95833330f}, - {0.45833334f, 0.95833330f}, - {0.45833334f, 0.95833330f}, - {0.45833334f, 0.95833330f}, - {0.45833334f, 0.95833330f}, - {0.54166670f, 0.95833330f}, - {0.54166670f, 0.95833330f}, - {0.54166670f, 0.95833330f}, - {0.54166670f, 0.95833330f}, - {0.54166670f, 0.95833330f}, - {0.54166670f, 0.95833330f}, - {0.62500000f, 0.95833330f}, - {0.62500000f, 0.95833330f}, - {0.62500000f, 0.95833330f}, - {0.62500000f, 0.95833330f}, - {0.62500000f, 0.95833330f}, - {0.62500000f, 0.95833330f}, - {0.70833330f, 0.95833330f}, - {0.70833330f, 0.95833330f}, - {0.70833330f, 0.95833330f}, - {0.70833330f, 0.95833330f}, - {0.70833330f, 0.95833330f}, - {0.70833330f, 0.95833330f}, - {0.79166670f, 0.95833330f}, - {0.79166670f, 0.95833330f}, - {0.79166670f, 0.95833330f}, - {0.79166670f, 0.95833330f}, - {0.79166670f, 0.95833330f}, - {0.79166670f, 0.95833330f}, - {0.87500000f, 0.95833330f}, - {0.87500000f, 0.95833330f}, - {0.87500000f, 0.95833330f}, - {0.87500000f, 0.95833330f}, - {0.87500000f, 0.95833330f}, - {0.87500000f, 0.95833330f}, - {0.95833330f, 0.95833330f}, - {0.95833330f, 0.95833330f}, - {0.95833330f, 0.95833330f}, - {0.95833330f, 0.95833330f}, - {0.95833330f, 0.95833330f}, - {0.95833330f, 0.95833330f}, - }; - return anchors; -} diff --git a/models/palm_detection_mediapipe/demo.py b/models/palm_detection_mediapipe/demo.py deleted file mode 100644 index 98fdf694..00000000 --- a/models/palm_detection_mediapipe/demo.py +++ /dev/null @@ -1,134 +0,0 @@ -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from mp_palmdet import MPPalmDet - -# Valid combinations of backends and 
targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='Hand Detector from MediaPipe') -parser.add_argument('--input', '-i', type=str, - help='Usage: Set path to the input image. Omit for using default camera.') -parser.add_argument('--model', '-m', type=str, default='./palm_detection_mediapipe_2023feb.onnx', - help='Usage: Set model path, defaults to palm_detection_mediapipe_2023feb.onnx.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--score_threshold', type=float, default=0.8, - help='Usage: Set the minimum needed confidence for the model to identify a palm, defaults to 0.8. Smaller values may result in faster detection, but will limit accuracy. Filter out faces of confidence < conf_threshold. An empirical score threshold for the quantized model is 0.49.') -parser.add_argument('--nms_threshold', type=float, default=0.3, - help='Usage: Suppress bounding boxes of iou >= nms_threshold. Default = 0.3.') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. 
Invalid in case of camera input.') -args = parser.parse_args() - -def visualize(image, results, print_results=False, fps=None): - output = image.copy() - - if fps is not None: - cv.putText(output, 'FPS: {:.2f}'.format(fps), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255)) - - for idx, palm in enumerate(results): - score = palm[-1] - palm_box = palm[0:4] - palm_landmarks = palm[4:-1].reshape(7, 2) - - # put score - palm_box = palm_box.astype(np.int32) - cv.putText(output, '{:.4f}'.format(score), (palm_box[0], palm_box[1]+12), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 0)) - - # draw box - cv.rectangle(output, (palm_box[0], palm_box[1]), (palm_box[2], palm_box[3]), (0, 255, 0), 2) - - # draw points - palm_landmarks = palm_landmarks.astype(np.int32) - for p in palm_landmarks: - cv.circle(output, p, 2, (0, 0, 255), 2) - - # Print results - if print_results: - print('-----------palm {}-----------'.format(idx + 1)) - print('score: {:.2f}'.format(score)) - print('palm box: {}'.format(palm_box)) - print('palm landmarks: ') - for plm in palm_landmarks: - print('\t{}'.format(plm)) - - return output - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # Instantiate MPPalmDet - model = MPPalmDet(modelPath=args.model, - nmsThreshold=args.nms_threshold, - scoreThreshold=args.score_threshold, - backendId=backend_id, - targetId=target_id) - - # If input is an image - if args.input is not None: - image = cv.imread(args.input) - - # Inference - results = model.infer(image) - if len(results) == 0: - print('Hand not detected') - - # Draw results on the input image - image = visualize(image, results, print_results=True) - - # Save results if save is true - if args.save: - print('Resutls saved to result.jpg\n') - cv.imwrite('result.jpg', image) - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - 
cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - # Inference - tm.start() - results = model.infer(frame) - tm.stop() - - # Draw results on the input image - frame = visualize(frame, results, fps=tm.getFPS()) - - # Visualize results in a new Window - cv.imshow('MPPalmDet Demo', frame) - - tm.reset() diff --git a/models/palm_detection_mediapipe/example_outputs/mppalmdet_demo.gif b/models/palm_detection_mediapipe/example_outputs/mppalmdet_demo.gif deleted file mode 100644 index 98dae4d4..00000000 --- a/models/palm_detection_mediapipe/example_outputs/mppalmdet_demo.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e4a6e6ff306117f575807ea05bf06c67190bf16fe2d315873acd0824a678dfaf -size 2178521 diff --git a/models/palm_detection_mediapipe/mp_palmdet.py b/models/palm_detection_mediapipe/mp_palmdet.py deleted file mode 100644 index fd1f3bad..00000000 --- a/models/palm_detection_mediapipe/mp_palmdet.py +++ /dev/null @@ -1,2121 +0,0 @@ -import numpy as np -import cv2 as cv - -class MPPalmDet: - def __init__(self, modelPath, nmsThreshold=0.3, scoreThreshold=0.5, topK=5000, backendId=0, targetId=0): - self.model_path = modelPath - self.nms_threshold = nmsThreshold - self.score_threshold = scoreThreshold - self.topK = topK - self.backend_id = backendId - self.target_id = targetId - - self.input_size = np.array([192, 192]) # wh - - self.model = cv.dnn.readNet(self.model_path) - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - self.anchors = self._load_anchors() - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self.backend_id = backendId - self.target_id = targetId - self.model.setPreferableBackend(self.backend_id) - 
self.model.setPreferableTarget(self.target_id) - - def _preprocess(self, image): - pad_bias = np.array([0., 0.]) # left, top - ratio = min(self.input_size / image.shape[:2]) - if image.shape[0] != self.input_size[0] or image.shape[1] != self.input_size[1]: - # keep aspect ratio when resize - ratio_size = (np.array(image.shape[:2]) * ratio).astype(np.int32) - image = cv.resize(image, (ratio_size[1], ratio_size[0])) - pad_h = self.input_size[0] - ratio_size[0] - pad_w = self.input_size[1] - ratio_size[1] - pad_bias[0] = left = pad_w // 2 - pad_bias[1] = top = pad_h // 2 - right = pad_w - left - bottom = pad_h - top - image = cv.copyMakeBorder(image, top, bottom, left, right, cv.BORDER_CONSTANT, None, (0, 0, 0)) - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - image = image.astype(np.float32) / 255.0 # norm - pad_bias = (pad_bias / ratio).astype(np.int32) - return image[np.newaxis, :, :, :], pad_bias # hwc -> nhwc - - def infer(self, image): - h, w, _ = image.shape - - # Preprocess - input_blob, pad_bias = self._preprocess(image) - - # Forward - self.model.setInput(input_blob) - output_blob = self.model.forward(self.model.getUnconnectedOutLayersNames()) - - # Postprocess - results = self._postprocess(output_blob, np.array([w, h]), pad_bias) - - return results - - def _postprocess(self, output_blob, original_shape, pad_bias): - score = output_blob[1][0, :, 0] - box_delta = output_blob[0][0, :, 0:4] - landmark_delta = output_blob[0][0, :, 4:] - scale = max(original_shape) - - # get scores - score = score.astype(np.float64) - score = 1 / (1 + np.exp(-score)) - - # get boxes - cxy_delta = box_delta[:, :2] / self.input_size - wh_delta = box_delta[:, 2:] / self.input_size - xy1 = (cxy_delta - wh_delta / 2 + self.anchors) * scale - xy2 = (cxy_delta + wh_delta / 2 + self.anchors) * scale - boxes = np.concatenate([xy1, xy2], axis=1) - boxes -= [pad_bias[0], pad_bias[1], pad_bias[0], pad_bias[1]] - # NMS - keep_idx = cv.dnn.NMSBoxes(boxes, score, self.score_threshold, 
self.nms_threshold, top_k=self.topK) - if len(keep_idx) == 0: - return np.empty(shape=(0, 19)) - selected_score = score[keep_idx] - selected_box = boxes[keep_idx] - - # get landmarks - selected_landmarks = landmark_delta[keep_idx].reshape(-1, 7, 2) - selected_landmarks = selected_landmarks / self.input_size - selected_anchors = self.anchors[keep_idx] - for idx, landmark in enumerate(selected_landmarks): - landmark += selected_anchors[idx] - selected_landmarks *= scale - selected_landmarks -= pad_bias - - # [ - # [bbox_coords, landmarks_coords, score] - # ... - # [bbox_coords, landmarks_coords, score] - # ] - return np.c_[selected_box.reshape(-1, 4), selected_landmarks.reshape(-1, 14), selected_score.reshape(-1, 1)] - - def _load_anchors(self): - return np.array([[0.02083333, 0.02083333], - [0.02083333, 0.02083333], - [0.0625, 0.02083333], - [0.0625, 0.02083333], - [0.10416666, 0.02083333], - [0.10416666, 0.02083333], - [0.14583333, 0.02083333], - [0.14583333, 0.02083333], - [0.1875, 0.02083333], - [0.1875, 0.02083333], - [0.22916667, 0.02083333], - [0.22916667, 0.02083333], - [0.27083334, 0.02083333], - [0.27083334, 0.02083333], - [0.3125, 0.02083333], - [0.3125, 0.02083333], - [0.35416666, 0.02083333], - [0.35416666, 0.02083333], - [0.39583334, 0.02083333], - [0.39583334, 0.02083333], - [0.4375, 0.02083333], - [0.4375, 0.02083333], - [0.47916666, 0.02083333], - [0.47916666, 0.02083333], - [0.5208333, 0.02083333], - [0.5208333, 0.02083333], - [0.5625, 0.02083333], - [0.5625, 0.02083333], - [0.6041667, 0.02083333], - [0.6041667, 0.02083333], - [0.6458333, 0.02083333], - [0.6458333, 0.02083333], - [0.6875, 0.02083333], - [0.6875, 0.02083333], - [0.7291667, 0.02083333], - [0.7291667, 0.02083333], - [0.7708333, 0.02083333], - [0.7708333, 0.02083333], - [0.8125, 0.02083333], - [0.8125, 0.02083333], - [0.8541667, 0.02083333], - [0.8541667, 0.02083333], - [0.8958333, 0.02083333], - [0.8958333, 0.02083333], - [0.9375, 0.02083333], - [0.9375, 0.02083333], - [0.9791667, 
0.02083333], - [0.9791667, 0.02083333], - [0.02083333, 0.0625], - [0.02083333, 0.0625], - [0.0625, 0.0625], - [0.0625, 0.0625], - [0.10416666, 0.0625], - [0.10416666, 0.0625], - [0.14583333, 0.0625], - [0.14583333, 0.0625], - [0.1875, 0.0625], - [0.1875, 0.0625], - [0.22916667, 0.0625], - [0.22916667, 0.0625], - [0.27083334, 0.0625], - [0.27083334, 0.0625], - [0.3125, 0.0625], - [0.3125, 0.0625], - [0.35416666, 0.0625], - [0.35416666, 0.0625], - [0.39583334, 0.0625], - [0.39583334, 0.0625], - [0.4375, 0.0625], - [0.4375, 0.0625], - [0.47916666, 0.0625], - [0.47916666, 0.0625], - [0.5208333, 0.0625], - [0.5208333, 0.0625], - [0.5625, 0.0625], - [0.5625, 0.0625], - [0.6041667, 0.0625], - [0.6041667, 0.0625], - [0.6458333, 0.0625], - [0.6458333, 0.0625], - [0.6875, 0.0625], - [0.6875, 0.0625], - [0.7291667, 0.0625], - [0.7291667, 0.0625], - [0.7708333, 0.0625], - [0.7708333, 0.0625], - [0.8125, 0.0625], - [0.8125, 0.0625], - [0.8541667, 0.0625], - [0.8541667, 0.0625], - [0.8958333, 0.0625], - [0.8958333, 0.0625], - [0.9375, 0.0625], - [0.9375, 0.0625], - [0.9791667, 0.0625], - [0.9791667, 0.0625], - [0.02083333, 0.10416666], - [0.02083333, 0.10416666], - [0.0625, 0.10416666], - [0.0625, 0.10416666], - [0.10416666, 0.10416666], - [0.10416666, 0.10416666], - [0.14583333, 0.10416666], - [0.14583333, 0.10416666], - [0.1875, 0.10416666], - [0.1875, 0.10416666], - [0.22916667, 0.10416666], - [0.22916667, 0.10416666], - [0.27083334, 0.10416666], - [0.27083334, 0.10416666], - [0.3125, 0.10416666], - [0.3125, 0.10416666], - [0.35416666, 0.10416666], - [0.35416666, 0.10416666], - [0.39583334, 0.10416666], - [0.39583334, 0.10416666], - [0.4375, 0.10416666], - [0.4375, 0.10416666], - [0.47916666, 0.10416666], - [0.47916666, 0.10416666], - [0.5208333, 0.10416666], - [0.5208333, 0.10416666], - [0.5625, 0.10416666], - [0.5625, 0.10416666], - [0.6041667, 0.10416666], - [0.6041667, 0.10416666], - [0.6458333, 0.10416666], - [0.6458333, 0.10416666], - [0.6875, 0.10416666], - [0.6875, 
0.10416666], - [0.7291667, 0.10416666], - [0.7291667, 0.10416666], - [0.7708333, 0.10416666], - [0.7708333, 0.10416666], - [0.8125, 0.10416666], - [0.8125, 0.10416666], - [0.8541667, 0.10416666], - [0.8541667, 0.10416666], - [0.8958333, 0.10416666], - [0.8958333, 0.10416666], - [0.9375, 0.10416666], - [0.9375, 0.10416666], - [0.9791667, 0.10416666], - [0.9791667, 0.10416666], - [0.02083333, 0.14583333], - [0.02083333, 0.14583333], - [0.0625, 0.14583333], - [0.0625, 0.14583333], - [0.10416666, 0.14583333], - [0.10416666, 0.14583333], - [0.14583333, 0.14583333], - [0.14583333, 0.14583333], - [0.1875, 0.14583333], - [0.1875, 0.14583333], - [0.22916667, 0.14583333], - [0.22916667, 0.14583333], - [0.27083334, 0.14583333], - [0.27083334, 0.14583333], - [0.3125, 0.14583333], - [0.3125, 0.14583333], - [0.35416666, 0.14583333], - [0.35416666, 0.14583333], - [0.39583334, 0.14583333], - [0.39583334, 0.14583333], - [0.4375, 0.14583333], - [0.4375, 0.14583333], - [0.47916666, 0.14583333], - [0.47916666, 0.14583333], - [0.5208333, 0.14583333], - [0.5208333, 0.14583333], - [0.5625, 0.14583333], - [0.5625, 0.14583333], - [0.6041667, 0.14583333], - [0.6041667, 0.14583333], - [0.6458333, 0.14583333], - [0.6458333, 0.14583333], - [0.6875, 0.14583333], - [0.6875, 0.14583333], - [0.7291667, 0.14583333], - [0.7291667, 0.14583333], - [0.7708333, 0.14583333], - [0.7708333, 0.14583333], - [0.8125, 0.14583333], - [0.8125, 0.14583333], - [0.8541667, 0.14583333], - [0.8541667, 0.14583333], - [0.8958333, 0.14583333], - [0.8958333, 0.14583333], - [0.9375, 0.14583333], - [0.9375, 0.14583333], - [0.9791667, 0.14583333], - [0.9791667, 0.14583333], - [0.02083333, 0.1875], - [0.02083333, 0.1875], - [0.0625, 0.1875], - [0.0625, 0.1875], - [0.10416666, 0.1875], - [0.10416666, 0.1875], - [0.14583333, 0.1875], - [0.14583333, 0.1875], - [0.1875, 0.1875], - [0.1875, 0.1875], - [0.22916667, 0.1875], - [0.22916667, 0.1875], - [0.27083334, 0.1875], - [0.27083334, 0.1875], - [0.3125, 0.1875], - [0.3125, 
0.1875], - [0.35416666, 0.1875], - [0.35416666, 0.1875], - [0.39583334, 0.1875], - [0.39583334, 0.1875], - [0.4375, 0.1875], - [0.4375, 0.1875], - [0.47916666, 0.1875], - [0.47916666, 0.1875], - [0.5208333, 0.1875], - [0.5208333, 0.1875], - [0.5625, 0.1875], - [0.5625, 0.1875], - [0.6041667, 0.1875], - [0.6041667, 0.1875], - [0.6458333, 0.1875], - [0.6458333, 0.1875], - [0.6875, 0.1875], - [0.6875, 0.1875], - [0.7291667, 0.1875], - [0.7291667, 0.1875], - [0.7708333, 0.1875], - [0.7708333, 0.1875], - [0.8125, 0.1875], - [0.8125, 0.1875], - [0.8541667, 0.1875], - [0.8541667, 0.1875], - [0.8958333, 0.1875], - [0.8958333, 0.1875], - [0.9375, 0.1875], - [0.9375, 0.1875], - [0.9791667, 0.1875], - [0.9791667, 0.1875], - [0.02083333, 0.22916667], - [0.02083333, 0.22916667], - [0.0625, 0.22916667], - [0.0625, 0.22916667], - [0.10416666, 0.22916667], - [0.10416666, 0.22916667], - [0.14583333, 0.22916667], - [0.14583333, 0.22916667], - [0.1875, 0.22916667], - [0.1875, 0.22916667], - [0.22916667, 0.22916667], - [0.22916667, 0.22916667], - [0.27083334, 0.22916667], - [0.27083334, 0.22916667], - [0.3125, 0.22916667], - [0.3125, 0.22916667], - [0.35416666, 0.22916667], - [0.35416666, 0.22916667], - [0.39583334, 0.22916667], - [0.39583334, 0.22916667], - [0.4375, 0.22916667], - [0.4375, 0.22916667], - [0.47916666, 0.22916667], - [0.47916666, 0.22916667], - [0.5208333, 0.22916667], - [0.5208333, 0.22916667], - [0.5625, 0.22916667], - [0.5625, 0.22916667], - [0.6041667, 0.22916667], - [0.6041667, 0.22916667], - [0.6458333, 0.22916667], - [0.6458333, 0.22916667], - [0.6875, 0.22916667], - [0.6875, 0.22916667], - [0.7291667, 0.22916667], - [0.7291667, 0.22916667], - [0.7708333, 0.22916667], - [0.7708333, 0.22916667], - [0.8125, 0.22916667], - [0.8125, 0.22916667], - [0.8541667, 0.22916667], - [0.8541667, 0.22916667], - [0.8958333, 0.22916667], - [0.8958333, 0.22916667], - [0.9375, 0.22916667], - [0.9375, 0.22916667], - [0.9791667, 0.22916667], - [0.9791667, 0.22916667], - [0.02083333, 
0.27083334], - [0.02083333, 0.27083334], - [0.0625, 0.27083334], - [0.0625, 0.27083334], - [0.10416666, 0.27083334], - [0.10416666, 0.27083334], - [0.14583333, 0.27083334], - [0.14583333, 0.27083334], - [0.1875, 0.27083334], - [0.1875, 0.27083334], - [0.22916667, 0.27083334], - [0.22916667, 0.27083334], - [0.27083334, 0.27083334], - [0.27083334, 0.27083334], - [0.3125, 0.27083334], - [0.3125, 0.27083334], - [0.35416666, 0.27083334], - [0.35416666, 0.27083334], - [0.39583334, 0.27083334], - [0.39583334, 0.27083334], - [0.4375, 0.27083334], - [0.4375, 0.27083334], - [0.47916666, 0.27083334], - [0.47916666, 0.27083334], - [0.5208333, 0.27083334], - [0.5208333, 0.27083334], - [0.5625, 0.27083334], - [0.5625, 0.27083334], - [0.6041667, 0.27083334], - [0.6041667, 0.27083334], - [0.6458333, 0.27083334], - [0.6458333, 0.27083334], - [0.6875, 0.27083334], - [0.6875, 0.27083334], - [0.7291667, 0.27083334], - [0.7291667, 0.27083334], - [0.7708333, 0.27083334], - [0.7708333, 0.27083334], - [0.8125, 0.27083334], - [0.8125, 0.27083334], - [0.8541667, 0.27083334], - [0.8541667, 0.27083334], - [0.8958333, 0.27083334], - [0.8958333, 0.27083334], - [0.9375, 0.27083334], - [0.9375, 0.27083334], - [0.9791667, 0.27083334], - [0.9791667, 0.27083334], - [0.02083333, 0.3125], - [0.02083333, 0.3125], - [0.0625, 0.3125], - [0.0625, 0.3125], - [0.10416666, 0.3125], - [0.10416666, 0.3125], - [0.14583333, 0.3125], - [0.14583333, 0.3125], - [0.1875, 0.3125], - [0.1875, 0.3125], - [0.22916667, 0.3125], - [0.22916667, 0.3125], - [0.27083334, 0.3125], - [0.27083334, 0.3125], - [0.3125, 0.3125], - [0.3125, 0.3125], - [0.35416666, 0.3125], - [0.35416666, 0.3125], - [0.39583334, 0.3125], - [0.39583334, 0.3125], - [0.4375, 0.3125], - [0.4375, 0.3125], - [0.47916666, 0.3125], - [0.47916666, 0.3125], - [0.5208333, 0.3125], - [0.5208333, 0.3125], - [0.5625, 0.3125], - [0.5625, 0.3125], - [0.6041667, 0.3125], - [0.6041667, 0.3125], - [0.6458333, 0.3125], - [0.6458333, 0.3125], - [0.6875, 0.3125], - 
[0.6875, 0.3125], - [0.7291667, 0.3125], - [0.7291667, 0.3125], - [0.7708333, 0.3125], - [0.7708333, 0.3125], - [0.8125, 0.3125], - [0.8125, 0.3125], - [0.8541667, 0.3125], - [0.8541667, 0.3125], - [0.8958333, 0.3125], - [0.8958333, 0.3125], - [0.9375, 0.3125], - [0.9375, 0.3125], - [0.9791667, 0.3125], - [0.9791667, 0.3125], - [0.02083333, 0.35416666], - [0.02083333, 0.35416666], - [0.0625, 0.35416666], - [0.0625, 0.35416666], - [0.10416666, 0.35416666], - [0.10416666, 0.35416666], - [0.14583333, 0.35416666], - [0.14583333, 0.35416666], - [0.1875, 0.35416666], - [0.1875, 0.35416666], - [0.22916667, 0.35416666], - [0.22916667, 0.35416666], - [0.27083334, 0.35416666], - [0.27083334, 0.35416666], - [0.3125, 0.35416666], - [0.3125, 0.35416666], - [0.35416666, 0.35416666], - [0.35416666, 0.35416666], - [0.39583334, 0.35416666], - [0.39583334, 0.35416666], - [0.4375, 0.35416666], - [0.4375, 0.35416666], - [0.47916666, 0.35416666], - [0.47916666, 0.35416666], - [0.5208333, 0.35416666], - [0.5208333, 0.35416666], - [0.5625, 0.35416666], - [0.5625, 0.35416666], - [0.6041667, 0.35416666], - [0.6041667, 0.35416666], - [0.6458333, 0.35416666], - [0.6458333, 0.35416666], - [0.6875, 0.35416666], - [0.6875, 0.35416666], - [0.7291667, 0.35416666], - [0.7291667, 0.35416666], - [0.7708333, 0.35416666], - [0.7708333, 0.35416666], - [0.8125, 0.35416666], - [0.8125, 0.35416666], - [0.8541667, 0.35416666], - [0.8541667, 0.35416666], - [0.8958333, 0.35416666], - [0.8958333, 0.35416666], - [0.9375, 0.35416666], - [0.9375, 0.35416666], - [0.9791667, 0.35416666], - [0.9791667, 0.35416666], - [0.02083333, 0.39583334], - [0.02083333, 0.39583334], - [0.0625, 0.39583334], - [0.0625, 0.39583334], - [0.10416666, 0.39583334], - [0.10416666, 0.39583334], - [0.14583333, 0.39583334], - [0.14583333, 0.39583334], - [0.1875, 0.39583334], - [0.1875, 0.39583334], - [0.22916667, 0.39583334], - [0.22916667, 0.39583334], - [0.27083334, 0.39583334], - [0.27083334, 0.39583334], - [0.3125, 0.39583334], - 
[0.3125, 0.39583334], - [0.35416666, 0.39583334], - [0.35416666, 0.39583334], - [0.39583334, 0.39583334], - [0.39583334, 0.39583334], - [0.4375, 0.39583334], - [0.4375, 0.39583334], - [0.47916666, 0.39583334], - [0.47916666, 0.39583334], - [0.5208333, 0.39583334], - [0.5208333, 0.39583334], - [0.5625, 0.39583334], - [0.5625, 0.39583334], - [0.6041667, 0.39583334], - [0.6041667, 0.39583334], - [0.6458333, 0.39583334], - [0.6458333, 0.39583334], - [0.6875, 0.39583334], - [0.6875, 0.39583334], - [0.7291667, 0.39583334], - [0.7291667, 0.39583334], - [0.7708333, 0.39583334], - [0.7708333, 0.39583334], - [0.8125, 0.39583334], - [0.8125, 0.39583334], - [0.8541667, 0.39583334], - [0.8541667, 0.39583334], - [0.8958333, 0.39583334], - [0.8958333, 0.39583334], - [0.9375, 0.39583334], - [0.9375, 0.39583334], - [0.9791667, 0.39583334], - [0.9791667, 0.39583334], - [0.02083333, 0.4375], - [0.02083333, 0.4375], - [0.0625, 0.4375], - [0.0625, 0.4375], - [0.10416666, 0.4375], - [0.10416666, 0.4375], - [0.14583333, 0.4375], - [0.14583333, 0.4375], - [0.1875, 0.4375], - [0.1875, 0.4375], - [0.22916667, 0.4375], - [0.22916667, 0.4375], - [0.27083334, 0.4375], - [0.27083334, 0.4375], - [0.3125, 0.4375], - [0.3125, 0.4375], - [0.35416666, 0.4375], - [0.35416666, 0.4375], - [0.39583334, 0.4375], - [0.39583334, 0.4375], - [0.4375, 0.4375], - [0.4375, 0.4375], - [0.47916666, 0.4375], - [0.47916666, 0.4375], - [0.5208333, 0.4375], - [0.5208333, 0.4375], - [0.5625, 0.4375], - [0.5625, 0.4375], - [0.6041667, 0.4375], - [0.6041667, 0.4375], - [0.6458333, 0.4375], - [0.6458333, 0.4375], - [0.6875, 0.4375], - [0.6875, 0.4375], - [0.7291667, 0.4375], - [0.7291667, 0.4375], - [0.7708333, 0.4375], - [0.7708333, 0.4375], - [0.8125, 0.4375], - [0.8125, 0.4375], - [0.8541667, 0.4375], - [0.8541667, 0.4375], - [0.8958333, 0.4375], - [0.8958333, 0.4375], - [0.9375, 0.4375], - [0.9375, 0.4375], - [0.9791667, 0.4375], - [0.9791667, 0.4375], - [0.02083333, 0.47916666], - [0.02083333, 0.47916666], - 
[0.0625, 0.47916666], - [0.0625, 0.47916666], - [0.10416666, 0.47916666], - [0.10416666, 0.47916666], - [0.14583333, 0.47916666], - [0.14583333, 0.47916666], - [0.1875, 0.47916666], - [0.1875, 0.47916666], - [0.22916667, 0.47916666], - [0.22916667, 0.47916666], - [0.27083334, 0.47916666], - [0.27083334, 0.47916666], - [0.3125, 0.47916666], - [0.3125, 0.47916666], - [0.35416666, 0.47916666], - [0.35416666, 0.47916666], - [0.39583334, 0.47916666], - [0.39583334, 0.47916666], - [0.4375, 0.47916666], - [0.4375, 0.47916666], - [0.47916666, 0.47916666], - [0.47916666, 0.47916666], - [0.5208333, 0.47916666], - [0.5208333, 0.47916666], - [0.5625, 0.47916666], - [0.5625, 0.47916666], - [0.6041667, 0.47916666], - [0.6041667, 0.47916666], - [0.6458333, 0.47916666], - [0.6458333, 0.47916666], - [0.6875, 0.47916666], - [0.6875, 0.47916666], - [0.7291667, 0.47916666], - [0.7291667, 0.47916666], - [0.7708333, 0.47916666], - [0.7708333, 0.47916666], - [0.8125, 0.47916666], - [0.8125, 0.47916666], - [0.8541667, 0.47916666], - [0.8541667, 0.47916666], - [0.8958333, 0.47916666], - [0.8958333, 0.47916666], - [0.9375, 0.47916666], - [0.9375, 0.47916666], - [0.9791667, 0.47916666], - [0.9791667, 0.47916666], - [0.02083333, 0.5208333], - [0.02083333, 0.5208333], - [0.0625, 0.5208333], - [0.0625, 0.5208333], - [0.10416666, 0.5208333], - [0.10416666, 0.5208333], - [0.14583333, 0.5208333], - [0.14583333, 0.5208333], - [0.1875, 0.5208333], - [0.1875, 0.5208333], - [0.22916667, 0.5208333], - [0.22916667, 0.5208333], - [0.27083334, 0.5208333], - [0.27083334, 0.5208333], - [0.3125, 0.5208333], - [0.3125, 0.5208333], - [0.35416666, 0.5208333], - [0.35416666, 0.5208333], - [0.39583334, 0.5208333], - [0.39583334, 0.5208333], - [0.4375, 0.5208333], - [0.4375, 0.5208333], - [0.47916666, 0.5208333], - [0.47916666, 0.5208333], - [0.5208333, 0.5208333], - [0.5208333, 0.5208333], - [0.5625, 0.5208333], - [0.5625, 0.5208333], - [0.6041667, 0.5208333], - [0.6041667, 0.5208333], - [0.6458333, 0.5208333], - 
[0.6458333, 0.5208333], - [0.6875, 0.5208333], - [0.6875, 0.5208333], - [0.7291667, 0.5208333], - [0.7291667, 0.5208333], - [0.7708333, 0.5208333], - [0.7708333, 0.5208333], - [0.8125, 0.5208333], - [0.8125, 0.5208333], - [0.8541667, 0.5208333], - [0.8541667, 0.5208333], - [0.8958333, 0.5208333], - [0.8958333, 0.5208333], - [0.9375, 0.5208333], - [0.9375, 0.5208333], - [0.9791667, 0.5208333], - [0.9791667, 0.5208333], - [0.02083333, 0.5625], - [0.02083333, 0.5625], - [0.0625, 0.5625], - [0.0625, 0.5625], - [0.10416666, 0.5625], - [0.10416666, 0.5625], - [0.14583333, 0.5625], - [0.14583333, 0.5625], - [0.1875, 0.5625], - [0.1875, 0.5625], - [0.22916667, 0.5625], - [0.22916667, 0.5625], - [0.27083334, 0.5625], - [0.27083334, 0.5625], - [0.3125, 0.5625], - [0.3125, 0.5625], - [0.35416666, 0.5625], - [0.35416666, 0.5625], - [0.39583334, 0.5625], - [0.39583334, 0.5625], - [0.4375, 0.5625], - [0.4375, 0.5625], - [0.47916666, 0.5625], - [0.47916666, 0.5625], - [0.5208333, 0.5625], - [0.5208333, 0.5625], - [0.5625, 0.5625], - [0.5625, 0.5625], - [0.6041667, 0.5625], - [0.6041667, 0.5625], - [0.6458333, 0.5625], - [0.6458333, 0.5625], - [0.6875, 0.5625], - [0.6875, 0.5625], - [0.7291667, 0.5625], - [0.7291667, 0.5625], - [0.7708333, 0.5625], - [0.7708333, 0.5625], - [0.8125, 0.5625], - [0.8125, 0.5625], - [0.8541667, 0.5625], - [0.8541667, 0.5625], - [0.8958333, 0.5625], - [0.8958333, 0.5625], - [0.9375, 0.5625], - [0.9375, 0.5625], - [0.9791667, 0.5625], - [0.9791667, 0.5625], - [0.02083333, 0.6041667], - [0.02083333, 0.6041667], - [0.0625, 0.6041667], - [0.0625, 0.6041667], - [0.10416666, 0.6041667], - [0.10416666, 0.6041667], - [0.14583333, 0.6041667], - [0.14583333, 0.6041667], - [0.1875, 0.6041667], - [0.1875, 0.6041667], - [0.22916667, 0.6041667], - [0.22916667, 0.6041667], - [0.27083334, 0.6041667], - [0.27083334, 0.6041667], - [0.3125, 0.6041667], - [0.3125, 0.6041667], - [0.35416666, 0.6041667], - [0.35416666, 0.6041667], - [0.39583334, 0.6041667], - [0.39583334, 
0.6041667], - [0.4375, 0.6041667], - [0.4375, 0.6041667], - [0.47916666, 0.6041667], - [0.47916666, 0.6041667], - [0.5208333, 0.6041667], - [0.5208333, 0.6041667], - [0.5625, 0.6041667], - [0.5625, 0.6041667], - [0.6041667, 0.6041667], - [0.6041667, 0.6041667], - [0.6458333, 0.6041667], - [0.6458333, 0.6041667], - [0.6875, 0.6041667], - [0.6875, 0.6041667], - [0.7291667, 0.6041667], - [0.7291667, 0.6041667], - [0.7708333, 0.6041667], - [0.7708333, 0.6041667], - [0.8125, 0.6041667], - [0.8125, 0.6041667], - [0.8541667, 0.6041667], - [0.8541667, 0.6041667], - [0.8958333, 0.6041667], - [0.8958333, 0.6041667], - [0.9375, 0.6041667], - [0.9375, 0.6041667], - [0.9791667, 0.6041667], - [0.9791667, 0.6041667], - [0.02083333, 0.6458333], - [0.02083333, 0.6458333], - [0.0625, 0.6458333], - [0.0625, 0.6458333], - [0.10416666, 0.6458333], - [0.10416666, 0.6458333], - [0.14583333, 0.6458333], - [0.14583333, 0.6458333], - [0.1875, 0.6458333], - [0.1875, 0.6458333], - [0.22916667, 0.6458333], - [0.22916667, 0.6458333], - [0.27083334, 0.6458333], - [0.27083334, 0.6458333], - [0.3125, 0.6458333], - [0.3125, 0.6458333], - [0.35416666, 0.6458333], - [0.35416666, 0.6458333], - [0.39583334, 0.6458333], - [0.39583334, 0.6458333], - [0.4375, 0.6458333], - [0.4375, 0.6458333], - [0.47916666, 0.6458333], - [0.47916666, 0.6458333], - [0.5208333, 0.6458333], - [0.5208333, 0.6458333], - [0.5625, 0.6458333], - [0.5625, 0.6458333], - [0.6041667, 0.6458333], - [0.6041667, 0.6458333], - [0.6458333, 0.6458333], - [0.6458333, 0.6458333], - [0.6875, 0.6458333], - [0.6875, 0.6458333], - [0.7291667, 0.6458333], - [0.7291667, 0.6458333], - [0.7708333, 0.6458333], - [0.7708333, 0.6458333], - [0.8125, 0.6458333], - [0.8125, 0.6458333], - [0.8541667, 0.6458333], - [0.8541667, 0.6458333], - [0.8958333, 0.6458333], - [0.8958333, 0.6458333], - [0.9375, 0.6458333], - [0.9375, 0.6458333], - [0.9791667, 0.6458333], - [0.9791667, 0.6458333], - [0.02083333, 0.6875], - [0.02083333, 0.6875], - [0.0625, 0.6875], - 
[0.0625, 0.6875], - [0.10416666, 0.6875], - [0.10416666, 0.6875], - [0.14583333, 0.6875], - [0.14583333, 0.6875], - [0.1875, 0.6875], - [0.1875, 0.6875], - [0.22916667, 0.6875], - [0.22916667, 0.6875], - [0.27083334, 0.6875], - [0.27083334, 0.6875], - [0.3125, 0.6875], - [0.3125, 0.6875], - [0.35416666, 0.6875], - [0.35416666, 0.6875], - [0.39583334, 0.6875], - [0.39583334, 0.6875], - [0.4375, 0.6875], - [0.4375, 0.6875], - [0.47916666, 0.6875], - [0.47916666, 0.6875], - [0.5208333, 0.6875], - [0.5208333, 0.6875], - [0.5625, 0.6875], - [0.5625, 0.6875], - [0.6041667, 0.6875], - [0.6041667, 0.6875], - [0.6458333, 0.6875], - [0.6458333, 0.6875], - [0.6875, 0.6875], - [0.6875, 0.6875], - [0.7291667, 0.6875], - [0.7291667, 0.6875], - [0.7708333, 0.6875], - [0.7708333, 0.6875], - [0.8125, 0.6875], - [0.8125, 0.6875], - [0.8541667, 0.6875], - [0.8541667, 0.6875], - [0.8958333, 0.6875], - [0.8958333, 0.6875], - [0.9375, 0.6875], - [0.9375, 0.6875], - [0.9791667, 0.6875], - [0.9791667, 0.6875], - [0.02083333, 0.7291667], - [0.02083333, 0.7291667], - [0.0625, 0.7291667], - [0.0625, 0.7291667], - [0.10416666, 0.7291667], - [0.10416666, 0.7291667], - [0.14583333, 0.7291667], - [0.14583333, 0.7291667], - [0.1875, 0.7291667], - [0.1875, 0.7291667], - [0.22916667, 0.7291667], - [0.22916667, 0.7291667], - [0.27083334, 0.7291667], - [0.27083334, 0.7291667], - [0.3125, 0.7291667], - [0.3125, 0.7291667], - [0.35416666, 0.7291667], - [0.35416666, 0.7291667], - [0.39583334, 0.7291667], - [0.39583334, 0.7291667], - [0.4375, 0.7291667], - [0.4375, 0.7291667], - [0.47916666, 0.7291667], - [0.47916666, 0.7291667], - [0.5208333, 0.7291667], - [0.5208333, 0.7291667], - [0.5625, 0.7291667], - [0.5625, 0.7291667], - [0.6041667, 0.7291667], - [0.6041667, 0.7291667], - [0.6458333, 0.7291667], - [0.6458333, 0.7291667], - [0.6875, 0.7291667], - [0.6875, 0.7291667], - [0.7291667, 0.7291667], - [0.7291667, 0.7291667], - [0.7708333, 0.7291667], - [0.7708333, 0.7291667], - [0.8125, 0.7291667], - 
[0.8125, 0.7291667], - [0.8541667, 0.7291667], - [0.8541667, 0.7291667], - [0.8958333, 0.7291667], - [0.8958333, 0.7291667], - [0.9375, 0.7291667], - [0.9375, 0.7291667], - [0.9791667, 0.7291667], - [0.9791667, 0.7291667], - [0.02083333, 0.7708333], - [0.02083333, 0.7708333], - [0.0625, 0.7708333], - [0.0625, 0.7708333], - [0.10416666, 0.7708333], - [0.10416666, 0.7708333], - [0.14583333, 0.7708333], - [0.14583333, 0.7708333], - [0.1875, 0.7708333], - [0.1875, 0.7708333], - [0.22916667, 0.7708333], - [0.22916667, 0.7708333], - [0.27083334, 0.7708333], - [0.27083334, 0.7708333], - [0.3125, 0.7708333], - [0.3125, 0.7708333], - [0.35416666, 0.7708333], - [0.35416666, 0.7708333], - [0.39583334, 0.7708333], - [0.39583334, 0.7708333], - [0.4375, 0.7708333], - [0.4375, 0.7708333], - [0.47916666, 0.7708333], - [0.47916666, 0.7708333], - [0.5208333, 0.7708333], - [0.5208333, 0.7708333], - [0.5625, 0.7708333], - [0.5625, 0.7708333], - [0.6041667, 0.7708333], - [0.6041667, 0.7708333], - [0.6458333, 0.7708333], - [0.6458333, 0.7708333], - [0.6875, 0.7708333], - [0.6875, 0.7708333], - [0.7291667, 0.7708333], - [0.7291667, 0.7708333], - [0.7708333, 0.7708333], - [0.7708333, 0.7708333], - [0.8125, 0.7708333], - [0.8125, 0.7708333], - [0.8541667, 0.7708333], - [0.8541667, 0.7708333], - [0.8958333, 0.7708333], - [0.8958333, 0.7708333], - [0.9375, 0.7708333], - [0.9375, 0.7708333], - [0.9791667, 0.7708333], - [0.9791667, 0.7708333], - [0.02083333, 0.8125], - [0.02083333, 0.8125], - [0.0625, 0.8125], - [0.0625, 0.8125], - [0.10416666, 0.8125], - [0.10416666, 0.8125], - [0.14583333, 0.8125], - [0.14583333, 0.8125], - [0.1875, 0.8125], - [0.1875, 0.8125], - [0.22916667, 0.8125], - [0.22916667, 0.8125], - [0.27083334, 0.8125], - [0.27083334, 0.8125], - [0.3125, 0.8125], - [0.3125, 0.8125], - [0.35416666, 0.8125], - [0.35416666, 0.8125], - [0.39583334, 0.8125], - [0.39583334, 0.8125], - [0.4375, 0.8125], - [0.4375, 0.8125], - [0.47916666, 0.8125], - [0.47916666, 0.8125], - [0.5208333, 
0.8125], - [0.5208333, 0.8125], - [0.5625, 0.8125], - [0.5625, 0.8125], - [0.6041667, 0.8125], - [0.6041667, 0.8125], - [0.6458333, 0.8125], - [0.6458333, 0.8125], - [0.6875, 0.8125], - [0.6875, 0.8125], - [0.7291667, 0.8125], - [0.7291667, 0.8125], - [0.7708333, 0.8125], - [0.7708333, 0.8125], - [0.8125, 0.8125], - [0.8125, 0.8125], - [0.8541667, 0.8125], - [0.8541667, 0.8125], - [0.8958333, 0.8125], - [0.8958333, 0.8125], - [0.9375, 0.8125], - [0.9375, 0.8125], - [0.9791667, 0.8125], - [0.9791667, 0.8125], - [0.02083333, 0.8541667], - [0.02083333, 0.8541667], - [0.0625, 0.8541667], - [0.0625, 0.8541667], - [0.10416666, 0.8541667], - [0.10416666, 0.8541667], - [0.14583333, 0.8541667], - [0.14583333, 0.8541667], - [0.1875, 0.8541667], - [0.1875, 0.8541667], - [0.22916667, 0.8541667], - [0.22916667, 0.8541667], - [0.27083334, 0.8541667], - [0.27083334, 0.8541667], - [0.3125, 0.8541667], - [0.3125, 0.8541667], - [0.35416666, 0.8541667], - [0.35416666, 0.8541667], - [0.39583334, 0.8541667], - [0.39583334, 0.8541667], - [0.4375, 0.8541667], - [0.4375, 0.8541667], - [0.47916666, 0.8541667], - [0.47916666, 0.8541667], - [0.5208333, 0.8541667], - [0.5208333, 0.8541667], - [0.5625, 0.8541667], - [0.5625, 0.8541667], - [0.6041667, 0.8541667], - [0.6041667, 0.8541667], - [0.6458333, 0.8541667], - [0.6458333, 0.8541667], - [0.6875, 0.8541667], - [0.6875, 0.8541667], - [0.7291667, 0.8541667], - [0.7291667, 0.8541667], - [0.7708333, 0.8541667], - [0.7708333, 0.8541667], - [0.8125, 0.8541667], - [0.8125, 0.8541667], - [0.8541667, 0.8541667], - [0.8541667, 0.8541667], - [0.8958333, 0.8541667], - [0.8958333, 0.8541667], - [0.9375, 0.8541667], - [0.9375, 0.8541667], - [0.9791667, 0.8541667], - [0.9791667, 0.8541667], - [0.02083333, 0.8958333], - [0.02083333, 0.8958333], - [0.0625, 0.8958333], - [0.0625, 0.8958333], - [0.10416666, 0.8958333], - [0.10416666, 0.8958333], - [0.14583333, 0.8958333], - [0.14583333, 0.8958333], - [0.1875, 0.8958333], - [0.1875, 0.8958333], - [0.22916667, 
0.8958333], - [0.22916667, 0.8958333], - [0.27083334, 0.8958333], - [0.27083334, 0.8958333], - [0.3125, 0.8958333], - [0.3125, 0.8958333], - [0.35416666, 0.8958333], - [0.35416666, 0.8958333], - [0.39583334, 0.8958333], - [0.39583334, 0.8958333], - [0.4375, 0.8958333], - [0.4375, 0.8958333], - [0.47916666, 0.8958333], - [0.47916666, 0.8958333], - [0.5208333, 0.8958333], - [0.5208333, 0.8958333], - [0.5625, 0.8958333], - [0.5625, 0.8958333], - [0.6041667, 0.8958333], - [0.6041667, 0.8958333], - [0.6458333, 0.8958333], - [0.6458333, 0.8958333], - [0.6875, 0.8958333], - [0.6875, 0.8958333], - [0.7291667, 0.8958333], - [0.7291667, 0.8958333], - [0.7708333, 0.8958333], - [0.7708333, 0.8958333], - [0.8125, 0.8958333], - [0.8125, 0.8958333], - [0.8541667, 0.8958333], - [0.8541667, 0.8958333], - [0.8958333, 0.8958333], - [0.8958333, 0.8958333], - [0.9375, 0.8958333], - [0.9375, 0.8958333], - [0.9791667, 0.8958333], - [0.9791667, 0.8958333], - [0.02083333, 0.9375], - [0.02083333, 0.9375], - [0.0625, 0.9375], - [0.0625, 0.9375], - [0.10416666, 0.9375], - [0.10416666, 0.9375], - [0.14583333, 0.9375], - [0.14583333, 0.9375], - [0.1875, 0.9375], - [0.1875, 0.9375], - [0.22916667, 0.9375], - [0.22916667, 0.9375], - [0.27083334, 0.9375], - [0.27083334, 0.9375], - [0.3125, 0.9375], - [0.3125, 0.9375], - [0.35416666, 0.9375], - [0.35416666, 0.9375], - [0.39583334, 0.9375], - [0.39583334, 0.9375], - [0.4375, 0.9375], - [0.4375, 0.9375], - [0.47916666, 0.9375], - [0.47916666, 0.9375], - [0.5208333, 0.9375], - [0.5208333, 0.9375], - [0.5625, 0.9375], - [0.5625, 0.9375], - [0.6041667, 0.9375], - [0.6041667, 0.9375], - [0.6458333, 0.9375], - [0.6458333, 0.9375], - [0.6875, 0.9375], - [0.6875, 0.9375], - [0.7291667, 0.9375], - [0.7291667, 0.9375], - [0.7708333, 0.9375], - [0.7708333, 0.9375], - [0.8125, 0.9375], - [0.8125, 0.9375], - [0.8541667, 0.9375], - [0.8541667, 0.9375], - [0.8958333, 0.9375], - [0.8958333, 0.9375], - [0.9375, 0.9375], - [0.9375, 0.9375], - [0.9791667, 0.9375], - 
[0.9791667, 0.9375], - [0.02083333, 0.9791667], - [0.02083333, 0.9791667], - [0.0625, 0.9791667], - [0.0625, 0.9791667], - [0.10416666, 0.9791667], - [0.10416666, 0.9791667], - [0.14583333, 0.9791667], - [0.14583333, 0.9791667], - [0.1875, 0.9791667], - [0.1875, 0.9791667], - [0.22916667, 0.9791667], - [0.22916667, 0.9791667], - [0.27083334, 0.9791667], - [0.27083334, 0.9791667], - [0.3125, 0.9791667], - [0.3125, 0.9791667], - [0.35416666, 0.9791667], - [0.35416666, 0.9791667], - [0.39583334, 0.9791667], - [0.39583334, 0.9791667], - [0.4375, 0.9791667], - [0.4375, 0.9791667], - [0.47916666, 0.9791667], - [0.47916666, 0.9791667], - [0.5208333, 0.9791667], - [0.5208333, 0.9791667], - [0.5625, 0.9791667], - [0.5625, 0.9791667], - [0.6041667, 0.9791667], - [0.6041667, 0.9791667], - [0.6458333, 0.9791667], - [0.6458333, 0.9791667], - [0.6875, 0.9791667], - [0.6875, 0.9791667], - [0.7291667, 0.9791667], - [0.7291667, 0.9791667], - [0.7708333, 0.9791667], - [0.7708333, 0.9791667], - [0.8125, 0.9791667], - [0.8125, 0.9791667], - [0.8541667, 0.9791667], - [0.8541667, 0.9791667], - [0.8958333, 0.9791667], - [0.8958333, 0.9791667], - [0.9375, 0.9791667], - [0.9375, 0.9791667], - [0.9791667, 0.9791667], - [0.9791667, 0.9791667], - [0.04166667, 0.04166667], - [0.04166667, 0.04166667], - [0.04166667, 0.04166667], - [0.04166667, 0.04166667], - [0.04166667, 0.04166667], - [0.04166667, 0.04166667], - [0.125, 0.04166667], - [0.125, 0.04166667], - [0.125, 0.04166667], - [0.125, 0.04166667], - [0.125, 0.04166667], - [0.125, 0.04166667], - [0.20833333, 0.04166667], - [0.20833333, 0.04166667], - [0.20833333, 0.04166667], - [0.20833333, 0.04166667], - [0.20833333, 0.04166667], - [0.20833333, 0.04166667], - [0.29166666, 0.04166667], - [0.29166666, 0.04166667], - [0.29166666, 0.04166667], - [0.29166666, 0.04166667], - [0.29166666, 0.04166667], - [0.29166666, 0.04166667], - [0.375, 0.04166667], - [0.375, 0.04166667], - [0.375, 0.04166667], - [0.375, 0.04166667], - [0.375, 0.04166667], - 
[0.375, 0.04166667], - [0.45833334, 0.04166667], - [0.45833334, 0.04166667], - [0.45833334, 0.04166667], - [0.45833334, 0.04166667], - [0.45833334, 0.04166667], - [0.45833334, 0.04166667], - [0.5416667, 0.04166667], - [0.5416667, 0.04166667], - [0.5416667, 0.04166667], - [0.5416667, 0.04166667], - [0.5416667, 0.04166667], - [0.5416667, 0.04166667], - [0.625, 0.04166667], - [0.625, 0.04166667], - [0.625, 0.04166667], - [0.625, 0.04166667], - [0.625, 0.04166667], - [0.625, 0.04166667], - [0.7083333, 0.04166667], - [0.7083333, 0.04166667], - [0.7083333, 0.04166667], - [0.7083333, 0.04166667], - [0.7083333, 0.04166667], - [0.7083333, 0.04166667], - [0.7916667, 0.04166667], - [0.7916667, 0.04166667], - [0.7916667, 0.04166667], - [0.7916667, 0.04166667], - [0.7916667, 0.04166667], - [0.7916667, 0.04166667], - [0.875, 0.04166667], - [0.875, 0.04166667], - [0.875, 0.04166667], - [0.875, 0.04166667], - [0.875, 0.04166667], - [0.875, 0.04166667], - [0.9583333, 0.04166667], - [0.9583333, 0.04166667], - [0.9583333, 0.04166667], - [0.9583333, 0.04166667], - [0.9583333, 0.04166667], - [0.9583333, 0.04166667], - [0.04166667, 0.125], - [0.04166667, 0.125], - [0.04166667, 0.125], - [0.04166667, 0.125], - [0.04166667, 0.125], - [0.04166667, 0.125], - [0.125, 0.125], - [0.125, 0.125], - [0.125, 0.125], - [0.125, 0.125], - [0.125, 0.125], - [0.125, 0.125], - [0.20833333, 0.125], - [0.20833333, 0.125], - [0.20833333, 0.125], - [0.20833333, 0.125], - [0.20833333, 0.125], - [0.20833333, 0.125], - [0.29166666, 0.125], - [0.29166666, 0.125], - [0.29166666, 0.125], - [0.29166666, 0.125], - [0.29166666, 0.125], - [0.29166666, 0.125], - [0.375, 0.125], - [0.375, 0.125], - [0.375, 0.125], - [0.375, 0.125], - [0.375, 0.125], - [0.375, 0.125], - [0.45833334, 0.125], - [0.45833334, 0.125], - [0.45833334, 0.125], - [0.45833334, 0.125], - [0.45833334, 0.125], - [0.45833334, 0.125], - [0.5416667, 0.125], - [0.5416667, 0.125], - [0.5416667, 0.125], - [0.5416667, 0.125], - [0.5416667, 0.125], - 
[0.5416667, 0.125], - [0.625, 0.125], - [0.625, 0.125], - [0.625, 0.125], - [0.625, 0.125], - [0.625, 0.125], - [0.625, 0.125], - [0.7083333, 0.125], - [0.7083333, 0.125], - [0.7083333, 0.125], - [0.7083333, 0.125], - [0.7083333, 0.125], - [0.7083333, 0.125], - [0.7916667, 0.125], - [0.7916667, 0.125], - [0.7916667, 0.125], - [0.7916667, 0.125], - [0.7916667, 0.125], - [0.7916667, 0.125], - [0.875, 0.125], - [0.875, 0.125], - [0.875, 0.125], - [0.875, 0.125], - [0.875, 0.125], - [0.875, 0.125], - [0.9583333, 0.125], - [0.9583333, 0.125], - [0.9583333, 0.125], - [0.9583333, 0.125], - [0.9583333, 0.125], - [0.9583333, 0.125], - [0.04166667, 0.20833333], - [0.04166667, 0.20833333], - [0.04166667, 0.20833333], - [0.04166667, 0.20833333], - [0.04166667, 0.20833333], - [0.04166667, 0.20833333], - [0.125, 0.20833333], - [0.125, 0.20833333], - [0.125, 0.20833333], - [0.125, 0.20833333], - [0.125, 0.20833333], - [0.125, 0.20833333], - [0.20833333, 0.20833333], - [0.20833333, 0.20833333], - [0.20833333, 0.20833333], - [0.20833333, 0.20833333], - [0.20833333, 0.20833333], - [0.20833333, 0.20833333], - [0.29166666, 0.20833333], - [0.29166666, 0.20833333], - [0.29166666, 0.20833333], - [0.29166666, 0.20833333], - [0.29166666, 0.20833333], - [0.29166666, 0.20833333], - [0.375, 0.20833333], - [0.375, 0.20833333], - [0.375, 0.20833333], - [0.375, 0.20833333], - [0.375, 0.20833333], - [0.375, 0.20833333], - [0.45833334, 0.20833333], - [0.45833334, 0.20833333], - [0.45833334, 0.20833333], - [0.45833334, 0.20833333], - [0.45833334, 0.20833333], - [0.45833334, 0.20833333], - [0.5416667, 0.20833333], - [0.5416667, 0.20833333], - [0.5416667, 0.20833333], - [0.5416667, 0.20833333], - [0.5416667, 0.20833333], - [0.5416667, 0.20833333], - [0.625, 0.20833333], - [0.625, 0.20833333], - [0.625, 0.20833333], - [0.625, 0.20833333], - [0.625, 0.20833333], - [0.625, 0.20833333], - [0.7083333, 0.20833333], - [0.7083333, 0.20833333], - [0.7083333, 0.20833333], - [0.7083333, 0.20833333], - 
[0.7083333, 0.20833333], - [0.7083333, 0.20833333], - [0.7916667, 0.20833333], - [0.7916667, 0.20833333], - [0.7916667, 0.20833333], - [0.7916667, 0.20833333], - [0.7916667, 0.20833333], - [0.7916667, 0.20833333], - [0.875, 0.20833333], - [0.875, 0.20833333], - [0.875, 0.20833333], - [0.875, 0.20833333], - [0.875, 0.20833333], - [0.875, 0.20833333], - [0.9583333, 0.20833333], - [0.9583333, 0.20833333], - [0.9583333, 0.20833333], - [0.9583333, 0.20833333], - [0.9583333, 0.20833333], - [0.9583333, 0.20833333], - [0.04166667, 0.29166666], - [0.04166667, 0.29166666], - [0.04166667, 0.29166666], - [0.04166667, 0.29166666], - [0.04166667, 0.29166666], - [0.04166667, 0.29166666], - [0.125, 0.29166666], - [0.125, 0.29166666], - [0.125, 0.29166666], - [0.125, 0.29166666], - [0.125, 0.29166666], - [0.125, 0.29166666], - [0.20833333, 0.29166666], - [0.20833333, 0.29166666], - [0.20833333, 0.29166666], - [0.20833333, 0.29166666], - [0.20833333, 0.29166666], - [0.20833333, 0.29166666], - [0.29166666, 0.29166666], - [0.29166666, 0.29166666], - [0.29166666, 0.29166666], - [0.29166666, 0.29166666], - [0.29166666, 0.29166666], - [0.29166666, 0.29166666], - [0.375, 0.29166666], - [0.375, 0.29166666], - [0.375, 0.29166666], - [0.375, 0.29166666], - [0.375, 0.29166666], - [0.375, 0.29166666], - [0.45833334, 0.29166666], - [0.45833334, 0.29166666], - [0.45833334, 0.29166666], - [0.45833334, 0.29166666], - [0.45833334, 0.29166666], - [0.45833334, 0.29166666], - [0.5416667, 0.29166666], - [0.5416667, 0.29166666], - [0.5416667, 0.29166666], - [0.5416667, 0.29166666], - [0.5416667, 0.29166666], - [0.5416667, 0.29166666], - [0.625, 0.29166666], - [0.625, 0.29166666], - [0.625, 0.29166666], - [0.625, 0.29166666], - [0.625, 0.29166666], - [0.625, 0.29166666], - [0.7083333, 0.29166666], - [0.7083333, 0.29166666], - [0.7083333, 0.29166666], - [0.7083333, 0.29166666], - [0.7083333, 0.29166666], - [0.7083333, 0.29166666], - [0.7916667, 0.29166666], - [0.7916667, 0.29166666], - [0.7916667, 
0.29166666], - [0.7916667, 0.29166666], - [0.7916667, 0.29166666], - [0.7916667, 0.29166666], - [0.875, 0.29166666], - [0.875, 0.29166666], - [0.875, 0.29166666], - [0.875, 0.29166666], - [0.875, 0.29166666], - [0.875, 0.29166666], - [0.9583333, 0.29166666], - [0.9583333, 0.29166666], - [0.9583333, 0.29166666], - [0.9583333, 0.29166666], - [0.9583333, 0.29166666], - [0.9583333, 0.29166666], - [0.04166667, 0.375], - [0.04166667, 0.375], - [0.04166667, 0.375], - [0.04166667, 0.375], - [0.04166667, 0.375], - [0.04166667, 0.375], - [0.125, 0.375], - [0.125, 0.375], - [0.125, 0.375], - [0.125, 0.375], - [0.125, 0.375], - [0.125, 0.375], - [0.20833333, 0.375], - [0.20833333, 0.375], - [0.20833333, 0.375], - [0.20833333, 0.375], - [0.20833333, 0.375], - [0.20833333, 0.375], - [0.29166666, 0.375], - [0.29166666, 0.375], - [0.29166666, 0.375], - [0.29166666, 0.375], - [0.29166666, 0.375], - [0.29166666, 0.375], - [0.375, 0.375], - [0.375, 0.375], - [0.375, 0.375], - [0.375, 0.375], - [0.375, 0.375], - [0.375, 0.375], - [0.45833334, 0.375], - [0.45833334, 0.375], - [0.45833334, 0.375], - [0.45833334, 0.375], - [0.45833334, 0.375], - [0.45833334, 0.375], - [0.5416667, 0.375], - [0.5416667, 0.375], - [0.5416667, 0.375], - [0.5416667, 0.375], - [0.5416667, 0.375], - [0.5416667, 0.375], - [0.625, 0.375], - [0.625, 0.375], - [0.625, 0.375], - [0.625, 0.375], - [0.625, 0.375], - [0.625, 0.375], - [0.7083333, 0.375], - [0.7083333, 0.375], - [0.7083333, 0.375], - [0.7083333, 0.375], - [0.7083333, 0.375], - [0.7083333, 0.375], - [0.7916667, 0.375], - [0.7916667, 0.375], - [0.7916667, 0.375], - [0.7916667, 0.375], - [0.7916667, 0.375], - [0.7916667, 0.375], - [0.875, 0.375], - [0.875, 0.375], - [0.875, 0.375], - [0.875, 0.375], - [0.875, 0.375], - [0.875, 0.375], - [0.9583333, 0.375], - [0.9583333, 0.375], - [0.9583333, 0.375], - [0.9583333, 0.375], - [0.9583333, 0.375], - [0.9583333, 0.375], - [0.04166667, 0.45833334], - [0.04166667, 0.45833334], - [0.04166667, 0.45833334], - 
[0.04166667, 0.45833334], - [0.04166667, 0.45833334], - [0.04166667, 0.45833334], - [0.125, 0.45833334], - [0.125, 0.45833334], - [0.125, 0.45833334], - [0.125, 0.45833334], - [0.125, 0.45833334], - [0.125, 0.45833334], - [0.20833333, 0.45833334], - [0.20833333, 0.45833334], - [0.20833333, 0.45833334], - [0.20833333, 0.45833334], - [0.20833333, 0.45833334], - [0.20833333, 0.45833334], - [0.29166666, 0.45833334], - [0.29166666, 0.45833334], - [0.29166666, 0.45833334], - [0.29166666, 0.45833334], - [0.29166666, 0.45833334], - [0.29166666, 0.45833334], - [0.375, 0.45833334], - [0.375, 0.45833334], - [0.375, 0.45833334], - [0.375, 0.45833334], - [0.375, 0.45833334], - [0.375, 0.45833334], - [0.45833334, 0.45833334], - [0.45833334, 0.45833334], - [0.45833334, 0.45833334], - [0.45833334, 0.45833334], - [0.45833334, 0.45833334], - [0.45833334, 0.45833334], - [0.5416667, 0.45833334], - [0.5416667, 0.45833334], - [0.5416667, 0.45833334], - [0.5416667, 0.45833334], - [0.5416667, 0.45833334], - [0.5416667, 0.45833334], - [0.625, 0.45833334], - [0.625, 0.45833334], - [0.625, 0.45833334], - [0.625, 0.45833334], - [0.625, 0.45833334], - [0.625, 0.45833334], - [0.7083333, 0.45833334], - [0.7083333, 0.45833334], - [0.7083333, 0.45833334], - [0.7083333, 0.45833334], - [0.7083333, 0.45833334], - [0.7083333, 0.45833334], - [0.7916667, 0.45833334], - [0.7916667, 0.45833334], - [0.7916667, 0.45833334], - [0.7916667, 0.45833334], - [0.7916667, 0.45833334], - [0.7916667, 0.45833334], - [0.875, 0.45833334], - [0.875, 0.45833334], - [0.875, 0.45833334], - [0.875, 0.45833334], - [0.875, 0.45833334], - [0.875, 0.45833334], - [0.9583333, 0.45833334], - [0.9583333, 0.45833334], - [0.9583333, 0.45833334], - [0.9583333, 0.45833334], - [0.9583333, 0.45833334], - [0.9583333, 0.45833334], - [0.04166667, 0.5416667], - [0.04166667, 0.5416667], - [0.04166667, 0.5416667], - [0.04166667, 0.5416667], - [0.04166667, 0.5416667], - [0.04166667, 0.5416667], - [0.125, 0.5416667], - [0.125, 0.5416667], - 
[0.125, 0.5416667], - [0.125, 0.5416667], - [0.125, 0.5416667], - [0.125, 0.5416667], - [0.20833333, 0.5416667], - [0.20833333, 0.5416667], - [0.20833333, 0.5416667], - [0.20833333, 0.5416667], - [0.20833333, 0.5416667], - [0.20833333, 0.5416667], - [0.29166666, 0.5416667], - [0.29166666, 0.5416667], - [0.29166666, 0.5416667], - [0.29166666, 0.5416667], - [0.29166666, 0.5416667], - [0.29166666, 0.5416667], - [0.375, 0.5416667], - [0.375, 0.5416667], - [0.375, 0.5416667], - [0.375, 0.5416667], - [0.375, 0.5416667], - [0.375, 0.5416667], - [0.45833334, 0.5416667], - [0.45833334, 0.5416667], - [0.45833334, 0.5416667], - [0.45833334, 0.5416667], - [0.45833334, 0.5416667], - [0.45833334, 0.5416667], - [0.5416667, 0.5416667], - [0.5416667, 0.5416667], - [0.5416667, 0.5416667], - [0.5416667, 0.5416667], - [0.5416667, 0.5416667], - [0.5416667, 0.5416667], - [0.625, 0.5416667], - [0.625, 0.5416667], - [0.625, 0.5416667], - [0.625, 0.5416667], - [0.625, 0.5416667], - [0.625, 0.5416667], - [0.7083333, 0.5416667], - [0.7083333, 0.5416667], - [0.7083333, 0.5416667], - [0.7083333, 0.5416667], - [0.7083333, 0.5416667], - [0.7083333, 0.5416667], - [0.7916667, 0.5416667], - [0.7916667, 0.5416667], - [0.7916667, 0.5416667], - [0.7916667, 0.5416667], - [0.7916667, 0.5416667], - [0.7916667, 0.5416667], - [0.875, 0.5416667], - [0.875, 0.5416667], - [0.875, 0.5416667], - [0.875, 0.5416667], - [0.875, 0.5416667], - [0.875, 0.5416667], - [0.9583333, 0.5416667], - [0.9583333, 0.5416667], - [0.9583333, 0.5416667], - [0.9583333, 0.5416667], - [0.9583333, 0.5416667], - [0.9583333, 0.5416667], - [0.04166667, 0.625], - [0.04166667, 0.625], - [0.04166667, 0.625], - [0.04166667, 0.625], - [0.04166667, 0.625], - [0.04166667, 0.625], - [0.125, 0.625], - [0.125, 0.625], - [0.125, 0.625], - [0.125, 0.625], - [0.125, 0.625], - [0.125, 0.625], - [0.20833333, 0.625], - [0.20833333, 0.625], - [0.20833333, 0.625], - [0.20833333, 0.625], - [0.20833333, 0.625], - [0.20833333, 0.625], - [0.29166666, 0.625], 
- [0.29166666, 0.625], - [0.29166666, 0.625], - [0.29166666, 0.625], - [0.29166666, 0.625], - [0.29166666, 0.625], - [0.375, 0.625], - [0.375, 0.625], - [0.375, 0.625], - [0.375, 0.625], - [0.375, 0.625], - [0.375, 0.625], - [0.45833334, 0.625], - [0.45833334, 0.625], - [0.45833334, 0.625], - [0.45833334, 0.625], - [0.45833334, 0.625], - [0.45833334, 0.625], - [0.5416667, 0.625], - [0.5416667, 0.625], - [0.5416667, 0.625], - [0.5416667, 0.625], - [0.5416667, 0.625], - [0.5416667, 0.625], - [0.625, 0.625], - [0.625, 0.625], - [0.625, 0.625], - [0.625, 0.625], - [0.625, 0.625], - [0.625, 0.625], - [0.7083333, 0.625], - [0.7083333, 0.625], - [0.7083333, 0.625], - [0.7083333, 0.625], - [0.7083333, 0.625], - [0.7083333, 0.625], - [0.7916667, 0.625], - [0.7916667, 0.625], - [0.7916667, 0.625], - [0.7916667, 0.625], - [0.7916667, 0.625], - [0.7916667, 0.625], - [0.875, 0.625], - [0.875, 0.625], - [0.875, 0.625], - [0.875, 0.625], - [0.875, 0.625], - [0.875, 0.625], - [0.9583333, 0.625], - [0.9583333, 0.625], - [0.9583333, 0.625], - [0.9583333, 0.625], - [0.9583333, 0.625], - [0.9583333, 0.625], - [0.04166667, 0.7083333], - [0.04166667, 0.7083333], - [0.04166667, 0.7083333], - [0.04166667, 0.7083333], - [0.04166667, 0.7083333], - [0.04166667, 0.7083333], - [0.125, 0.7083333], - [0.125, 0.7083333], - [0.125, 0.7083333], - [0.125, 0.7083333], - [0.125, 0.7083333], - [0.125, 0.7083333], - [0.20833333, 0.7083333], - [0.20833333, 0.7083333], - [0.20833333, 0.7083333], - [0.20833333, 0.7083333], - [0.20833333, 0.7083333], - [0.20833333, 0.7083333], - [0.29166666, 0.7083333], - [0.29166666, 0.7083333], - [0.29166666, 0.7083333], - [0.29166666, 0.7083333], - [0.29166666, 0.7083333], - [0.29166666, 0.7083333], - [0.375, 0.7083333], - [0.375, 0.7083333], - [0.375, 0.7083333], - [0.375, 0.7083333], - [0.375, 0.7083333], - [0.375, 0.7083333], - [0.45833334, 0.7083333], - [0.45833334, 0.7083333], - [0.45833334, 0.7083333], - [0.45833334, 0.7083333], - [0.45833334, 0.7083333], - 
[0.45833334, 0.7083333], - [0.5416667, 0.7083333], - [0.5416667, 0.7083333], - [0.5416667, 0.7083333], - [0.5416667, 0.7083333], - [0.5416667, 0.7083333], - [0.5416667, 0.7083333], - [0.625, 0.7083333], - [0.625, 0.7083333], - [0.625, 0.7083333], - [0.625, 0.7083333], - [0.625, 0.7083333], - [0.625, 0.7083333], - [0.7083333, 0.7083333], - [0.7083333, 0.7083333], - [0.7083333, 0.7083333], - [0.7083333, 0.7083333], - [0.7083333, 0.7083333], - [0.7083333, 0.7083333], - [0.7916667, 0.7083333], - [0.7916667, 0.7083333], - [0.7916667, 0.7083333], - [0.7916667, 0.7083333], - [0.7916667, 0.7083333], - [0.7916667, 0.7083333], - [0.875, 0.7083333], - [0.875, 0.7083333], - [0.875, 0.7083333], - [0.875, 0.7083333], - [0.875, 0.7083333], - [0.875, 0.7083333], - [0.9583333, 0.7083333], - [0.9583333, 0.7083333], - [0.9583333, 0.7083333], - [0.9583333, 0.7083333], - [0.9583333, 0.7083333], - [0.9583333, 0.7083333], - [0.04166667, 0.7916667], - [0.04166667, 0.7916667], - [0.04166667, 0.7916667], - [0.04166667, 0.7916667], - [0.04166667, 0.7916667], - [0.04166667, 0.7916667], - [0.125, 0.7916667], - [0.125, 0.7916667], - [0.125, 0.7916667], - [0.125, 0.7916667], - [0.125, 0.7916667], - [0.125, 0.7916667], - [0.20833333, 0.7916667], - [0.20833333, 0.7916667], - [0.20833333, 0.7916667], - [0.20833333, 0.7916667], - [0.20833333, 0.7916667], - [0.20833333, 0.7916667], - [0.29166666, 0.7916667], - [0.29166666, 0.7916667], - [0.29166666, 0.7916667], - [0.29166666, 0.7916667], - [0.29166666, 0.7916667], - [0.29166666, 0.7916667], - [0.375, 0.7916667], - [0.375, 0.7916667], - [0.375, 0.7916667], - [0.375, 0.7916667], - [0.375, 0.7916667], - [0.375, 0.7916667], - [0.45833334, 0.7916667], - [0.45833334, 0.7916667], - [0.45833334, 0.7916667], - [0.45833334, 0.7916667], - [0.45833334, 0.7916667], - [0.45833334, 0.7916667], - [0.5416667, 0.7916667], - [0.5416667, 0.7916667], - [0.5416667, 0.7916667], - [0.5416667, 0.7916667], - [0.5416667, 0.7916667], - [0.5416667, 0.7916667], - [0.625, 
0.7916667], - [0.625, 0.7916667], - [0.625, 0.7916667], - [0.625, 0.7916667], - [0.625, 0.7916667], - [0.625, 0.7916667], - [0.7083333, 0.7916667], - [0.7083333, 0.7916667], - [0.7083333, 0.7916667], - [0.7083333, 0.7916667], - [0.7083333, 0.7916667], - [0.7083333, 0.7916667], - [0.7916667, 0.7916667], - [0.7916667, 0.7916667], - [0.7916667, 0.7916667], - [0.7916667, 0.7916667], - [0.7916667, 0.7916667], - [0.7916667, 0.7916667], - [0.875, 0.7916667], - [0.875, 0.7916667], - [0.875, 0.7916667], - [0.875, 0.7916667], - [0.875, 0.7916667], - [0.875, 0.7916667], - [0.9583333, 0.7916667], - [0.9583333, 0.7916667], - [0.9583333, 0.7916667], - [0.9583333, 0.7916667], - [0.9583333, 0.7916667], - [0.9583333, 0.7916667], - [0.04166667, 0.875], - [0.04166667, 0.875], - [0.04166667, 0.875], - [0.04166667, 0.875], - [0.04166667, 0.875], - [0.04166667, 0.875], - [0.125, 0.875], - [0.125, 0.875], - [0.125, 0.875], - [0.125, 0.875], - [0.125, 0.875], - [0.125, 0.875], - [0.20833333, 0.875], - [0.20833333, 0.875], - [0.20833333, 0.875], - [0.20833333, 0.875], - [0.20833333, 0.875], - [0.20833333, 0.875], - [0.29166666, 0.875], - [0.29166666, 0.875], - [0.29166666, 0.875], - [0.29166666, 0.875], - [0.29166666, 0.875], - [0.29166666, 0.875], - [0.375, 0.875], - [0.375, 0.875], - [0.375, 0.875], - [0.375, 0.875], - [0.375, 0.875], - [0.375, 0.875], - [0.45833334, 0.875], - [0.45833334, 0.875], - [0.45833334, 0.875], - [0.45833334, 0.875], - [0.45833334, 0.875], - [0.45833334, 0.875], - [0.5416667, 0.875], - [0.5416667, 0.875], - [0.5416667, 0.875], - [0.5416667, 0.875], - [0.5416667, 0.875], - [0.5416667, 0.875], - [0.625, 0.875], - [0.625, 0.875], - [0.625, 0.875], - [0.625, 0.875], - [0.625, 0.875], - [0.625, 0.875], - [0.7083333, 0.875], - [0.7083333, 0.875], - [0.7083333, 0.875], - [0.7083333, 0.875], - [0.7083333, 0.875], - [0.7083333, 0.875], - [0.7916667, 0.875], - [0.7916667, 0.875], - [0.7916667, 0.875], - [0.7916667, 0.875], - [0.7916667, 0.875], - [0.7916667, 0.875], - 
[0.875, 0.875], - [0.875, 0.875], - [0.875, 0.875], - [0.875, 0.875], - [0.875, 0.875], - [0.875, 0.875], - [0.9583333, 0.875], - [0.9583333, 0.875], - [0.9583333, 0.875], - [0.9583333, 0.875], - [0.9583333, 0.875], - [0.9583333, 0.875], - [0.04166667, 0.9583333], - [0.04166667, 0.9583333], - [0.04166667, 0.9583333], - [0.04166667, 0.9583333], - [0.04166667, 0.9583333], - [0.04166667, 0.9583333], - [0.125, 0.9583333], - [0.125, 0.9583333], - [0.125, 0.9583333], - [0.125, 0.9583333], - [0.125, 0.9583333], - [0.125, 0.9583333], - [0.20833333, 0.9583333], - [0.20833333, 0.9583333], - [0.20833333, 0.9583333], - [0.20833333, 0.9583333], - [0.20833333, 0.9583333], - [0.20833333, 0.9583333], - [0.29166666, 0.9583333], - [0.29166666, 0.9583333], - [0.29166666, 0.9583333], - [0.29166666, 0.9583333], - [0.29166666, 0.9583333], - [0.29166666, 0.9583333], - [0.375, 0.9583333], - [0.375, 0.9583333], - [0.375, 0.9583333], - [0.375, 0.9583333], - [0.375, 0.9583333], - [0.375, 0.9583333], - [0.45833334, 0.9583333], - [0.45833334, 0.9583333], - [0.45833334, 0.9583333], - [0.45833334, 0.9583333], - [0.45833334, 0.9583333], - [0.45833334, 0.9583333], - [0.5416667, 0.9583333], - [0.5416667, 0.9583333], - [0.5416667, 0.9583333], - [0.5416667, 0.9583333], - [0.5416667, 0.9583333], - [0.5416667, 0.9583333], - [0.625, 0.9583333], - [0.625, 0.9583333], - [0.625, 0.9583333], - [0.625, 0.9583333], - [0.625, 0.9583333], - [0.625, 0.9583333], - [0.7083333, 0.9583333], - [0.7083333, 0.9583333], - [0.7083333, 0.9583333], - [0.7083333, 0.9583333], - [0.7083333, 0.9583333], - [0.7083333, 0.9583333], - [0.7916667, 0.9583333], - [0.7916667, 0.9583333], - [0.7916667, 0.9583333], - [0.7916667, 0.9583333], - [0.7916667, 0.9583333], - [0.7916667, 0.9583333], - [0.875, 0.9583333], - [0.875, 0.9583333], - [0.875, 0.9583333], - [0.875, 0.9583333], - [0.875, 0.9583333], - [0.875, 0.9583333], - [0.9583333, 0.9583333], - [0.9583333, 0.9583333], - [0.9583333, 0.9583333], - [0.9583333, 0.9583333], - [0.9583333, 
0.9583333], - [0.9583333, 0.9583333]], dtype=np.float32) diff --git a/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb.onnx b/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb.onnx deleted file mode 100644 index a899f870..00000000 --- a/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:78ff51c38496b7fc8b8ebdb6cc8c1abb02fa6c38427c6848254cdaba57fcce7c -size 3905734 diff --git a/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb_int8.onnx b/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb_int8.onnx deleted file mode 100644 index 8e4c39d8..00000000 --- a/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9f014de96ef5b6816b3eb9a5fed21a7371ef0f104ea440aa19ce9129fe2af5f6 -size 1157004 diff --git a/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb_int8bq.onnx b/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb_int8bq.onnx deleted file mode 100644 index a19254c6..00000000 --- a/models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d0096a81cf82349b00d0c4cb965973662a86967a8d44ccd8134da2a2f408ee5c -size 1169351 diff --git a/models/person_detection_mediapipe/CMakeLists.txt b/models/person_detection_mediapipe/CMakeLists.txt deleted file mode 100644 index e3f4b051..00000000 --- a/models/person_detection_mediapipe/CMakeLists.txt +++ /dev/null @@ -1,29 +0,0 @@ -cmake_minimum_required(VERSION 3.24.0) -set(project_name "opencv_zoo_person_detection_mediapipe") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS 
${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. -# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/person_detection_mediapipe/LICENSE b/models/person_detection_mediapipe/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/models/person_detection_mediapipe/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/person_detection_mediapipe/README.md b/models/person_detection_mediapipe/README.md deleted file mode 100644 index bba8bd44..00000000 --- a/models/person_detection_mediapipe/README.md +++ /dev/null @@ -1,57 +0,0 @@ -# Person detector from MediaPipe Pose - -This model detects upper body and full body keypoints of a person, and is downloaded from https://github.com/PINTO0309/PINTO_model_zoo/blob/main/053_BlazePose/20_densify_pose_detection/download.sh or converted from TFLite to ONNX using following tools: - -- TFLite model to ONNX with MediaPipe custom `densify` op: https://github.com/PINTO0309/tflite2tensorflow -- simplified by [onnx-simplifier](https://github.com/daquexian/onnx-simplifier) - -SSD Anchors are generated from [GenMediaPipePalmDectionSSDAnchors](https://github.com/VimalMollyn/GenMediaPipePalmDectionSSDAnchors) - -**Note**: -- `person_detection_mediapipe_2023mar_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. 
- -## Demo - -### Python - -Run the following commands to try the demo: - -```bash -# detect on camera input -python demo.py -# detect on an image -python demo.py -i /path/to/image -v - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/opencv_zoo_person_detection_mediapipe -# detect on an image -./build/opencv_zoo_person_detection_mediapipe -m=/path/to/model -i=/path/to/image -v -# get help messages -./build/opencv_zoo_person_detection_mediapipe -h -``` - -### Example outputs - -![webcam demo](./example_outputs/mppersondet_demo.webp) - -## License - -All files in this directory are licensed under [Apache 2.0 License](LICENSE). - -## Reference -- MediaPipe Pose: https://developers.google.com/mediapipe/solutions/vision/pose_landmarker -- MediaPipe pose model and model card: https://github.com/google/mediapipe/blob/master/docs/solutions/models.md#pose -- BlazePose TFJS: https://github.com/tensorflow/tfjs-models/tree/master/pose-detection/src/blazepose_tfjs diff --git a/models/person_detection_mediapipe/demo.cpp b/models/person_detection_mediapipe/demo.cpp deleted file mode 100644 index 59149fd3..00000000 --- a/models/person_detection_mediapipe/demo.cpp +++ /dev/null @@ -1,2522 +0,0 @@ -#include -#include -#include - -#include - -using namespace std; -using namespace cv; -using namespace dnn; - -vector< pair > backendTargetPairs = { - std::make_pair(dnn::DNN_BACKEND_OPENCV, dnn::DNN_TARGET_CPU), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA_FP16), - std::make_pair(dnn::DNN_BACKEND_TIMVX, dnn::DNN_TARGET_NPU), - std::make_pair(dnn::DNN_BACKEND_CANN, dnn::DNN_TARGET_NPU) }; - - -Mat 
getMediapipeAnchor(); - -class MPPersonDet { -private: - Net net; - string modelPath; - Size inputSize; - float scoreThreshold; - float nmsThreshold; - dnn::Backend backendId; - dnn::Target targetId; - int topK; - Mat anchors; - -public: - MPPersonDet(string modPath, float nmsThresh = 0.3, float scoreThresh = 0.5, int tok = 5000, dnn::Backend bId = DNN_BACKEND_DEFAULT, dnn::Target tId = DNN_TARGET_CPU) : - modelPath(modPath), nmsThreshold(nmsThresh), - scoreThreshold(scoreThresh), topK(tok), - backendId(bId), targetId(tId) - { - this->inputSize = Size(224, 224); - this->net = readNet(this->modelPath); - this->net.setPreferableBackend(this->backendId); - this->net.setPreferableTarget(this->targetId); - this->anchors = getMediapipeAnchor(); - } - - pair preprocess(Mat img) - { - Mat blob; - Image2BlobParams paramMediapipe; - paramMediapipe.datalayout = DNN_LAYOUT_NCHW; - paramMediapipe.ddepth = CV_32F; - paramMediapipe.mean = Scalar::all(127.5); - paramMediapipe.scalefactor = Scalar::all(1 / 127.5); - paramMediapipe.size = this->inputSize; - paramMediapipe.swapRB = true; - paramMediapipe.paddingmode = DNN_PMODE_LETTERBOX; - - double ratio = min(this->inputSize.height / double(img.rows), this->inputSize.width / double(img.cols)); - Size padBias(0, 0); - if (img.rows != this->inputSize.height || img.cols != this->inputSize.width) - { - // keep aspect ratio when resize - Size ratioSize(int(img.cols * ratio), int(img.rows * ratio)); - int padH = this->inputSize.height - ratioSize.height; - int padW = this->inputSize.width - ratioSize.width; - padBias.width = padW / 2; - padBias.height = padH / 2; - } - blob = blobFromImageWithParams(img, paramMediapipe); - padBias = Size(int(padBias.width / ratio), int(padBias.height / ratio)); - return pair(blob, padBias); - } - - Mat infer(Mat srcimg) - { - pair w = this->preprocess(srcimg); - Mat inputBlob = get<0>(w); - Size padBias = get<1>(w); - this->net.setInput(inputBlob); - vector outs; - this->net.forward(outs, 
this->net.getUnconnectedOutLayersNames()); - Mat predictions = this->postprocess(outs, Size(srcimg.cols, srcimg.rows), padBias); - return predictions; - } - - Mat postprocess(vector outputs, Size orgSize, Size padBias) - { - Mat score = outputs[1].reshape(0, outputs[1].size[0]); - Mat boxLandDelta = outputs[0].reshape(outputs[0].size[0], outputs[0].size[1]); - Mat boxDelta = boxLandDelta.colRange(0, 4); - Mat landmarkDelta = boxLandDelta.colRange(4, boxLandDelta.cols); - double scale = max(orgSize.height, orgSize.width); - Mat mask = score < -100; - score.setTo(-100, mask); - mask = score > 100; - score.setTo(100, mask); - Mat deno; - exp(-score, deno); - divide(1.0, 1 + deno, score); - boxDelta.colRange(0, 1) = boxDelta.colRange(0, 1) / this->inputSize.width; - boxDelta.colRange(1, 2) = boxDelta.colRange(1, 2) / this->inputSize.height; - boxDelta.colRange(2, 3) = boxDelta.colRange(2, 3) / this->inputSize.width; - boxDelta.colRange(3, 4) = boxDelta.colRange(3, 4) / this->inputSize.height; - Mat xy1 = (boxDelta.colRange(0, 2) - boxDelta.colRange(2, 4) / 2 + this->anchors) * scale; - Mat xy2 = (boxDelta.colRange(0, 2) + boxDelta.colRange(2, 4) / 2 + this->anchors) * scale; - Mat boxes; - hconcat(xy1, xy2, boxes); - vector< Rect2d > rotBoxes(boxes.rows); - boxes.colRange(0, 1) = boxes.colRange(0, 1) - padBias.width; - boxes.colRange(1, 2) = boxes.colRange(1, 2) - padBias.height; - boxes.colRange(2, 3) = boxes.colRange(2, 3) - padBias.width; - boxes.colRange(3, 4) = boxes.colRange(3, 4) - padBias.height; - for (int i = 0; i < boxes.rows; i++) - { - rotBoxes[i] = Rect2d(Point2d(boxes.at(i, 0), boxes.at(i, 1)), Point2d(boxes.at(i, 2), boxes.at(i, 3))); - } - vector< int > keep; - NMSBoxes(rotBoxes, score, this->scoreThreshold, this->nmsThreshold, keep, this->topK); - if (keep.size() == 0) - return Mat(); - int nbCols = landmarkDelta.cols + boxes.cols + 1; - Mat candidates(int(keep.size()), nbCols, CV_32FC1); - int row = 0; - for (auto idx : keep) - { - candidates.at(row, 
nbCols - 1) = score.at(idx); - boxes.row(idx).copyTo(candidates.row(row).colRange(0, 4)); - candidates.at(row, 4) = (landmarkDelta.at(idx, 0) / this->inputSize.width + this->anchors.at(idx, 0)) * scale - padBias.width; - candidates.at(row, 5) = (landmarkDelta.at(idx, 1) / this->inputSize.height + this->anchors.at(idx, 1)) * scale - padBias.height; - candidates.at(row, 6) = (landmarkDelta.at(idx, 2) / this->inputSize.width + this->anchors.at(idx, 0)) * scale - padBias.width; - candidates.at(row, 7) = (landmarkDelta.at(idx, 3) / this->inputSize.height + this->anchors.at(idx, 1)) * scale - padBias.height; - candidates.at(row, 8) = (landmarkDelta.at(idx, 4) / this->inputSize.width + this->anchors.at(idx, 0)) * scale - padBias.width; - candidates.at(row, 9) = (landmarkDelta.at(idx, 5) / this->inputSize.height + this->anchors.at(idx, 1)) * scale - padBias.height; - candidates.at(row, 10) = (landmarkDelta.at(idx, 6) / this->inputSize.width + this->anchors.at(idx, 0)) * scale - padBias.width; - candidates.at(row, 11) = (landmarkDelta.at(idx, 7) / this->inputSize.height + this->anchors.at(idx, 1)) * scale - padBias.height; - row++; - } - return candidates; - - } - - -}; -std::string keys = -"{ help h | | Print help message. }" -"{ model m | person_detection_mediapipe_2023mar.onnx | Usage: Path to the model, defaults to person_detection_mediapipe_2023mar.onnx }" -"{ input i | | Path to input image or video file. Skip this argument to capture frames from a camera.}" -"{ score_threshold | 0.5 | Usage: Set the minimum needed confidence for the model to identify a person, defaults to 0.5. Smaller values may result in faster detection, but will limit accuracy. Filter out persons of confidence < conf_threshold. }" -"{ nms_threshold | 0.3 | Usage: Suppress bounding boxes of iou >= nms_threshold. Default = 0.3. }" -"{ top_k | 1 | Usage: Keep top_k bounding boxes before NMS. }" -"{ save s | 0 | Usage: Specify to save file with results (i.e. bounding box, confidence level). 
Invalid in case of camera input. }" -"{ vis v | 1 | Usage: Specify to open a new window to show results. Invalid in case of camera input. }" -"{ backend bt | 0 | Choose one of computation backends: " -"0: (default) OpenCV implementation + CPU, " -"1: CUDA + GPU (CUDA), " -"2: CUDA + GPU (CUDA FP16), " -"3: TIM-VX + NPU, " -"4: CANN + NPU}"; - -Mat visualize(Mat img, Mat results, double fps = -1) -{ - Mat resImg = img.clone(); - if (fps > 0) - putText(resImg, format("FPS: %2f", fps), Point(0, 15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0, 0, 255)); - - for (int row = 0; row < results.rows; row++) - { - float score = results.at(row, results.cols - 1); - Mat personLandmarks; - results.row(row).colRange(4, results.cols - 1).reshape(0, 4).convertTo(personLandmarks, CV_32S); - - Point hipPoint = Point(personLandmarks.row(0)); - Point fullBody = Point(personLandmarks.row(1)); - Point shoulderPoint = Point(personLandmarks.row(2)); - Point upperBody = Point(personLandmarks.row(3)); - - // draw circle for full body - int radius = int(norm(hipPoint - fullBody)); - circle(resImg, hipPoint, radius, Scalar(255, 0, 0), 2); - - // draw circle for upper body - radius = int(norm(shoulderPoint - upperBody)); - circle(resImg, shoulderPoint, radius, Scalar(0, 255, 255), 2); - - // draw points for each keypoint - for (int iRow=0; iRow < personLandmarks.rows; iRow++) - circle(resImg, Point(personLandmarks.row(iRow)), 2, Scalar(0, 0, 255), 2); - putText(resImg, format("Score: %4f", score), Point(0, resImg.rows - 48), FONT_HERSHEY_DUPLEX, 0.5, Scalar(0, 255, 0)); - } - // put score - putText(resImg, string("Yellow: upper body circle"), Point(0, resImg.rows - 36), FONT_HERSHEY_DUPLEX, 0.5, Scalar(0, 255, 255)); - putText(resImg, string("Blue: full body circle"), Point(0, resImg.rows - 24), FONT_HERSHEY_DUPLEX, 0.5, Scalar(255, 0, 0)); - putText(resImg, string("Red: keypoint"), Point(0, resImg.rows - 12), FONT_HERSHEY_DUPLEX, 0.5, Scalar(0, 0, 255)); - - return resImg; -} - -int main(int argc, 
char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("Person Detector from MediaPipe"); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - string model = parser.get("model"); - float scoreThreshold = parser.get("score_threshold"); - float nmsThreshold = parser.get("nms_threshold"); - int topK = parser.get("top_k"); - bool vis = parser.get("vis"); - bool save = parser.get("save"); - int backendTargetid = parser.get("backend"); - - if (model.empty()) - { - CV_Error(Error::StsError, "Model file " + model + " not found"); - } - VideoCapture cap; - if (parser.has("input")) - cap.open(samples::findFile(parser.get("input"))); - else - cap.open(0); - Mat frame; - - MPPersonDet modelNet(model, nmsThreshold, scoreThreshold, topK, - backendTargetPairs[backendTargetid].first, backendTargetPairs[backendTargetid].second); - //! [Open a video file or an image file or a camera stream] - if (!cap.isOpened()) - CV_Error(Error::StsError, "Cannot open video or file"); - - static const std::string kWinName = "MPPersonDet Demo"; - while (waitKey(1) < 0) - { - cap >> frame; - if (frame.empty()) - { - cout << "Frame is empty" << endl; - waitKey(); - break; - } - TickMeter tm; - tm.start(); - Mat results = modelNet.infer(frame); - tm.stop(); - cout << "Inference time: " << tm.getTimeMilli() << " ms\n"; - Mat img = visualize(frame, results, tm.getFPS()); - if (save && parser.has("input")) - { - cout << "Results saved to result.jpg\n"; - imwrite("result.jpg", img); - } - - if (vis || !parser.has("input")) - { - imshow(kWinName, img); - } - } - return 0; -} - - -Mat getMediapipeAnchor() -{ - Mat anchor= (Mat_(2254,2) << 0.017857142857142856, 0.017857142857142856, - 0.017857142857142856, 0.017857142857142856, - 0.05357142857142857, 0.017857142857142856, - 0.05357142857142857, 0.017857142857142856, - 0.08928571428571429, 0.017857142857142856, - 0.08928571428571429, 0.017857142857142856, - 0.125, 0.017857142857142856, - 0.125, 
0.017857142857142856, - 0.16071428571428573, 0.017857142857142856, - 0.16071428571428573, 0.017857142857142856, - 0.19642857142857142, 0.017857142857142856, - 0.19642857142857142, 0.017857142857142856, - 0.23214285714285715, 0.017857142857142856, - 0.23214285714285715, 0.017857142857142856, - 0.26785714285714285, 0.017857142857142856, - 0.26785714285714285, 0.017857142857142856, - 0.30357142857142855, 0.017857142857142856, - 0.30357142857142855, 0.017857142857142856, - 0.3392857142857143, 0.017857142857142856, - 0.3392857142857143, 0.017857142857142856, - 0.375, 0.017857142857142856, - 0.375, 0.017857142857142856, - 0.4107142857142857, 0.017857142857142856, - 0.4107142857142857, 0.017857142857142856, - 0.44642857142857145, 0.017857142857142856, - 0.44642857142857145, 0.017857142857142856, - 0.48214285714285715, 0.017857142857142856, - 0.48214285714285715, 0.017857142857142856, - 0.5178571428571429, 0.017857142857142856, - 0.5178571428571429, 0.017857142857142856, - 0.5535714285714286, 0.017857142857142856, - 0.5535714285714286, 0.017857142857142856, - 0.5892857142857143, 0.017857142857142856, - 0.5892857142857143, 0.017857142857142856, - 0.625, 0.017857142857142856, - 0.625, 0.017857142857142856, - 0.6607142857142857, 0.017857142857142856, - 0.6607142857142857, 0.017857142857142856, - 0.6964285714285714, 0.017857142857142856, - 0.6964285714285714, 0.017857142857142856, - 0.7321428571428571, 0.017857142857142856, - 0.7321428571428571, 0.017857142857142856, - 0.7678571428571429, 0.017857142857142856, - 0.7678571428571429, 0.017857142857142856, - 0.8035714285714286, 0.017857142857142856, - 0.8035714285714286, 0.017857142857142856, - 0.8392857142857143, 0.017857142857142856, - 0.8392857142857143, 0.017857142857142856, - 0.875, 0.017857142857142856, - 0.875, 0.017857142857142856, - 0.9107142857142857, 0.017857142857142856, - 0.9107142857142857, 0.017857142857142856, - 0.9464285714285714, 0.017857142857142856, - 0.9464285714285714, 0.017857142857142856, - 
0.9821428571428571, 0.017857142857142856, - 0.9821428571428571, 0.017857142857142856, - 0.017857142857142856, 0.05357142857142857, - 0.017857142857142856, 0.05357142857142857, - 0.05357142857142857, 0.05357142857142857, - 0.05357142857142857, 0.05357142857142857, - 0.08928571428571429, 0.05357142857142857, - 0.08928571428571429, 0.05357142857142857, - 0.125, 0.05357142857142857, - 0.125, 0.05357142857142857, - 0.16071428571428573, 0.05357142857142857, - 0.16071428571428573, 0.05357142857142857, - 0.19642857142857142, 0.05357142857142857, - 0.19642857142857142, 0.05357142857142857, - 0.23214285714285715, 0.05357142857142857, - 0.23214285714285715, 0.05357142857142857, - 0.26785714285714285, 0.05357142857142857, - 0.26785714285714285, 0.05357142857142857, - 0.30357142857142855, 0.05357142857142857, - 0.30357142857142855, 0.05357142857142857, - 0.3392857142857143, 0.05357142857142857, - 0.3392857142857143, 0.05357142857142857, - 0.375, 0.05357142857142857, - 0.375, 0.05357142857142857, - 0.4107142857142857, 0.05357142857142857, - 0.4107142857142857, 0.05357142857142857, - 0.44642857142857145, 0.05357142857142857, - 0.44642857142857145, 0.05357142857142857, - 0.48214285714285715, 0.05357142857142857, - 0.48214285714285715, 0.05357142857142857, - 0.5178571428571429, 0.05357142857142857, - 0.5178571428571429, 0.05357142857142857, - 0.5535714285714286, 0.05357142857142857, - 0.5535714285714286, 0.05357142857142857, - 0.5892857142857143, 0.05357142857142857, - 0.5892857142857143, 0.05357142857142857, - 0.625, 0.05357142857142857, - 0.625, 0.05357142857142857, - 0.6607142857142857, 0.05357142857142857, - 0.6607142857142857, 0.05357142857142857, - 0.6964285714285714, 0.05357142857142857, - 0.6964285714285714, 0.05357142857142857, - 0.7321428571428571, 0.05357142857142857, - 0.7321428571428571, 0.05357142857142857, - 0.7678571428571429, 0.05357142857142857, - 0.7678571428571429, 0.05357142857142857, - 0.8035714285714286, 0.05357142857142857, - 0.8035714285714286, 
0.05357142857142857, - 0.8392857142857143, 0.05357142857142857, - 0.8392857142857143, 0.05357142857142857, - 0.875, 0.05357142857142857, - 0.875, 0.05357142857142857, - 0.9107142857142857, 0.05357142857142857, - 0.9107142857142857, 0.05357142857142857, - 0.9464285714285714, 0.05357142857142857, - 0.9464285714285714, 0.05357142857142857, - 0.9821428571428571, 0.05357142857142857, - 0.9821428571428571, 0.05357142857142857, - 0.017857142857142856, 0.08928571428571429, - 0.017857142857142856, 0.08928571428571429, - 0.05357142857142857, 0.08928571428571429, - 0.05357142857142857, 0.08928571428571429, - 0.08928571428571429, 0.08928571428571429, - 0.08928571428571429, 0.08928571428571429, - 0.125, 0.08928571428571429, - 0.125, 0.08928571428571429, - 0.16071428571428573, 0.08928571428571429, - 0.16071428571428573, 0.08928571428571429, - 0.19642857142857142, 0.08928571428571429, - 0.19642857142857142, 0.08928571428571429, - 0.23214285714285715, 0.08928571428571429, - 0.23214285714285715, 0.08928571428571429, - 0.26785714285714285, 0.08928571428571429, - 0.26785714285714285, 0.08928571428571429, - 0.30357142857142855, 0.08928571428571429, - 0.30357142857142855, 0.08928571428571429, - 0.3392857142857143, 0.08928571428571429, - 0.3392857142857143, 0.08928571428571429, - 0.375, 0.08928571428571429, - 0.375, 0.08928571428571429, - 0.4107142857142857, 0.08928571428571429, - 0.4107142857142857, 0.08928571428571429, - 0.44642857142857145, 0.08928571428571429, - 0.44642857142857145, 0.08928571428571429, - 0.48214285714285715, 0.08928571428571429, - 0.48214285714285715, 0.08928571428571429, - 0.5178571428571429, 0.08928571428571429, - 0.5178571428571429, 0.08928571428571429, - 0.5535714285714286, 0.08928571428571429, - 0.5535714285714286, 0.08928571428571429, - 0.5892857142857143, 0.08928571428571429, - 0.5892857142857143, 0.08928571428571429, - 0.625, 0.08928571428571429, - 0.625, 0.08928571428571429, - 0.6607142857142857, 0.08928571428571429, - 0.6607142857142857, 
0.08928571428571429, - 0.6964285714285714, 0.08928571428571429, - 0.6964285714285714, 0.08928571428571429, - 0.7321428571428571, 0.08928571428571429, - 0.7321428571428571, 0.08928571428571429, - 0.7678571428571429, 0.08928571428571429, - 0.7678571428571429, 0.08928571428571429, - 0.8035714285714286, 0.08928571428571429, - 0.8035714285714286, 0.08928571428571429, - 0.8392857142857143, 0.08928571428571429, - 0.8392857142857143, 0.08928571428571429, - 0.875, 0.08928571428571429, - 0.875, 0.08928571428571429, - 0.9107142857142857, 0.08928571428571429, - 0.9107142857142857, 0.08928571428571429, - 0.9464285714285714, 0.08928571428571429, - 0.9464285714285714, 0.08928571428571429, - 0.9821428571428571, 0.08928571428571429, - 0.9821428571428571, 0.08928571428571429, - 0.017857142857142856, 0.125, - 0.017857142857142856, 0.125, - 0.05357142857142857, 0.125, - 0.05357142857142857, 0.125, - 0.08928571428571429, 0.125, - 0.08928571428571429, 0.125, - 0.125, 0.125, - 0.125, 0.125, - 0.16071428571428573, 0.125, - 0.16071428571428573, 0.125, - 0.19642857142857142, 0.125, - 0.19642857142857142, 0.125, - 0.23214285714285715, 0.125, - 0.23214285714285715, 0.125, - 0.26785714285714285, 0.125, - 0.26785714285714285, 0.125, - 0.30357142857142855, 0.125, - 0.30357142857142855, 0.125, - 0.3392857142857143, 0.125, - 0.3392857142857143, 0.125, - 0.375, 0.125, - 0.375, 0.125, - 0.4107142857142857, 0.125, - 0.4107142857142857, 0.125, - 0.44642857142857145, 0.125, - 0.44642857142857145, 0.125, - 0.48214285714285715, 0.125, - 0.48214285714285715, 0.125, - 0.5178571428571429, 0.125, - 0.5178571428571429, 0.125, - 0.5535714285714286, 0.125, - 0.5535714285714286, 0.125, - 0.5892857142857143, 0.125, - 0.5892857142857143, 0.125, - 0.625, 0.125, - 0.625, 0.125, - 0.6607142857142857, 0.125, - 0.6607142857142857, 0.125, - 0.6964285714285714, 0.125, - 0.6964285714285714, 0.125, - 0.7321428571428571, 0.125, - 0.7321428571428571, 0.125, - 0.7678571428571429, 0.125, - 0.7678571428571429, 0.125, - 
0.8035714285714286, 0.125, - 0.8035714285714286, 0.125, - 0.8392857142857143, 0.125, - 0.8392857142857143, 0.125, - 0.875, 0.125, - 0.875, 0.125, - 0.9107142857142857, 0.125, - 0.9107142857142857, 0.125, - 0.9464285714285714, 0.125, - 0.9464285714285714, 0.125, - 0.9821428571428571, 0.125, - 0.9821428571428571, 0.125, - 0.017857142857142856, 0.16071428571428573, - 0.017857142857142856, 0.16071428571428573, - 0.05357142857142857, 0.16071428571428573, - 0.05357142857142857, 0.16071428571428573, - 0.08928571428571429, 0.16071428571428573, - 0.08928571428571429, 0.16071428571428573, - 0.125, 0.16071428571428573, - 0.125, 0.16071428571428573, - 0.16071428571428573, 0.16071428571428573, - 0.16071428571428573, 0.16071428571428573, - 0.19642857142857142, 0.16071428571428573, - 0.19642857142857142, 0.16071428571428573, - 0.23214285714285715, 0.16071428571428573, - 0.23214285714285715, 0.16071428571428573, - 0.26785714285714285, 0.16071428571428573, - 0.26785714285714285, 0.16071428571428573, - 0.30357142857142855, 0.16071428571428573, - 0.30357142857142855, 0.16071428571428573, - 0.3392857142857143, 0.16071428571428573, - 0.3392857142857143, 0.16071428571428573, - 0.375, 0.16071428571428573, - 0.375, 0.16071428571428573, - 0.4107142857142857, 0.16071428571428573, - 0.4107142857142857, 0.16071428571428573, - 0.44642857142857145, 0.16071428571428573, - 0.44642857142857145, 0.16071428571428573, - 0.48214285714285715, 0.16071428571428573, - 0.48214285714285715, 0.16071428571428573, - 0.5178571428571429, 0.16071428571428573, - 0.5178571428571429, 0.16071428571428573, - 0.5535714285714286, 0.16071428571428573, - 0.5535714285714286, 0.16071428571428573, - 0.5892857142857143, 0.16071428571428573, - 0.5892857142857143, 0.16071428571428573, - 0.625, 0.16071428571428573, - 0.625, 0.16071428571428573, - 0.6607142857142857, 0.16071428571428573, - 0.6607142857142857, 0.16071428571428573, - 0.6964285714285714, 0.16071428571428573, - 0.6964285714285714, 0.16071428571428573, - 
0.7321428571428571, 0.16071428571428573, - 0.7321428571428571, 0.16071428571428573, - 0.7678571428571429, 0.16071428571428573, - 0.7678571428571429, 0.16071428571428573, - 0.8035714285714286, 0.16071428571428573, - 0.8035714285714286, 0.16071428571428573, - 0.8392857142857143, 0.16071428571428573, - 0.8392857142857143, 0.16071428571428573, - 0.875, 0.16071428571428573, - 0.875, 0.16071428571428573, - 0.9107142857142857, 0.16071428571428573, - 0.9107142857142857, 0.16071428571428573, - 0.9464285714285714, 0.16071428571428573, - 0.9464285714285714, 0.16071428571428573, - 0.9821428571428571, 0.16071428571428573, - 0.9821428571428571, 0.16071428571428573, - 0.017857142857142856, 0.19642857142857142, - 0.017857142857142856, 0.19642857142857142, - 0.05357142857142857, 0.19642857142857142, - 0.05357142857142857, 0.19642857142857142, - 0.08928571428571429, 0.19642857142857142, - 0.08928571428571429, 0.19642857142857142, - 0.125, 0.19642857142857142, - 0.125, 0.19642857142857142, - 0.16071428571428573, 0.19642857142857142, - 0.16071428571428573, 0.19642857142857142, - 0.19642857142857142, 0.19642857142857142, - 0.19642857142857142, 0.19642857142857142, - 0.23214285714285715, 0.19642857142857142, - 0.23214285714285715, 0.19642857142857142, - 0.26785714285714285, 0.19642857142857142, - 0.26785714285714285, 0.19642857142857142, - 0.30357142857142855, 0.19642857142857142, - 0.30357142857142855, 0.19642857142857142, - 0.3392857142857143, 0.19642857142857142, - 0.3392857142857143, 0.19642857142857142, - 0.375, 0.19642857142857142, - 0.375, 0.19642857142857142, - 0.4107142857142857, 0.19642857142857142, - 0.4107142857142857, 0.19642857142857142, - 0.44642857142857145, 0.19642857142857142, - 0.44642857142857145, 0.19642857142857142, - 0.48214285714285715, 0.19642857142857142, - 0.48214285714285715, 0.19642857142857142, - 0.5178571428571429, 0.19642857142857142, - 0.5178571428571429, 0.19642857142857142, - 0.5535714285714286, 0.19642857142857142, - 0.5535714285714286, 
0.19642857142857142, - 0.5892857142857143, 0.19642857142857142, - 0.5892857142857143, 0.19642857142857142, - 0.625, 0.19642857142857142, - 0.625, 0.19642857142857142, - 0.6607142857142857, 0.19642857142857142, - 0.6607142857142857, 0.19642857142857142, - 0.6964285714285714, 0.19642857142857142, - 0.6964285714285714, 0.19642857142857142, - 0.7321428571428571, 0.19642857142857142, - 0.7321428571428571, 0.19642857142857142, - 0.7678571428571429, 0.19642857142857142, - 0.7678571428571429, 0.19642857142857142, - 0.8035714285714286, 0.19642857142857142, - 0.8035714285714286, 0.19642857142857142, - 0.8392857142857143, 0.19642857142857142, - 0.8392857142857143, 0.19642857142857142, - 0.875, 0.19642857142857142, - 0.875, 0.19642857142857142, - 0.9107142857142857, 0.19642857142857142, - 0.9107142857142857, 0.19642857142857142, - 0.9464285714285714, 0.19642857142857142, - 0.9464285714285714, 0.19642857142857142, - 0.9821428571428571, 0.19642857142857142, - 0.9821428571428571, 0.19642857142857142, - 0.017857142857142856, 0.23214285714285715, - 0.017857142857142856, 0.23214285714285715, - 0.05357142857142857, 0.23214285714285715, - 0.05357142857142857, 0.23214285714285715, - 0.08928571428571429, 0.23214285714285715, - 0.08928571428571429, 0.23214285714285715, - 0.125, 0.23214285714285715, - 0.125, 0.23214285714285715, - 0.16071428571428573, 0.23214285714285715, - 0.16071428571428573, 0.23214285714285715, - 0.19642857142857142, 0.23214285714285715, - 0.19642857142857142, 0.23214285714285715, - 0.23214285714285715, 0.23214285714285715, - 0.23214285714285715, 0.23214285714285715, - 0.26785714285714285, 0.23214285714285715, - 0.26785714285714285, 0.23214285714285715, - 0.30357142857142855, 0.23214285714285715, - 0.30357142857142855, 0.23214285714285715, - 0.3392857142857143, 0.23214285714285715, - 0.3392857142857143, 0.23214285714285715, - 0.375, 0.23214285714285715, - 0.375, 0.23214285714285715, - 0.4107142857142857, 0.23214285714285715, - 0.4107142857142857, 0.23214285714285715, 
- 0.44642857142857145, 0.23214285714285715, - 0.44642857142857145, 0.23214285714285715, - 0.48214285714285715, 0.23214285714285715, - 0.48214285714285715, 0.23214285714285715, - 0.5178571428571429, 0.23214285714285715, - 0.5178571428571429, 0.23214285714285715, - 0.5535714285714286, 0.23214285714285715, - 0.5535714285714286, 0.23214285714285715, - 0.5892857142857143, 0.23214285714285715, - 0.5892857142857143, 0.23214285714285715, - 0.625, 0.23214285714285715, - 0.625, 0.23214285714285715, - 0.6607142857142857, 0.23214285714285715, - 0.6607142857142857, 0.23214285714285715, - 0.6964285714285714, 0.23214285714285715, - 0.6964285714285714, 0.23214285714285715, - 0.7321428571428571, 0.23214285714285715, - 0.7321428571428571, 0.23214285714285715, - 0.7678571428571429, 0.23214285714285715, - 0.7678571428571429, 0.23214285714285715, - 0.8035714285714286, 0.23214285714285715, - 0.8035714285714286, 0.23214285714285715, - 0.8392857142857143, 0.23214285714285715, - 0.8392857142857143, 0.23214285714285715, - 0.875, 0.23214285714285715, - 0.875, 0.23214285714285715, - 0.9107142857142857, 0.23214285714285715, - 0.9107142857142857, 0.23214285714285715, - 0.9464285714285714, 0.23214285714285715, - 0.9464285714285714, 0.23214285714285715, - 0.9821428571428571, 0.23214285714285715, - 0.9821428571428571, 0.23214285714285715, - 0.017857142857142856, 0.26785714285714285, - 0.017857142857142856, 0.26785714285714285, - 0.05357142857142857, 0.26785714285714285, - 0.05357142857142857, 0.26785714285714285, - 0.08928571428571429, 0.26785714285714285, - 0.08928571428571429, 0.26785714285714285, - 0.125, 0.26785714285714285, - 0.125, 0.26785714285714285, - 0.16071428571428573, 0.26785714285714285, - 0.16071428571428573, 0.26785714285714285, - 0.19642857142857142, 0.26785714285714285, - 0.19642857142857142, 0.26785714285714285, - 0.23214285714285715, 0.26785714285714285, - 0.23214285714285715, 0.26785714285714285, - 0.26785714285714285, 0.26785714285714285, - 0.26785714285714285, 
0.26785714285714285, - 0.30357142857142855, 0.26785714285714285, - 0.30357142857142855, 0.26785714285714285, - 0.3392857142857143, 0.26785714285714285, - 0.3392857142857143, 0.26785714285714285, - 0.375, 0.26785714285714285, - 0.375, 0.26785714285714285, - 0.4107142857142857, 0.26785714285714285, - 0.4107142857142857, 0.26785714285714285, - 0.44642857142857145, 0.26785714285714285, - 0.44642857142857145, 0.26785714285714285, - 0.48214285714285715, 0.26785714285714285, - 0.48214285714285715, 0.26785714285714285, - 0.5178571428571429, 0.26785714285714285, - 0.5178571428571429, 0.26785714285714285, - 0.5535714285714286, 0.26785714285714285, - 0.5535714285714286, 0.26785714285714285, - 0.5892857142857143, 0.26785714285714285, - 0.5892857142857143, 0.26785714285714285, - 0.625, 0.26785714285714285, - 0.625, 0.26785714285714285, - 0.6607142857142857, 0.26785714285714285, - 0.6607142857142857, 0.26785714285714285, - 0.6964285714285714, 0.26785714285714285, - 0.6964285714285714, 0.26785714285714285, - 0.7321428571428571, 0.26785714285714285, - 0.7321428571428571, 0.26785714285714285, - 0.7678571428571429, 0.26785714285714285, - 0.7678571428571429, 0.26785714285714285, - 0.8035714285714286, 0.26785714285714285, - 0.8035714285714286, 0.26785714285714285, - 0.8392857142857143, 0.26785714285714285, - 0.8392857142857143, 0.26785714285714285, - 0.875, 0.26785714285714285, - 0.875, 0.26785714285714285, - 0.9107142857142857, 0.26785714285714285, - 0.9107142857142857, 0.26785714285714285, - 0.9464285714285714, 0.26785714285714285, - 0.9464285714285714, 0.26785714285714285, - 0.9821428571428571, 0.26785714285714285, - 0.9821428571428571, 0.26785714285714285, - 0.017857142857142856, 0.30357142857142855, - 0.017857142857142856, 0.30357142857142855, - 0.05357142857142857, 0.30357142857142855, - 0.05357142857142857, 0.30357142857142855, - 0.08928571428571429, 0.30357142857142855, - 0.08928571428571429, 0.30357142857142855, - 0.125, 0.30357142857142855, - 0.125, 0.30357142857142855, - 
0.16071428571428573, 0.30357142857142855, - 0.16071428571428573, 0.30357142857142855, - 0.19642857142857142, 0.30357142857142855, - 0.19642857142857142, 0.30357142857142855, - 0.23214285714285715, 0.30357142857142855, - 0.23214285714285715, 0.30357142857142855, - 0.26785714285714285, 0.30357142857142855, - 0.26785714285714285, 0.30357142857142855, - 0.30357142857142855, 0.30357142857142855, - 0.30357142857142855, 0.30357142857142855, - 0.3392857142857143, 0.30357142857142855, - 0.3392857142857143, 0.30357142857142855, - 0.375, 0.30357142857142855, - 0.375, 0.30357142857142855, - 0.4107142857142857, 0.30357142857142855, - 0.4107142857142857, 0.30357142857142855, - 0.44642857142857145, 0.30357142857142855, - 0.44642857142857145, 0.30357142857142855, - 0.48214285714285715, 0.30357142857142855, - 0.48214285714285715, 0.30357142857142855, - 0.5178571428571429, 0.30357142857142855, - 0.5178571428571429, 0.30357142857142855, - 0.5535714285714286, 0.30357142857142855, - 0.5535714285714286, 0.30357142857142855, - 0.5892857142857143, 0.30357142857142855, - 0.5892857142857143, 0.30357142857142855, - 0.625, 0.30357142857142855, - 0.625, 0.30357142857142855, - 0.6607142857142857, 0.30357142857142855, - 0.6607142857142857, 0.30357142857142855, - 0.6964285714285714, 0.30357142857142855, - 0.6964285714285714, 0.30357142857142855, - 0.7321428571428571, 0.30357142857142855, - 0.7321428571428571, 0.30357142857142855, - 0.7678571428571429, 0.30357142857142855, - 0.7678571428571429, 0.30357142857142855, - 0.8035714285714286, 0.30357142857142855, - 0.8035714285714286, 0.30357142857142855, - 0.8392857142857143, 0.30357142857142855, - 0.8392857142857143, 0.30357142857142855, - 0.875, 0.30357142857142855, - 0.875, 0.30357142857142855, - 0.9107142857142857, 0.30357142857142855, - 0.9107142857142857, 0.30357142857142855, - 0.9464285714285714, 0.30357142857142855, - 0.9464285714285714, 0.30357142857142855, - 0.9821428571428571, 0.30357142857142855, - 0.9821428571428571, 0.30357142857142855, - 
0.017857142857142856, 0.3392857142857143, - 0.017857142857142856, 0.3392857142857143, - 0.05357142857142857, 0.3392857142857143, - 0.05357142857142857, 0.3392857142857143, - 0.08928571428571429, 0.3392857142857143, - 0.08928571428571429, 0.3392857142857143, - 0.125, 0.3392857142857143, - 0.125, 0.3392857142857143, - 0.16071428571428573, 0.3392857142857143, - 0.16071428571428573, 0.3392857142857143, - 0.19642857142857142, 0.3392857142857143, - 0.19642857142857142, 0.3392857142857143, - 0.23214285714285715, 0.3392857142857143, - 0.23214285714285715, 0.3392857142857143, - 0.26785714285714285, 0.3392857142857143, - 0.26785714285714285, 0.3392857142857143, - 0.30357142857142855, 0.3392857142857143, - 0.30357142857142855, 0.3392857142857143, - 0.3392857142857143, 0.3392857142857143, - 0.3392857142857143, 0.3392857142857143, - 0.375, 0.3392857142857143, - 0.375, 0.3392857142857143, - 0.4107142857142857, 0.3392857142857143, - 0.4107142857142857, 0.3392857142857143, - 0.44642857142857145, 0.3392857142857143, - 0.44642857142857145, 0.3392857142857143, - 0.48214285714285715, 0.3392857142857143, - 0.48214285714285715, 0.3392857142857143, - 0.5178571428571429, 0.3392857142857143, - 0.5178571428571429, 0.3392857142857143, - 0.5535714285714286, 0.3392857142857143, - 0.5535714285714286, 0.3392857142857143, - 0.5892857142857143, 0.3392857142857143, - 0.5892857142857143, 0.3392857142857143, - 0.625, 0.3392857142857143, - 0.625, 0.3392857142857143, - 0.6607142857142857, 0.3392857142857143, - 0.6607142857142857, 0.3392857142857143, - 0.6964285714285714, 0.3392857142857143, - 0.6964285714285714, 0.3392857142857143, - 0.7321428571428571, 0.3392857142857143, - 0.7321428571428571, 0.3392857142857143, - 0.7678571428571429, 0.3392857142857143, - 0.7678571428571429, 0.3392857142857143, - 0.8035714285714286, 0.3392857142857143, - 0.8035714285714286, 0.3392857142857143, - 0.8392857142857143, 0.3392857142857143, - 0.8392857142857143, 0.3392857142857143, - 0.875, 0.3392857142857143, - 0.875, 
0.3392857142857143, - 0.9107142857142857, 0.3392857142857143, - 0.9107142857142857, 0.3392857142857143, - 0.9464285714285714, 0.3392857142857143, - 0.9464285714285714, 0.3392857142857143, - 0.9821428571428571, 0.3392857142857143, - 0.9821428571428571, 0.3392857142857143, - 0.017857142857142856, 0.375, - 0.017857142857142856, 0.375, - 0.05357142857142857, 0.375, - 0.05357142857142857, 0.375, - 0.08928571428571429, 0.375, - 0.08928571428571429, 0.375, - 0.125, 0.375, - 0.125, 0.375, - 0.16071428571428573, 0.375, - 0.16071428571428573, 0.375, - 0.19642857142857142, 0.375, - 0.19642857142857142, 0.375, - 0.23214285714285715, 0.375, - 0.23214285714285715, 0.375, - 0.26785714285714285, 0.375, - 0.26785714285714285, 0.375, - 0.30357142857142855, 0.375, - 0.30357142857142855, 0.375, - 0.3392857142857143, 0.375, - 0.3392857142857143, 0.375, - 0.375, 0.375, - 0.375, 0.375, - 0.4107142857142857, 0.375, - 0.4107142857142857, 0.375, - 0.44642857142857145, 0.375, - 0.44642857142857145, 0.375, - 0.48214285714285715, 0.375, - 0.48214285714285715, 0.375, - 0.5178571428571429, 0.375, - 0.5178571428571429, 0.375, - 0.5535714285714286, 0.375, - 0.5535714285714286, 0.375, - 0.5892857142857143, 0.375, - 0.5892857142857143, 0.375, - 0.625, 0.375, - 0.625, 0.375, - 0.6607142857142857, 0.375, - 0.6607142857142857, 0.375, - 0.6964285714285714, 0.375, - 0.6964285714285714, 0.375, - 0.7321428571428571, 0.375, - 0.7321428571428571, 0.375, - 0.7678571428571429, 0.375, - 0.7678571428571429, 0.375, - 0.8035714285714286, 0.375, - 0.8035714285714286, 0.375, - 0.8392857142857143, 0.375, - 0.8392857142857143, 0.375, - 0.875, 0.375, - 0.875, 0.375, - 0.9107142857142857, 0.375, - 0.9107142857142857, 0.375, - 0.9464285714285714, 0.375, - 0.9464285714285714, 0.375, - 0.9821428571428571, 0.375, - 0.9821428571428571, 0.375, - 0.017857142857142856, 0.4107142857142857, - 0.017857142857142856, 0.4107142857142857, - 0.05357142857142857, 0.4107142857142857, - 0.05357142857142857, 0.4107142857142857, - 
0.08928571428571429, 0.4107142857142857, - 0.08928571428571429, 0.4107142857142857, - 0.125, 0.4107142857142857, - 0.125, 0.4107142857142857, - 0.16071428571428573, 0.4107142857142857, - 0.16071428571428573, 0.4107142857142857, - 0.19642857142857142, 0.4107142857142857, - 0.19642857142857142, 0.4107142857142857, - 0.23214285714285715, 0.4107142857142857, - 0.23214285714285715, 0.4107142857142857, - 0.26785714285714285, 0.4107142857142857, - 0.26785714285714285, 0.4107142857142857, - 0.30357142857142855, 0.4107142857142857, - 0.30357142857142855, 0.4107142857142857, - 0.3392857142857143, 0.4107142857142857, - 0.3392857142857143, 0.4107142857142857, - 0.375, 0.4107142857142857, - 0.375, 0.4107142857142857, - 0.4107142857142857, 0.4107142857142857, - 0.4107142857142857, 0.4107142857142857, - 0.44642857142857145, 0.4107142857142857, - 0.44642857142857145, 0.4107142857142857, - 0.48214285714285715, 0.4107142857142857, - 0.48214285714285715, 0.4107142857142857, - 0.5178571428571429, 0.4107142857142857, - 0.5178571428571429, 0.4107142857142857, - 0.5535714285714286, 0.4107142857142857, - 0.5535714285714286, 0.4107142857142857, - 0.5892857142857143, 0.4107142857142857, - 0.5892857142857143, 0.4107142857142857, - 0.625, 0.4107142857142857, - 0.625, 0.4107142857142857, - 0.6607142857142857, 0.4107142857142857, - 0.6607142857142857, 0.4107142857142857, - 0.6964285714285714, 0.4107142857142857, - 0.6964285714285714, 0.4107142857142857, - 0.7321428571428571, 0.4107142857142857, - 0.7321428571428571, 0.4107142857142857, - 0.7678571428571429, 0.4107142857142857, - 0.7678571428571429, 0.4107142857142857, - 0.8035714285714286, 0.4107142857142857, - 0.8035714285714286, 0.4107142857142857, - 0.8392857142857143, 0.4107142857142857, - 0.8392857142857143, 0.4107142857142857, - 0.875, 0.4107142857142857, - 0.875, 0.4107142857142857, - 0.9107142857142857, 0.4107142857142857, - 0.9107142857142857, 0.4107142857142857, - 0.9464285714285714, 0.4107142857142857, - 0.9464285714285714, 
0.4107142857142857, - 0.9821428571428571, 0.4107142857142857, - 0.9821428571428571, 0.4107142857142857, - 0.017857142857142856, 0.44642857142857145, - 0.017857142857142856, 0.44642857142857145, - 0.05357142857142857, 0.44642857142857145, - 0.05357142857142857, 0.44642857142857145, - 0.08928571428571429, 0.44642857142857145, - 0.08928571428571429, 0.44642857142857145, - 0.125, 0.44642857142857145, - 0.125, 0.44642857142857145, - 0.16071428571428573, 0.44642857142857145, - 0.16071428571428573, 0.44642857142857145, - 0.19642857142857142, 0.44642857142857145, - 0.19642857142857142, 0.44642857142857145, - 0.23214285714285715, 0.44642857142857145, - 0.23214285714285715, 0.44642857142857145, - 0.26785714285714285, 0.44642857142857145, - 0.26785714285714285, 0.44642857142857145, - 0.30357142857142855, 0.44642857142857145, - 0.30357142857142855, 0.44642857142857145, - 0.3392857142857143, 0.44642857142857145, - 0.3392857142857143, 0.44642857142857145, - 0.375, 0.44642857142857145, - 0.375, 0.44642857142857145, - 0.4107142857142857, 0.44642857142857145, - 0.4107142857142857, 0.44642857142857145, - 0.44642857142857145, 0.44642857142857145, - 0.44642857142857145, 0.44642857142857145, - 0.48214285714285715, 0.44642857142857145, - 0.48214285714285715, 0.44642857142857145, - 0.5178571428571429, 0.44642857142857145, - 0.5178571428571429, 0.44642857142857145, - 0.5535714285714286, 0.44642857142857145, - 0.5535714285714286, 0.44642857142857145, - 0.5892857142857143, 0.44642857142857145, - 0.5892857142857143, 0.44642857142857145, - 0.625, 0.44642857142857145, - 0.625, 0.44642857142857145, - 0.6607142857142857, 0.44642857142857145, - 0.6607142857142857, 0.44642857142857145, - 0.6964285714285714, 0.44642857142857145, - 0.6964285714285714, 0.44642857142857145, - 0.7321428571428571, 0.44642857142857145, - 0.7321428571428571, 0.44642857142857145, - 0.7678571428571429, 0.44642857142857145, - 0.7678571428571429, 0.44642857142857145, - 0.8035714285714286, 0.44642857142857145, - 
0.8035714285714286, 0.44642857142857145, - 0.8392857142857143, 0.44642857142857145, - 0.8392857142857143, 0.44642857142857145, - 0.875, 0.44642857142857145, - 0.875, 0.44642857142857145, - 0.9107142857142857, 0.44642857142857145, - 0.9107142857142857, 0.44642857142857145, - 0.9464285714285714, 0.44642857142857145, - 0.9464285714285714, 0.44642857142857145, - 0.9821428571428571, 0.44642857142857145, - 0.9821428571428571, 0.44642857142857145, - 0.017857142857142856, 0.48214285714285715, - 0.017857142857142856, 0.48214285714285715, - 0.05357142857142857, 0.48214285714285715, - 0.05357142857142857, 0.48214285714285715, - 0.08928571428571429, 0.48214285714285715, - 0.08928571428571429, 0.48214285714285715, - 0.125, 0.48214285714285715, - 0.125, 0.48214285714285715, - 0.16071428571428573, 0.48214285714285715, - 0.16071428571428573, 0.48214285714285715, - 0.19642857142857142, 0.48214285714285715, - 0.19642857142857142, 0.48214285714285715, - 0.23214285714285715, 0.48214285714285715, - 0.23214285714285715, 0.48214285714285715, - 0.26785714285714285, 0.48214285714285715, - 0.26785714285714285, 0.48214285714285715, - 0.30357142857142855, 0.48214285714285715, - 0.30357142857142855, 0.48214285714285715, - 0.3392857142857143, 0.48214285714285715, - 0.3392857142857143, 0.48214285714285715, - 0.375, 0.48214285714285715, - 0.375, 0.48214285714285715, - 0.4107142857142857, 0.48214285714285715, - 0.4107142857142857, 0.48214285714285715, - 0.44642857142857145, 0.48214285714285715, - 0.44642857142857145, 0.48214285714285715, - 0.48214285714285715, 0.48214285714285715, - 0.48214285714285715, 0.48214285714285715, - 0.5178571428571429, 0.48214285714285715, - 0.5178571428571429, 0.48214285714285715, - 0.5535714285714286, 0.48214285714285715, - 0.5535714285714286, 0.48214285714285715, - 0.5892857142857143, 0.48214285714285715, - 0.5892857142857143, 0.48214285714285715, - 0.625, 0.48214285714285715, - 0.625, 0.48214285714285715, - 0.6607142857142857, 0.48214285714285715, - 
0.6607142857142857, 0.48214285714285715, - 0.6964285714285714, 0.48214285714285715, - 0.6964285714285714, 0.48214285714285715, - 0.7321428571428571, 0.48214285714285715, - 0.7321428571428571, 0.48214285714285715, - 0.7678571428571429, 0.48214285714285715, - 0.7678571428571429, 0.48214285714285715, - 0.8035714285714286, 0.48214285714285715, - 0.8035714285714286, 0.48214285714285715, - 0.8392857142857143, 0.48214285714285715, - 0.8392857142857143, 0.48214285714285715, - 0.875, 0.48214285714285715, - 0.875, 0.48214285714285715, - 0.9107142857142857, 0.48214285714285715, - 0.9107142857142857, 0.48214285714285715, - 0.9464285714285714, 0.48214285714285715, - 0.9464285714285714, 0.48214285714285715, - 0.9821428571428571, 0.48214285714285715, - 0.9821428571428571, 0.48214285714285715, - 0.017857142857142856, 0.5178571428571429, - 0.017857142857142856, 0.5178571428571429, - 0.05357142857142857, 0.5178571428571429, - 0.05357142857142857, 0.5178571428571429, - 0.08928571428571429, 0.5178571428571429, - 0.08928571428571429, 0.5178571428571429, - 0.125, 0.5178571428571429, - 0.125, 0.5178571428571429, - 0.16071428571428573, 0.5178571428571429, - 0.16071428571428573, 0.5178571428571429, - 0.19642857142857142, 0.5178571428571429, - 0.19642857142857142, 0.5178571428571429, - 0.23214285714285715, 0.5178571428571429, - 0.23214285714285715, 0.5178571428571429, - 0.26785714285714285, 0.5178571428571429, - 0.26785714285714285, 0.5178571428571429, - 0.30357142857142855, 0.5178571428571429, - 0.30357142857142855, 0.5178571428571429, - 0.3392857142857143, 0.5178571428571429, - 0.3392857142857143, 0.5178571428571429, - 0.375, 0.5178571428571429, - 0.375, 0.5178571428571429, - 0.4107142857142857, 0.5178571428571429, - 0.4107142857142857, 0.5178571428571429, - 0.44642857142857145, 0.5178571428571429, - 0.44642857142857145, 0.5178571428571429, - 0.48214285714285715, 0.5178571428571429, - 0.48214285714285715, 0.5178571428571429, - 0.5178571428571429, 0.5178571428571429, - 0.5178571428571429, 
0.5178571428571429, - 0.5535714285714286, 0.5178571428571429, - 0.5535714285714286, 0.5178571428571429, - 0.5892857142857143, 0.5178571428571429, - 0.5892857142857143, 0.5178571428571429, - 0.625, 0.5178571428571429, - 0.625, 0.5178571428571429, - 0.6607142857142857, 0.5178571428571429, - 0.6607142857142857, 0.5178571428571429, - 0.6964285714285714, 0.5178571428571429, - 0.6964285714285714, 0.5178571428571429, - 0.7321428571428571, 0.5178571428571429, - 0.7321428571428571, 0.5178571428571429, - 0.7678571428571429, 0.5178571428571429, - 0.7678571428571429, 0.5178571428571429, - 0.8035714285714286, 0.5178571428571429, - 0.8035714285714286, 0.5178571428571429, - 0.8392857142857143, 0.5178571428571429, - 0.8392857142857143, 0.5178571428571429, - 0.875, 0.5178571428571429, - 0.875, 0.5178571428571429, - 0.9107142857142857, 0.5178571428571429, - 0.9107142857142857, 0.5178571428571429, - 0.9464285714285714, 0.5178571428571429, - 0.9464285714285714, 0.5178571428571429, - 0.9821428571428571, 0.5178571428571429, - 0.9821428571428571, 0.5178571428571429, - 0.017857142857142856, 0.5535714285714286, - 0.017857142857142856, 0.5535714285714286, - 0.05357142857142857, 0.5535714285714286, - 0.05357142857142857, 0.5535714285714286, - 0.08928571428571429, 0.5535714285714286, - 0.08928571428571429, 0.5535714285714286, - 0.125, 0.5535714285714286, - 0.125, 0.5535714285714286, - 0.16071428571428573, 0.5535714285714286, - 0.16071428571428573, 0.5535714285714286, - 0.19642857142857142, 0.5535714285714286, - 0.19642857142857142, 0.5535714285714286, - 0.23214285714285715, 0.5535714285714286, - 0.23214285714285715, 0.5535714285714286, - 0.26785714285714285, 0.5535714285714286, - 0.26785714285714285, 0.5535714285714286, - 0.30357142857142855, 0.5535714285714286, - 0.30357142857142855, 0.5535714285714286, - 0.3392857142857143, 0.5535714285714286, - 0.3392857142857143, 0.5535714285714286, - 0.375, 0.5535714285714286, - 0.375, 0.5535714285714286, - 0.4107142857142857, 0.5535714285714286, - 
0.4107142857142857, 0.5535714285714286, - 0.44642857142857145, 0.5535714285714286, - 0.44642857142857145, 0.5535714285714286, - 0.48214285714285715, 0.5535714285714286, - 0.48214285714285715, 0.5535714285714286, - 0.5178571428571429, 0.5535714285714286, - 0.5178571428571429, 0.5535714285714286, - 0.5535714285714286, 0.5535714285714286, - 0.5535714285714286, 0.5535714285714286, - 0.5892857142857143, 0.5535714285714286, - 0.5892857142857143, 0.5535714285714286, - 0.625, 0.5535714285714286, - 0.625, 0.5535714285714286, - 0.6607142857142857, 0.5535714285714286, - 0.6607142857142857, 0.5535714285714286, - 0.6964285714285714, 0.5535714285714286, - 0.6964285714285714, 0.5535714285714286, - 0.7321428571428571, 0.5535714285714286, - 0.7321428571428571, 0.5535714285714286, - 0.7678571428571429, 0.5535714285714286, - 0.7678571428571429, 0.5535714285714286, - 0.8035714285714286, 0.5535714285714286, - 0.8035714285714286, 0.5535714285714286, - 0.8392857142857143, 0.5535714285714286, - 0.8392857142857143, 0.5535714285714286, - 0.875, 0.5535714285714286, - 0.875, 0.5535714285714286, - 0.9107142857142857, 0.5535714285714286, - 0.9107142857142857, 0.5535714285714286, - 0.9464285714285714, 0.5535714285714286, - 0.9464285714285714, 0.5535714285714286, - 0.9821428571428571, 0.5535714285714286, - 0.9821428571428571, 0.5535714285714286, - 0.017857142857142856, 0.5892857142857143, - 0.017857142857142856, 0.5892857142857143, - 0.05357142857142857, 0.5892857142857143, - 0.05357142857142857, 0.5892857142857143, - 0.08928571428571429, 0.5892857142857143, - 0.08928571428571429, 0.5892857142857143, - 0.125, 0.5892857142857143, - 0.125, 0.5892857142857143, - 0.16071428571428573, 0.5892857142857143, - 0.16071428571428573, 0.5892857142857143, - 0.19642857142857142, 0.5892857142857143, - 0.19642857142857142, 0.5892857142857143, - 0.23214285714285715, 0.5892857142857143, - 0.23214285714285715, 0.5892857142857143, - 0.26785714285714285, 0.5892857142857143, - 0.26785714285714285, 0.5892857142857143, - 
0.30357142857142855, 0.5892857142857143, - 0.30357142857142855, 0.5892857142857143, - 0.3392857142857143, 0.5892857142857143, - 0.3392857142857143, 0.5892857142857143, - 0.375, 0.5892857142857143, - 0.375, 0.5892857142857143, - 0.4107142857142857, 0.5892857142857143, - 0.4107142857142857, 0.5892857142857143, - 0.44642857142857145, 0.5892857142857143, - 0.44642857142857145, 0.5892857142857143, - 0.48214285714285715, 0.5892857142857143, - 0.48214285714285715, 0.5892857142857143, - 0.5178571428571429, 0.5892857142857143, - 0.5178571428571429, 0.5892857142857143, - 0.5535714285714286, 0.5892857142857143, - 0.5535714285714286, 0.5892857142857143, - 0.5892857142857143, 0.5892857142857143, - 0.5892857142857143, 0.5892857142857143, - 0.625, 0.5892857142857143, - 0.625, 0.5892857142857143, - 0.6607142857142857, 0.5892857142857143, - 0.6607142857142857, 0.5892857142857143, - 0.6964285714285714, 0.5892857142857143, - 0.6964285714285714, 0.5892857142857143, - 0.7321428571428571, 0.5892857142857143, - 0.7321428571428571, 0.5892857142857143, - 0.7678571428571429, 0.5892857142857143, - 0.7678571428571429, 0.5892857142857143, - 0.8035714285714286, 0.5892857142857143, - 0.8035714285714286, 0.5892857142857143, - 0.8392857142857143, 0.5892857142857143, - 0.8392857142857143, 0.5892857142857143, - 0.875, 0.5892857142857143, - 0.875, 0.5892857142857143, - 0.9107142857142857, 0.5892857142857143, - 0.9107142857142857, 0.5892857142857143, - 0.9464285714285714, 0.5892857142857143, - 0.9464285714285714, 0.5892857142857143, - 0.9821428571428571, 0.5892857142857143, - 0.9821428571428571, 0.5892857142857143, - 0.017857142857142856, 0.625, - 0.017857142857142856, 0.625, - 0.05357142857142857, 0.625, - 0.05357142857142857, 0.625, - 0.08928571428571429, 0.625, - 0.08928571428571429, 0.625, - 0.125, 0.625, - 0.125, 0.625, - 0.16071428571428573, 0.625, - 0.16071428571428573, 0.625, - 0.19642857142857142, 0.625, - 0.19642857142857142, 0.625, - 0.23214285714285715, 0.625, - 0.23214285714285715, 0.625, 
- 0.26785714285714285, 0.625, - 0.26785714285714285, 0.625, - 0.30357142857142855, 0.625, - 0.30357142857142855, 0.625, - 0.3392857142857143, 0.625, - 0.3392857142857143, 0.625, - 0.375, 0.625, - 0.375, 0.625, - 0.4107142857142857, 0.625, - 0.4107142857142857, 0.625, - 0.44642857142857145, 0.625, - 0.44642857142857145, 0.625, - 0.48214285714285715, 0.625, - 0.48214285714285715, 0.625, - 0.5178571428571429, 0.625, - 0.5178571428571429, 0.625, - 0.5535714285714286, 0.625, - 0.5535714285714286, 0.625, - 0.5892857142857143, 0.625, - 0.5892857142857143, 0.625, - 0.625, 0.625, - 0.625, 0.625, - 0.6607142857142857, 0.625, - 0.6607142857142857, 0.625, - 0.6964285714285714, 0.625, - 0.6964285714285714, 0.625, - 0.7321428571428571, 0.625, - 0.7321428571428571, 0.625, - 0.7678571428571429, 0.625, - 0.7678571428571429, 0.625, - 0.8035714285714286, 0.625, - 0.8035714285714286, 0.625, - 0.8392857142857143, 0.625, - 0.8392857142857143, 0.625, - 0.875, 0.625, - 0.875, 0.625, - 0.9107142857142857, 0.625, - 0.9107142857142857, 0.625, - 0.9464285714285714, 0.625, - 0.9464285714285714, 0.625, - 0.9821428571428571, 0.625, - 0.9821428571428571, 0.625, - 0.017857142857142856, 0.6607142857142857, - 0.017857142857142856, 0.6607142857142857, - 0.05357142857142857, 0.6607142857142857, - 0.05357142857142857, 0.6607142857142857, - 0.08928571428571429, 0.6607142857142857, - 0.08928571428571429, 0.6607142857142857, - 0.125, 0.6607142857142857, - 0.125, 0.6607142857142857, - 0.16071428571428573, 0.6607142857142857, - 0.16071428571428573, 0.6607142857142857, - 0.19642857142857142, 0.6607142857142857, - 0.19642857142857142, 0.6607142857142857, - 0.23214285714285715, 0.6607142857142857, - 0.23214285714285715, 0.6607142857142857, - 0.26785714285714285, 0.6607142857142857, - 0.26785714285714285, 0.6607142857142857, - 0.30357142857142855, 0.6607142857142857, - 0.30357142857142855, 0.6607142857142857, - 0.3392857142857143, 0.6607142857142857, - 0.3392857142857143, 0.6607142857142857, - 0.375, 
0.6607142857142857, - 0.375, 0.6607142857142857, - 0.4107142857142857, 0.6607142857142857, - 0.4107142857142857, 0.6607142857142857, - 0.44642857142857145, 0.6607142857142857, - 0.44642857142857145, 0.6607142857142857, - 0.48214285714285715, 0.6607142857142857, - 0.48214285714285715, 0.6607142857142857, - 0.5178571428571429, 0.6607142857142857, - 0.5178571428571429, 0.6607142857142857, - 0.5535714285714286, 0.6607142857142857, - 0.5535714285714286, 0.6607142857142857, - 0.5892857142857143, 0.6607142857142857, - 0.5892857142857143, 0.6607142857142857, - 0.625, 0.6607142857142857, - 0.625, 0.6607142857142857, - 0.6607142857142857, 0.6607142857142857, - 0.6607142857142857, 0.6607142857142857, - 0.6964285714285714, 0.6607142857142857, - 0.6964285714285714, 0.6607142857142857, - 0.7321428571428571, 0.6607142857142857, - 0.7321428571428571, 0.6607142857142857, - 0.7678571428571429, 0.6607142857142857, - 0.7678571428571429, 0.6607142857142857, - 0.8035714285714286, 0.6607142857142857, - 0.8035714285714286, 0.6607142857142857, - 0.8392857142857143, 0.6607142857142857, - 0.8392857142857143, 0.6607142857142857, - 0.875, 0.6607142857142857, - 0.875, 0.6607142857142857, - 0.9107142857142857, 0.6607142857142857, - 0.9107142857142857, 0.6607142857142857, - 0.9464285714285714, 0.6607142857142857, - 0.9464285714285714, 0.6607142857142857, - 0.9821428571428571, 0.6607142857142857, - 0.9821428571428571, 0.6607142857142857, - 0.017857142857142856, 0.6964285714285714, - 0.017857142857142856, 0.6964285714285714, - 0.05357142857142857, 0.6964285714285714, - 0.05357142857142857, 0.6964285714285714, - 0.08928571428571429, 0.6964285714285714, - 0.08928571428571429, 0.6964285714285714, - 0.125, 0.6964285714285714, - 0.125, 0.6964285714285714, - 0.16071428571428573, 0.6964285714285714, - 0.16071428571428573, 0.6964285714285714, - 0.19642857142857142, 0.6964285714285714, - 0.19642857142857142, 0.6964285714285714, - 0.23214285714285715, 0.6964285714285714, - 0.23214285714285715, 
0.6964285714285714, - 0.26785714285714285, 0.6964285714285714, - 0.26785714285714285, 0.6964285714285714, - 0.30357142857142855, 0.6964285714285714, - 0.30357142857142855, 0.6964285714285714, - 0.3392857142857143, 0.6964285714285714, - 0.3392857142857143, 0.6964285714285714, - 0.375, 0.6964285714285714, - 0.375, 0.6964285714285714, - 0.4107142857142857, 0.6964285714285714, - 0.4107142857142857, 0.6964285714285714, - 0.44642857142857145, 0.6964285714285714, - 0.44642857142857145, 0.6964285714285714, - 0.48214285714285715, 0.6964285714285714, - 0.48214285714285715, 0.6964285714285714, - 0.5178571428571429, 0.6964285714285714, - 0.5178571428571429, 0.6964285714285714, - 0.5535714285714286, 0.6964285714285714, - 0.5535714285714286, 0.6964285714285714, - 0.5892857142857143, 0.6964285714285714, - 0.5892857142857143, 0.6964285714285714, - 0.625, 0.6964285714285714, - 0.625, 0.6964285714285714, - 0.6607142857142857, 0.6964285714285714, - 0.6607142857142857, 0.6964285714285714, - 0.6964285714285714, 0.6964285714285714, - 0.6964285714285714, 0.6964285714285714, - 0.7321428571428571, 0.6964285714285714, - 0.7321428571428571, 0.6964285714285714, - 0.7678571428571429, 0.6964285714285714, - 0.7678571428571429, 0.6964285714285714, - 0.8035714285714286, 0.6964285714285714, - 0.8035714285714286, 0.6964285714285714, - 0.8392857142857143, 0.6964285714285714, - 0.8392857142857143, 0.6964285714285714, - 0.875, 0.6964285714285714, - 0.875, 0.6964285714285714, - 0.9107142857142857, 0.6964285714285714, - 0.9107142857142857, 0.6964285714285714, - 0.9464285714285714, 0.6964285714285714, - 0.9464285714285714, 0.6964285714285714, - 0.9821428571428571, 0.6964285714285714, - 0.9821428571428571, 0.6964285714285714, - 0.017857142857142856, 0.7321428571428571, - 0.017857142857142856, 0.7321428571428571, - 0.05357142857142857, 0.7321428571428571, - 0.05357142857142857, 0.7321428571428571, - 0.08928571428571429, 0.7321428571428571, - 0.08928571428571429, 0.7321428571428571, - 0.125, 
0.7321428571428571, - 0.125, 0.7321428571428571, - 0.16071428571428573, 0.7321428571428571, - 0.16071428571428573, 0.7321428571428571, - 0.19642857142857142, 0.7321428571428571, - 0.19642857142857142, 0.7321428571428571, - 0.23214285714285715, 0.7321428571428571, - 0.23214285714285715, 0.7321428571428571, - 0.26785714285714285, 0.7321428571428571, - 0.26785714285714285, 0.7321428571428571, - 0.30357142857142855, 0.7321428571428571, - 0.30357142857142855, 0.7321428571428571, - 0.3392857142857143, 0.7321428571428571, - 0.3392857142857143, 0.7321428571428571, - 0.375, 0.7321428571428571, - 0.375, 0.7321428571428571, - 0.4107142857142857, 0.7321428571428571, - 0.4107142857142857, 0.7321428571428571, - 0.44642857142857145, 0.7321428571428571, - 0.44642857142857145, 0.7321428571428571, - 0.48214285714285715, 0.7321428571428571, - 0.48214285714285715, 0.7321428571428571, - 0.5178571428571429, 0.7321428571428571, - 0.5178571428571429, 0.7321428571428571, - 0.5535714285714286, 0.7321428571428571, - 0.5535714285714286, 0.7321428571428571, - 0.5892857142857143, 0.7321428571428571, - 0.5892857142857143, 0.7321428571428571, - 0.625, 0.7321428571428571, - 0.625, 0.7321428571428571, - 0.6607142857142857, 0.7321428571428571, - 0.6607142857142857, 0.7321428571428571, - 0.6964285714285714, 0.7321428571428571, - 0.6964285714285714, 0.7321428571428571, - 0.7321428571428571, 0.7321428571428571, - 0.7321428571428571, 0.7321428571428571, - 0.7678571428571429, 0.7321428571428571, - 0.7678571428571429, 0.7321428571428571, - 0.8035714285714286, 0.7321428571428571, - 0.8035714285714286, 0.7321428571428571, - 0.8392857142857143, 0.7321428571428571, - 0.8392857142857143, 0.7321428571428571, - 0.875, 0.7321428571428571, - 0.875, 0.7321428571428571, - 0.9107142857142857, 0.7321428571428571, - 0.9107142857142857, 0.7321428571428571, - 0.9464285714285714, 0.7321428571428571, - 0.9464285714285714, 0.7321428571428571, - 0.9821428571428571, 0.7321428571428571, - 0.9821428571428571, 
0.7321428571428571, - 0.017857142857142856, 0.7678571428571429, - 0.017857142857142856, 0.7678571428571429, - 0.05357142857142857, 0.7678571428571429, - 0.05357142857142857, 0.7678571428571429, - 0.08928571428571429, 0.7678571428571429, - 0.08928571428571429, 0.7678571428571429, - 0.125, 0.7678571428571429, - 0.125, 0.7678571428571429, - 0.16071428571428573, 0.7678571428571429, - 0.16071428571428573, 0.7678571428571429, - 0.19642857142857142, 0.7678571428571429, - 0.19642857142857142, 0.7678571428571429, - 0.23214285714285715, 0.7678571428571429, - 0.23214285714285715, 0.7678571428571429, - 0.26785714285714285, 0.7678571428571429, - 0.26785714285714285, 0.7678571428571429, - 0.30357142857142855, 0.7678571428571429, - 0.30357142857142855, 0.7678571428571429, - 0.3392857142857143, 0.7678571428571429, - 0.3392857142857143, 0.7678571428571429, - 0.375, 0.7678571428571429, - 0.375, 0.7678571428571429, - 0.4107142857142857, 0.7678571428571429, - 0.4107142857142857, 0.7678571428571429, - 0.44642857142857145, 0.7678571428571429, - 0.44642857142857145, 0.7678571428571429, - 0.48214285714285715, 0.7678571428571429, - 0.48214285714285715, 0.7678571428571429, - 0.5178571428571429, 0.7678571428571429, - 0.5178571428571429, 0.7678571428571429, - 0.5535714285714286, 0.7678571428571429, - 0.5535714285714286, 0.7678571428571429, - 0.5892857142857143, 0.7678571428571429, - 0.5892857142857143, 0.7678571428571429, - 0.625, 0.7678571428571429, - 0.625, 0.7678571428571429, - 0.6607142857142857, 0.7678571428571429, - 0.6607142857142857, 0.7678571428571429, - 0.6964285714285714, 0.7678571428571429, - 0.6964285714285714, 0.7678571428571429, - 0.7321428571428571, 0.7678571428571429, - 0.7321428571428571, 0.7678571428571429, - 0.7678571428571429, 0.7678571428571429, - 0.7678571428571429, 0.7678571428571429, - 0.8035714285714286, 0.7678571428571429, - 0.8035714285714286, 0.7678571428571429, - 0.8392857142857143, 0.7678571428571429, - 0.8392857142857143, 0.7678571428571429, - 0.875, 
0.7678571428571429, - 0.875, 0.7678571428571429, - 0.9107142857142857, 0.7678571428571429, - 0.9107142857142857, 0.7678571428571429, - 0.9464285714285714, 0.7678571428571429, - 0.9464285714285714, 0.7678571428571429, - 0.9821428571428571, 0.7678571428571429, - 0.9821428571428571, 0.7678571428571429, - 0.017857142857142856, 0.8035714285714286, - 0.017857142857142856, 0.8035714285714286, - 0.05357142857142857, 0.8035714285714286, - 0.05357142857142857, 0.8035714285714286, - 0.08928571428571429, 0.8035714285714286, - 0.08928571428571429, 0.8035714285714286, - 0.125, 0.8035714285714286, - 0.125, 0.8035714285714286, - 0.16071428571428573, 0.8035714285714286, - 0.16071428571428573, 0.8035714285714286, - 0.19642857142857142, 0.8035714285714286, - 0.19642857142857142, 0.8035714285714286, - 0.23214285714285715, 0.8035714285714286, - 0.23214285714285715, 0.8035714285714286, - 0.26785714285714285, 0.8035714285714286, - 0.26785714285714285, 0.8035714285714286, - 0.30357142857142855, 0.8035714285714286, - 0.30357142857142855, 0.8035714285714286, - 0.3392857142857143, 0.8035714285714286, - 0.3392857142857143, 0.8035714285714286, - 0.375, 0.8035714285714286, - 0.375, 0.8035714285714286, - 0.4107142857142857, 0.8035714285714286, - 0.4107142857142857, 0.8035714285714286, - 0.44642857142857145, 0.8035714285714286, - 0.44642857142857145, 0.8035714285714286, - 0.48214285714285715, 0.8035714285714286, - 0.48214285714285715, 0.8035714285714286, - 0.5178571428571429, 0.8035714285714286, - 0.5178571428571429, 0.8035714285714286, - 0.5535714285714286, 0.8035714285714286, - 0.5535714285714286, 0.8035714285714286, - 0.5892857142857143, 0.8035714285714286, - 0.5892857142857143, 0.8035714285714286, - 0.625, 0.8035714285714286, - 0.625, 0.8035714285714286, - 0.6607142857142857, 0.8035714285714286, - 0.6607142857142857, 0.8035714285714286, - 0.6964285714285714, 0.8035714285714286, - 0.6964285714285714, 0.8035714285714286, - 0.7321428571428571, 0.8035714285714286, - 0.7321428571428571, 
0.8035714285714286, - 0.7678571428571429, 0.8035714285714286, - 0.7678571428571429, 0.8035714285714286, - 0.8035714285714286, 0.8035714285714286, - 0.8035714285714286, 0.8035714285714286, - 0.8392857142857143, 0.8035714285714286, - 0.8392857142857143, 0.8035714285714286, - 0.875, 0.8035714285714286, - 0.875, 0.8035714285714286, - 0.9107142857142857, 0.8035714285714286, - 0.9107142857142857, 0.8035714285714286, - 0.9464285714285714, 0.8035714285714286, - 0.9464285714285714, 0.8035714285714286, - 0.9821428571428571, 0.8035714285714286, - 0.9821428571428571, 0.8035714285714286, - 0.017857142857142856, 0.8392857142857143, - 0.017857142857142856, 0.8392857142857143, - 0.05357142857142857, 0.8392857142857143, - 0.05357142857142857, 0.8392857142857143, - 0.08928571428571429, 0.8392857142857143, - 0.08928571428571429, 0.8392857142857143, - 0.125, 0.8392857142857143, - 0.125, 0.8392857142857143, - 0.16071428571428573, 0.8392857142857143, - 0.16071428571428573, 0.8392857142857143, - 0.19642857142857142, 0.8392857142857143, - 0.19642857142857142, 0.8392857142857143, - 0.23214285714285715, 0.8392857142857143, - 0.23214285714285715, 0.8392857142857143, - 0.26785714285714285, 0.8392857142857143, - 0.26785714285714285, 0.8392857142857143, - 0.30357142857142855, 0.8392857142857143, - 0.30357142857142855, 0.8392857142857143, - 0.3392857142857143, 0.8392857142857143, - 0.3392857142857143, 0.8392857142857143, - 0.375, 0.8392857142857143, - 0.375, 0.8392857142857143, - 0.4107142857142857, 0.8392857142857143, - 0.4107142857142857, 0.8392857142857143, - 0.44642857142857145, 0.8392857142857143, - 0.44642857142857145, 0.8392857142857143, - 0.48214285714285715, 0.8392857142857143, - 0.48214285714285715, 0.8392857142857143, - 0.5178571428571429, 0.8392857142857143, - 0.5178571428571429, 0.8392857142857143, - 0.5535714285714286, 0.8392857142857143, - 0.5535714285714286, 0.8392857142857143, - 0.5892857142857143, 0.8392857142857143, - 0.5892857142857143, 0.8392857142857143, - 0.625, 
0.8392857142857143, - 0.625, 0.8392857142857143, - 0.6607142857142857, 0.8392857142857143, - 0.6607142857142857, 0.8392857142857143, - 0.6964285714285714, 0.8392857142857143, - 0.6964285714285714, 0.8392857142857143, - 0.7321428571428571, 0.8392857142857143, - 0.7321428571428571, 0.8392857142857143, - 0.7678571428571429, 0.8392857142857143, - 0.7678571428571429, 0.8392857142857143, - 0.8035714285714286, 0.8392857142857143, - 0.8035714285714286, 0.8392857142857143, - 0.8392857142857143, 0.8392857142857143, - 0.8392857142857143, 0.8392857142857143, - 0.875, 0.8392857142857143, - 0.875, 0.8392857142857143, - 0.9107142857142857, 0.8392857142857143, - 0.9107142857142857, 0.8392857142857143, - 0.9464285714285714, 0.8392857142857143, - 0.9464285714285714, 0.8392857142857143, - 0.9821428571428571, 0.8392857142857143, - 0.9821428571428571, 0.8392857142857143, - 0.017857142857142856, 0.875, - 0.017857142857142856, 0.875, - 0.05357142857142857, 0.875, - 0.05357142857142857, 0.875, - 0.08928571428571429, 0.875, - 0.08928571428571429, 0.875, - 0.125, 0.875, - 0.125, 0.875, - 0.16071428571428573, 0.875, - 0.16071428571428573, 0.875, - 0.19642857142857142, 0.875, - 0.19642857142857142, 0.875, - 0.23214285714285715, 0.875, - 0.23214285714285715, 0.875, - 0.26785714285714285, 0.875, - 0.26785714285714285, 0.875, - 0.30357142857142855, 0.875, - 0.30357142857142855, 0.875, - 0.3392857142857143, 0.875, - 0.3392857142857143, 0.875, - 0.375, 0.875, - 0.375, 0.875, - 0.4107142857142857, 0.875, - 0.4107142857142857, 0.875, - 0.44642857142857145, 0.875, - 0.44642857142857145, 0.875, - 0.48214285714285715, 0.875, - 0.48214285714285715, 0.875, - 0.5178571428571429, 0.875, - 0.5178571428571429, 0.875, - 0.5535714285714286, 0.875, - 0.5535714285714286, 0.875, - 0.5892857142857143, 0.875, - 0.5892857142857143, 0.875, - 0.625, 0.875, - 0.625, 0.875, - 0.6607142857142857, 0.875, - 0.6607142857142857, 0.875, - 0.6964285714285714, 0.875, - 0.6964285714285714, 0.875, - 0.7321428571428571, 0.875, - 
0.7321428571428571, 0.875, - 0.7678571428571429, 0.875, - 0.7678571428571429, 0.875, - 0.8035714285714286, 0.875, - 0.8035714285714286, 0.875, - 0.8392857142857143, 0.875, - 0.8392857142857143, 0.875, - 0.875, 0.875, - 0.875, 0.875, - 0.9107142857142857, 0.875, - 0.9107142857142857, 0.875, - 0.9464285714285714, 0.875, - 0.9464285714285714, 0.875, - 0.9821428571428571, 0.875, - 0.9821428571428571, 0.875, - 0.017857142857142856, 0.9107142857142857, - 0.017857142857142856, 0.9107142857142857, - 0.05357142857142857, 0.9107142857142857, - 0.05357142857142857, 0.9107142857142857, - 0.08928571428571429, 0.9107142857142857, - 0.08928571428571429, 0.9107142857142857, - 0.125, 0.9107142857142857, - 0.125, 0.9107142857142857, - 0.16071428571428573, 0.9107142857142857, - 0.16071428571428573, 0.9107142857142857, - 0.19642857142857142, 0.9107142857142857, - 0.19642857142857142, 0.9107142857142857, - 0.23214285714285715, 0.9107142857142857, - 0.23214285714285715, 0.9107142857142857, - 0.26785714285714285, 0.9107142857142857, - 0.26785714285714285, 0.9107142857142857, - 0.30357142857142855, 0.9107142857142857, - 0.30357142857142855, 0.9107142857142857, - 0.3392857142857143, 0.9107142857142857, - 0.3392857142857143, 0.9107142857142857, - 0.375, 0.9107142857142857, - 0.375, 0.9107142857142857, - 0.4107142857142857, 0.9107142857142857, - 0.4107142857142857, 0.9107142857142857, - 0.44642857142857145, 0.9107142857142857, - 0.44642857142857145, 0.9107142857142857, - 0.48214285714285715, 0.9107142857142857, - 0.48214285714285715, 0.9107142857142857, - 0.5178571428571429, 0.9107142857142857, - 0.5178571428571429, 0.9107142857142857, - 0.5535714285714286, 0.9107142857142857, - 0.5535714285714286, 0.9107142857142857, - 0.5892857142857143, 0.9107142857142857, - 0.5892857142857143, 0.9107142857142857, - 0.625, 0.9107142857142857, - 0.625, 0.9107142857142857, - 0.6607142857142857, 0.9107142857142857, - 0.6607142857142857, 0.9107142857142857, - 0.6964285714285714, 0.9107142857142857, - 
0.6964285714285714, 0.9107142857142857, - 0.7321428571428571, 0.9107142857142857, - 0.7321428571428571, 0.9107142857142857, - 0.7678571428571429, 0.9107142857142857, - 0.7678571428571429, 0.9107142857142857, - 0.8035714285714286, 0.9107142857142857, - 0.8035714285714286, 0.9107142857142857, - 0.8392857142857143, 0.9107142857142857, - 0.8392857142857143, 0.9107142857142857, - 0.875, 0.9107142857142857, - 0.875, 0.9107142857142857, - 0.9107142857142857, 0.9107142857142857, - 0.9107142857142857, 0.9107142857142857, - 0.9464285714285714, 0.9107142857142857, - 0.9464285714285714, 0.9107142857142857, - 0.9821428571428571, 0.9107142857142857, - 0.9821428571428571, 0.9107142857142857, - 0.017857142857142856, 0.9464285714285714, - 0.017857142857142856, 0.9464285714285714, - 0.05357142857142857, 0.9464285714285714, - 0.05357142857142857, 0.9464285714285714, - 0.08928571428571429, 0.9464285714285714, - 0.08928571428571429, 0.9464285714285714, - 0.125, 0.9464285714285714, - 0.125, 0.9464285714285714, - 0.16071428571428573, 0.9464285714285714, - 0.16071428571428573, 0.9464285714285714, - 0.19642857142857142, 0.9464285714285714, - 0.19642857142857142, 0.9464285714285714, - 0.23214285714285715, 0.9464285714285714, - 0.23214285714285715, 0.9464285714285714, - 0.26785714285714285, 0.9464285714285714, - 0.26785714285714285, 0.9464285714285714, - 0.30357142857142855, 0.9464285714285714, - 0.30357142857142855, 0.9464285714285714, - 0.3392857142857143, 0.9464285714285714, - 0.3392857142857143, 0.9464285714285714, - 0.375, 0.9464285714285714, - 0.375, 0.9464285714285714, - 0.4107142857142857, 0.9464285714285714, - 0.4107142857142857, 0.9464285714285714, - 0.44642857142857145, 0.9464285714285714, - 0.44642857142857145, 0.9464285714285714, - 0.48214285714285715, 0.9464285714285714, - 0.48214285714285715, 0.9464285714285714, - 0.5178571428571429, 0.9464285714285714, - 0.5178571428571429, 0.9464285714285714, - 0.5535714285714286, 0.9464285714285714, - 0.5535714285714286, 0.9464285714285714, 
- 0.5892857142857143, 0.9464285714285714, - 0.5892857142857143, 0.9464285714285714, - 0.625, 0.9464285714285714, - 0.625, 0.9464285714285714, - 0.6607142857142857, 0.9464285714285714, - 0.6607142857142857, 0.9464285714285714, - 0.6964285714285714, 0.9464285714285714, - 0.6964285714285714, 0.9464285714285714, - 0.7321428571428571, 0.9464285714285714, - 0.7321428571428571, 0.9464285714285714, - 0.7678571428571429, 0.9464285714285714, - 0.7678571428571429, 0.9464285714285714, - 0.8035714285714286, 0.9464285714285714, - 0.8035714285714286, 0.9464285714285714, - 0.8392857142857143, 0.9464285714285714, - 0.8392857142857143, 0.9464285714285714, - 0.875, 0.9464285714285714, - 0.875, 0.9464285714285714, - 0.9107142857142857, 0.9464285714285714, - 0.9107142857142857, 0.9464285714285714, - 0.9464285714285714, 0.9464285714285714, - 0.9464285714285714, 0.9464285714285714, - 0.9821428571428571, 0.9464285714285714, - 0.9821428571428571, 0.9464285714285714, - 0.017857142857142856, 0.9821428571428571, - 0.017857142857142856, 0.9821428571428571, - 0.05357142857142857, 0.9821428571428571, - 0.05357142857142857, 0.9821428571428571, - 0.08928571428571429, 0.9821428571428571, - 0.08928571428571429, 0.9821428571428571, - 0.125, 0.9821428571428571, - 0.125, 0.9821428571428571, - 0.16071428571428573, 0.9821428571428571, - 0.16071428571428573, 0.9821428571428571, - 0.19642857142857142, 0.9821428571428571, - 0.19642857142857142, 0.9821428571428571, - 0.23214285714285715, 0.9821428571428571, - 0.23214285714285715, 0.9821428571428571, - 0.26785714285714285, 0.9821428571428571, - 0.26785714285714285, 0.9821428571428571, - 0.30357142857142855, 0.9821428571428571, - 0.30357142857142855, 0.9821428571428571, - 0.3392857142857143, 0.9821428571428571, - 0.3392857142857143, 0.9821428571428571, - 0.375, 0.9821428571428571, - 0.375, 0.9821428571428571, - 0.4107142857142857, 0.9821428571428571, - 0.4107142857142857, 0.9821428571428571, - 0.44642857142857145, 0.9821428571428571, - 0.44642857142857145, 
0.9821428571428571, - 0.48214285714285715, 0.9821428571428571, - 0.48214285714285715, 0.9821428571428571, - 0.5178571428571429, 0.9821428571428571, - 0.5178571428571429, 0.9821428571428571, - 0.5535714285714286, 0.9821428571428571, - 0.5535714285714286, 0.9821428571428571, - 0.5892857142857143, 0.9821428571428571, - 0.5892857142857143, 0.9821428571428571, - 0.625, 0.9821428571428571, - 0.625, 0.9821428571428571, - 0.6607142857142857, 0.9821428571428571, - 0.6607142857142857, 0.9821428571428571, - 0.6964285714285714, 0.9821428571428571, - 0.6964285714285714, 0.9821428571428571, - 0.7321428571428571, 0.9821428571428571, - 0.7321428571428571, 0.9821428571428571, - 0.7678571428571429, 0.9821428571428571, - 0.7678571428571429, 0.9821428571428571, - 0.8035714285714286, 0.9821428571428571, - 0.8035714285714286, 0.9821428571428571, - 0.8392857142857143, 0.9821428571428571, - 0.8392857142857143, 0.9821428571428571, - 0.875, 0.9821428571428571, - 0.875, 0.9821428571428571, - 0.9107142857142857, 0.9821428571428571, - 0.9107142857142857, 0.9821428571428571, - 0.9464285714285714, 0.9821428571428571, - 0.9464285714285714, 0.9821428571428571, - 0.9821428571428571, 0.9821428571428571, - 0.9821428571428571, 0.9821428571428571, - 0.03571428571428571, 0.03571428571428571, - 0.03571428571428571, 0.03571428571428571, - 0.10714285714285714, 0.03571428571428571, - 0.10714285714285714, 0.03571428571428571, - 0.17857142857142858, 0.03571428571428571, - 0.17857142857142858, 0.03571428571428571, - 0.25, 0.03571428571428571, - 0.25, 0.03571428571428571, - 0.32142857142857145, 0.03571428571428571, - 0.32142857142857145, 0.03571428571428571, - 0.39285714285714285, 0.03571428571428571, - 0.39285714285714285, 0.03571428571428571, - 0.4642857142857143, 0.03571428571428571, - 0.4642857142857143, 0.03571428571428571, - 0.5357142857142857, 0.03571428571428571, - 0.5357142857142857, 0.03571428571428571, - 0.6071428571428571, 0.03571428571428571, - 0.6071428571428571, 0.03571428571428571, - 
0.6785714285714286, 0.03571428571428571, - 0.6785714285714286, 0.03571428571428571, - 0.75, 0.03571428571428571, - 0.75, 0.03571428571428571, - 0.8214285714285714, 0.03571428571428571, - 0.8214285714285714, 0.03571428571428571, - 0.8928571428571429, 0.03571428571428571, - 0.8928571428571429, 0.03571428571428571, - 0.9642857142857143, 0.03571428571428571, - 0.9642857142857143, 0.03571428571428571, - 0.03571428571428571, 0.10714285714285714, - 0.03571428571428571, 0.10714285714285714, - 0.10714285714285714, 0.10714285714285714, - 0.10714285714285714, 0.10714285714285714, - 0.17857142857142858, 0.10714285714285714, - 0.17857142857142858, 0.10714285714285714, - 0.25, 0.10714285714285714, - 0.25, 0.10714285714285714, - 0.32142857142857145, 0.10714285714285714, - 0.32142857142857145, 0.10714285714285714, - 0.39285714285714285, 0.10714285714285714, - 0.39285714285714285, 0.10714285714285714, - 0.4642857142857143, 0.10714285714285714, - 0.4642857142857143, 0.10714285714285714, - 0.5357142857142857, 0.10714285714285714, - 0.5357142857142857, 0.10714285714285714, - 0.6071428571428571, 0.10714285714285714, - 0.6071428571428571, 0.10714285714285714, - 0.6785714285714286, 0.10714285714285714, - 0.6785714285714286, 0.10714285714285714, - 0.75, 0.10714285714285714, - 0.75, 0.10714285714285714, - 0.8214285714285714, 0.10714285714285714, - 0.8214285714285714, 0.10714285714285714, - 0.8928571428571429, 0.10714285714285714, - 0.8928571428571429, 0.10714285714285714, - 0.9642857142857143, 0.10714285714285714, - 0.9642857142857143, 0.10714285714285714, - 0.03571428571428571, 0.17857142857142858, - 0.03571428571428571, 0.17857142857142858, - 0.10714285714285714, 0.17857142857142858, - 0.10714285714285714, 0.17857142857142858, - 0.17857142857142858, 0.17857142857142858, - 0.17857142857142858, 0.17857142857142858, - 0.25, 0.17857142857142858, - 0.25, 0.17857142857142858, - 0.32142857142857145, 0.17857142857142858, - 0.32142857142857145, 0.17857142857142858, - 0.39285714285714285, 
0.17857142857142858, - 0.39285714285714285, 0.17857142857142858, - 0.4642857142857143, 0.17857142857142858, - 0.4642857142857143, 0.17857142857142858, - 0.5357142857142857, 0.17857142857142858, - 0.5357142857142857, 0.17857142857142858, - 0.6071428571428571, 0.17857142857142858, - 0.6071428571428571, 0.17857142857142858, - 0.6785714285714286, 0.17857142857142858, - 0.6785714285714286, 0.17857142857142858, - 0.75, 0.17857142857142858, - 0.75, 0.17857142857142858, - 0.8214285714285714, 0.17857142857142858, - 0.8214285714285714, 0.17857142857142858, - 0.8928571428571429, 0.17857142857142858, - 0.8928571428571429, 0.17857142857142858, - 0.9642857142857143, 0.17857142857142858, - 0.9642857142857143, 0.17857142857142858, - 0.03571428571428571, 0.25, - 0.03571428571428571, 0.25, - 0.10714285714285714, 0.25, - 0.10714285714285714, 0.25, - 0.17857142857142858, 0.25, - 0.17857142857142858, 0.25, - 0.25, 0.25, - 0.25, 0.25, - 0.32142857142857145, 0.25, - 0.32142857142857145, 0.25, - 0.39285714285714285, 0.25, - 0.39285714285714285, 0.25, - 0.4642857142857143, 0.25, - 0.4642857142857143, 0.25, - 0.5357142857142857, 0.25, - 0.5357142857142857, 0.25, - 0.6071428571428571, 0.25, - 0.6071428571428571, 0.25, - 0.6785714285714286, 0.25, - 0.6785714285714286, 0.25, - 0.75, 0.25, - 0.75, 0.25, - 0.8214285714285714, 0.25, - 0.8214285714285714, 0.25, - 0.8928571428571429, 0.25, - 0.8928571428571429, 0.25, - 0.9642857142857143, 0.25, - 0.9642857142857143, 0.25, - 0.03571428571428571, 0.32142857142857145, - 0.03571428571428571, 0.32142857142857145, - 0.10714285714285714, 0.32142857142857145, - 0.10714285714285714, 0.32142857142857145, - 0.17857142857142858, 0.32142857142857145, - 0.17857142857142858, 0.32142857142857145, - 0.25, 0.32142857142857145, - 0.25, 0.32142857142857145, - 0.32142857142857145, 0.32142857142857145, - 0.32142857142857145, 0.32142857142857145, - 0.39285714285714285, 0.32142857142857145, - 0.39285714285714285, 0.32142857142857145, - 0.4642857142857143, 
0.32142857142857145, - 0.4642857142857143, 0.32142857142857145, - 0.5357142857142857, 0.32142857142857145, - 0.5357142857142857, 0.32142857142857145, - 0.6071428571428571, 0.32142857142857145, - 0.6071428571428571, 0.32142857142857145, - 0.6785714285714286, 0.32142857142857145, - 0.6785714285714286, 0.32142857142857145, - 0.75, 0.32142857142857145, - 0.75, 0.32142857142857145, - 0.8214285714285714, 0.32142857142857145, - 0.8214285714285714, 0.32142857142857145, - 0.8928571428571429, 0.32142857142857145, - 0.8928571428571429, 0.32142857142857145, - 0.9642857142857143, 0.32142857142857145, - 0.9642857142857143, 0.32142857142857145, - 0.03571428571428571, 0.39285714285714285, - 0.03571428571428571, 0.39285714285714285, - 0.10714285714285714, 0.39285714285714285, - 0.10714285714285714, 0.39285714285714285, - 0.17857142857142858, 0.39285714285714285, - 0.17857142857142858, 0.39285714285714285, - 0.25, 0.39285714285714285, - 0.25, 0.39285714285714285, - 0.32142857142857145, 0.39285714285714285, - 0.32142857142857145, 0.39285714285714285, - 0.39285714285714285, 0.39285714285714285, - 0.39285714285714285, 0.39285714285714285, - 0.4642857142857143, 0.39285714285714285, - 0.4642857142857143, 0.39285714285714285, - 0.5357142857142857, 0.39285714285714285, - 0.5357142857142857, 0.39285714285714285, - 0.6071428571428571, 0.39285714285714285, - 0.6071428571428571, 0.39285714285714285, - 0.6785714285714286, 0.39285714285714285, - 0.6785714285714286, 0.39285714285714285, - 0.75, 0.39285714285714285, - 0.75, 0.39285714285714285, - 0.8214285714285714, 0.39285714285714285, - 0.8214285714285714, 0.39285714285714285, - 0.8928571428571429, 0.39285714285714285, - 0.8928571428571429, 0.39285714285714285, - 0.9642857142857143, 0.39285714285714285, - 0.9642857142857143, 0.39285714285714285, - 0.03571428571428571, 0.4642857142857143, - 0.03571428571428571, 0.4642857142857143, - 0.10714285714285714, 0.4642857142857143, - 0.10714285714285714, 0.4642857142857143, - 0.17857142857142858, 
0.4642857142857143, - 0.17857142857142858, 0.4642857142857143, - 0.25, 0.4642857142857143, - 0.25, 0.4642857142857143, - 0.32142857142857145, 0.4642857142857143, - 0.32142857142857145, 0.4642857142857143, - 0.39285714285714285, 0.4642857142857143, - 0.39285714285714285, 0.4642857142857143, - 0.4642857142857143, 0.4642857142857143, - 0.4642857142857143, 0.4642857142857143, - 0.5357142857142857, 0.4642857142857143, - 0.5357142857142857, 0.4642857142857143, - 0.6071428571428571, 0.4642857142857143, - 0.6071428571428571, 0.4642857142857143, - 0.6785714285714286, 0.4642857142857143, - 0.6785714285714286, 0.4642857142857143, - 0.75, 0.4642857142857143, - 0.75, 0.4642857142857143, - 0.8214285714285714, 0.4642857142857143, - 0.8214285714285714, 0.4642857142857143, - 0.8928571428571429, 0.4642857142857143, - 0.8928571428571429, 0.4642857142857143, - 0.9642857142857143, 0.4642857142857143, - 0.9642857142857143, 0.4642857142857143, - 0.03571428571428571, 0.5357142857142857, - 0.03571428571428571, 0.5357142857142857, - 0.10714285714285714, 0.5357142857142857, - 0.10714285714285714, 0.5357142857142857, - 0.17857142857142858, 0.5357142857142857, - 0.17857142857142858, 0.5357142857142857, - 0.25, 0.5357142857142857, - 0.25, 0.5357142857142857, - 0.32142857142857145, 0.5357142857142857, - 0.32142857142857145, 0.5357142857142857, - 0.39285714285714285, 0.5357142857142857, - 0.39285714285714285, 0.5357142857142857, - 0.4642857142857143, 0.5357142857142857, - 0.4642857142857143, 0.5357142857142857, - 0.5357142857142857, 0.5357142857142857, - 0.5357142857142857, 0.5357142857142857, - 0.6071428571428571, 0.5357142857142857, - 0.6071428571428571, 0.5357142857142857, - 0.6785714285714286, 0.5357142857142857, - 0.6785714285714286, 0.5357142857142857, - 0.75, 0.5357142857142857, - 0.75, 0.5357142857142857, - 0.8214285714285714, 0.5357142857142857, - 0.8214285714285714, 0.5357142857142857, - 0.8928571428571429, 0.5357142857142857, - 0.8928571428571429, 0.5357142857142857, - 
0.9642857142857143, 0.5357142857142857, - 0.9642857142857143, 0.5357142857142857, - 0.03571428571428571, 0.6071428571428571, - 0.03571428571428571, 0.6071428571428571, - 0.10714285714285714, 0.6071428571428571, - 0.10714285714285714, 0.6071428571428571, - 0.17857142857142858, 0.6071428571428571, - 0.17857142857142858, 0.6071428571428571, - 0.25, 0.6071428571428571, - 0.25, 0.6071428571428571, - 0.32142857142857145, 0.6071428571428571, - 0.32142857142857145, 0.6071428571428571, - 0.39285714285714285, 0.6071428571428571, - 0.39285714285714285, 0.6071428571428571, - 0.4642857142857143, 0.6071428571428571, - 0.4642857142857143, 0.6071428571428571, - 0.5357142857142857, 0.6071428571428571, - 0.5357142857142857, 0.6071428571428571, - 0.6071428571428571, 0.6071428571428571, - 0.6071428571428571, 0.6071428571428571, - 0.6785714285714286, 0.6071428571428571, - 0.6785714285714286, 0.6071428571428571, - 0.75, 0.6071428571428571, - 0.75, 0.6071428571428571, - 0.8214285714285714, 0.6071428571428571, - 0.8214285714285714, 0.6071428571428571, - 0.8928571428571429, 0.6071428571428571, - 0.8928571428571429, 0.6071428571428571, - 0.9642857142857143, 0.6071428571428571, - 0.9642857142857143, 0.6071428571428571, - 0.03571428571428571, 0.6785714285714286, - 0.03571428571428571, 0.6785714285714286, - 0.10714285714285714, 0.6785714285714286, - 0.10714285714285714, 0.6785714285714286, - 0.17857142857142858, 0.6785714285714286, - 0.17857142857142858, 0.6785714285714286, - 0.25, 0.6785714285714286, - 0.25, 0.6785714285714286, - 0.32142857142857145, 0.6785714285714286, - 0.32142857142857145, 0.6785714285714286, - 0.39285714285714285, 0.6785714285714286, - 0.39285714285714285, 0.6785714285714286, - 0.4642857142857143, 0.6785714285714286, - 0.4642857142857143, 0.6785714285714286, - 0.5357142857142857, 0.6785714285714286, - 0.5357142857142857, 0.6785714285714286, - 0.6071428571428571, 0.6785714285714286, - 0.6071428571428571, 0.6785714285714286, - 0.6785714285714286, 0.6785714285714286, - 
0.6785714285714286, 0.6785714285714286, - 0.75, 0.6785714285714286, - 0.75, 0.6785714285714286, - 0.8214285714285714, 0.6785714285714286, - 0.8214285714285714, 0.6785714285714286, - 0.8928571428571429, 0.6785714285714286, - 0.8928571428571429, 0.6785714285714286, - 0.9642857142857143, 0.6785714285714286, - 0.9642857142857143, 0.6785714285714286, - 0.03571428571428571, 0.75, - 0.03571428571428571, 0.75, - 0.10714285714285714, 0.75, - 0.10714285714285714, 0.75, - 0.17857142857142858, 0.75, - 0.17857142857142858, 0.75, - 0.25, 0.75, - 0.25, 0.75, - 0.32142857142857145, 0.75, - 0.32142857142857145, 0.75, - 0.39285714285714285, 0.75, - 0.39285714285714285, 0.75, - 0.4642857142857143, 0.75, - 0.4642857142857143, 0.75, - 0.5357142857142857, 0.75, - 0.5357142857142857, 0.75, - 0.6071428571428571, 0.75, - 0.6071428571428571, 0.75, - 0.6785714285714286, 0.75, - 0.6785714285714286, 0.75, - 0.75, 0.75, - 0.75, 0.75, - 0.8214285714285714, 0.75, - 0.8214285714285714, 0.75, - 0.8928571428571429, 0.75, - 0.8928571428571429, 0.75, - 0.9642857142857143, 0.75, - 0.9642857142857143, 0.75, - 0.03571428571428571, 0.8214285714285714, - 0.03571428571428571, 0.8214285714285714, - 0.10714285714285714, 0.8214285714285714, - 0.10714285714285714, 0.8214285714285714, - 0.17857142857142858, 0.8214285714285714, - 0.17857142857142858, 0.8214285714285714, - 0.25, 0.8214285714285714, - 0.25, 0.8214285714285714, - 0.32142857142857145, 0.8214285714285714, - 0.32142857142857145, 0.8214285714285714, - 0.39285714285714285, 0.8214285714285714, - 0.39285714285714285, 0.8214285714285714, - 0.4642857142857143, 0.8214285714285714, - 0.4642857142857143, 0.8214285714285714, - 0.5357142857142857, 0.8214285714285714, - 0.5357142857142857, 0.8214285714285714, - 0.6071428571428571, 0.8214285714285714, - 0.6071428571428571, 0.8214285714285714, - 0.6785714285714286, 0.8214285714285714, - 0.6785714285714286, 0.8214285714285714, - 0.75, 0.8214285714285714, - 0.75, 0.8214285714285714, - 0.8214285714285714, 
0.8214285714285714, - 0.8214285714285714, 0.8214285714285714, - 0.8928571428571429, 0.8214285714285714, - 0.8928571428571429, 0.8214285714285714, - 0.9642857142857143, 0.8214285714285714, - 0.9642857142857143, 0.8214285714285714, - 0.03571428571428571, 0.8928571428571429, - 0.03571428571428571, 0.8928571428571429, - 0.10714285714285714, 0.8928571428571429, - 0.10714285714285714, 0.8928571428571429, - 0.17857142857142858, 0.8928571428571429, - 0.17857142857142858, 0.8928571428571429, - 0.25, 0.8928571428571429, - 0.25, 0.8928571428571429, - 0.32142857142857145, 0.8928571428571429, - 0.32142857142857145, 0.8928571428571429, - 0.39285714285714285, 0.8928571428571429, - 0.39285714285714285, 0.8928571428571429, - 0.4642857142857143, 0.8928571428571429, - 0.4642857142857143, 0.8928571428571429, - 0.5357142857142857, 0.8928571428571429, - 0.5357142857142857, 0.8928571428571429, - 0.6071428571428571, 0.8928571428571429, - 0.6071428571428571, 0.8928571428571429, - 0.6785714285714286, 0.8928571428571429, - 0.6785714285714286, 0.8928571428571429, - 0.75, 0.8928571428571429, - 0.75, 0.8928571428571429, - 0.8214285714285714, 0.8928571428571429, - 0.8214285714285714, 0.8928571428571429, - 0.8928571428571429, 0.8928571428571429, - 0.8928571428571429, 0.8928571428571429, - 0.9642857142857143, 0.8928571428571429, - 0.9642857142857143, 0.8928571428571429, - 0.03571428571428571, 0.9642857142857143, - 0.03571428571428571, 0.9642857142857143, - 0.10714285714285714, 0.9642857142857143, - 0.10714285714285714, 0.9642857142857143, - 0.17857142857142858, 0.9642857142857143, - 0.17857142857142858, 0.9642857142857143, - 0.25, 0.9642857142857143, - 0.25, 0.9642857142857143, - 0.32142857142857145, 0.9642857142857143, - 0.32142857142857145, 0.9642857142857143, - 0.39285714285714285, 0.9642857142857143, - 0.39285714285714285, 0.9642857142857143, - 0.4642857142857143, 0.9642857142857143, - 0.4642857142857143, 0.9642857142857143, - 0.5357142857142857, 0.9642857142857143, - 0.5357142857142857, 
0.9642857142857143, - 0.6071428571428571, 0.9642857142857143, - 0.6071428571428571, 0.9642857142857143, - 0.6785714285714286, 0.9642857142857143, - 0.6785714285714286, 0.9642857142857143, - 0.75, 0.9642857142857143, - 0.75, 0.9642857142857143, - 0.8214285714285714, 0.9642857142857143, - 0.8214285714285714, 0.9642857142857143, - 0.8928571428571429, 0.9642857142857143, - 0.8928571428571429, 0.9642857142857143, - 0.9642857142857143, 0.9642857142857143, - 0.9642857142857143, 0.9642857142857143, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.9285714285714286, 
0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.07142857142857142, 
0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 
0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.7857142857142857, 
0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 
0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286); - return anchor; -} diff --git a/models/person_detection_mediapipe/demo.py b/models/person_detection_mediapipe/demo.py 
deleted file mode 100644 index 43bb5855..00000000 --- a/models/person_detection_mediapipe/demo.py +++ /dev/null @@ -1,140 +0,0 @@ -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from mp_persondet import MPPersonDet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='Person Detector from MediaPipe') -parser.add_argument('--input', '-i', type=str, - help='Usage: Set path to the input image. Omit for using default camera.') -parser.add_argument('--model', '-m', type=str, default='./person_detection_mediapipe_2023mar.onnx', - help='Usage: Set model path, defaults to person_detection_mediapipe_2023mar.onnx') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--score_threshold', type=float, default=0.5, - help='Usage: Set the minimum needed confidence for the model to identify a person, defaults to 0.5. Smaller values may result in faster detection, but will limit accuracy. 
Filter out persons of confidence < conf_threshold.') -parser.add_argument('--nms_threshold', type=float, default=0.3, - help='Usage: Suppress bounding boxes of iou >= nms_threshold. Default = 0.3.') -parser.add_argument('--top_k', type=int, default=5000, - help='Usage: Keep top_k bounding boxes before NMS.') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. Invalid in case of camera input.') -args = parser.parse_args() - -def visualize(image, results, fps=None): - output = image.copy() - - if fps is not None: - cv.putText(output, 'FPS: {:.2f}'.format(fps), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255)) - - for idx, person in enumerate(results): - score = person[-1] - person_landmarks = person[4:-1].reshape(4, 2).astype(np.int32) - - hip_point = person_landmarks[0] - full_body = person_landmarks[1] - shoulder_point = person_landmarks[2] - upper_body = person_landmarks[3] - - # draw circle for full body - radius = np.linalg.norm(hip_point - full_body).astype(np.int32) - cv.circle(output, hip_point, radius, (255, 0, 0), 2) - - # draw circle for upper body - radius = np.linalg.norm(shoulder_point - upper_body).astype(np.int32) - cv.circle(output, shoulder_point, radius, (0, 255, 255), 2) - - # draw points for each keypoint - for p in person_landmarks: - cv.circle(output, p, 2, (0, 0, 255), 2) - - # put score - cv.putText(output, 'Score: {:.4f}'.format(score), (0, output.shape[0] - 48), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 0)) - - cv.putText(output, 'Yellow: upper body circle', (0, output.shape[0] - 36), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 255)) - cv.putText(output, 'Blue: full body circle', (0, output.shape[0] - 24), cv.FONT_HERSHEY_DUPLEX, 0.5, (255, 0, 0)) - cv.putText(output, 'Red: keypoint', (0, output.shape[0] - 
12), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - - return output - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # Instantiate MPPersonDet - model = MPPersonDet(modelPath=args.model, - nmsThreshold=args.nms_threshold, - scoreThreshold=args.score_threshold, - topK=args.top_k, - backendId=backend_id, - targetId=target_id) - - # If input is an image - if args.input is not None: - image = cv.imread(args.input) - - # Inference - results = model.infer(image) - if len(results) == 0: - print('Person not detected') - - # Draw results on the input image - image = visualize(image, results) - - # Save results if save is true - if args.save: - print('Resutls saved to result.jpg\n') - cv.imwrite('result.jpg', image) - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - # Inference - tm.start() - results = model.infer(frame) - tm.stop() - - # Draw results on the input image - frame = visualize(frame, results, fps=tm.getFPS()) - - # Visualize results in a new Window - cv.imshow('MPPersonDet Demo', frame) - - tm.reset() - diff --git a/models/person_detection_mediapipe/example_outputs/mppersondet_demo.webp b/models/person_detection_mediapipe/example_outputs/mppersondet_demo.webp deleted file mode 100644 index 7cc4ec96..00000000 --- a/models/person_detection_mediapipe/example_outputs/mppersondet_demo.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5c2aeb6b5f2afa91063c737f983cf7e46e8096decd8476cc7817c0f8523d22e1 -size 708710 diff --git a/models/person_detection_mediapipe/mp_persondet.py 
b/models/person_detection_mediapipe/mp_persondet.py deleted file mode 100644 index 39d73720..00000000 --- a/models/person_detection_mediapipe/mp_persondet.py +++ /dev/null @@ -1,2366 +0,0 @@ -import numpy as np -import cv2 as cv - -class MPPersonDet: - def __init__(self, modelPath, nmsThreshold=0.3, scoreThreshold=0.5, topK=5000, backendId=0, targetId=0): - self.model_path = modelPath - self.nms_threshold = nmsThreshold - self.score_threshold = scoreThreshold - self.topK = topK - self.backend_id = backendId - self.target_id = targetId - - self.input_size = np.array([224, 224]) # wh - - self.model = cv.dnn.readNet(self.model_path) - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - self.anchors = self._load_anchors() - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self.backend_id = backendId - self.target_id = targetId - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - def _preprocess(self, image): - pad_bias = np.array([0., 0.]) # left, top - image = cv.cvtColor(image, cv.COLOR_BGR2RGB) - image = image.astype(np.float32) / 255.0 # norm - image = (image - 0.5) * 2 # [0, 1] -> [-1, 1] - ratio = min(self.input_size / image.shape[:2]) - if image.shape[0] != self.input_size[0] or image.shape[1] != self.input_size[1]: - # keep aspect ratio when resize - ratio_size = (np.array(image.shape[:2]) * ratio).astype(np.int32) - image = cv.resize(image, (ratio_size[1], ratio_size[0])) - pad_h = self.input_size[0] - ratio_size[0] - pad_w = self.input_size[1] - ratio_size[1] - pad_bias[0] = left = pad_w // 2 - pad_bias[1] = top = pad_h // 2 - right = pad_w - left - bottom = pad_h - top - image = cv.copyMakeBorder(image, top, bottom, left, right, cv.BORDER_CONSTANT, None, (0, 0, 0)) - - blob = np.transpose(image, [2, 0, 1]) - pad_bias = (pad_bias / ratio).astype(np.int32) - return blob[np.newaxis, :, :, 
:], pad_bias # chw -> nchw - - def infer(self, image): - h, w, _ = image.shape - - # Preprocess - input_blob, pad_bias = self._preprocess(image) - - # Forward - self.model.setInput(input_blob) - output_blob = self.model.forward(self.model.getUnconnectedOutLayersNames()) - - # Postprocess - results = self._postprocess(output_blob, np.array([w, h]), pad_bias) - - return results - - def _postprocess(self, output_blob, original_shape, pad_bias): - score = output_blob[1][0, :, 0] - box_delta = output_blob[0][0, :, 0:4] - landmark_delta = output_blob[0][0, :, 4:] - scale = max(original_shape) - - # get scores - score = score.astype(np.float64) - score = np.clip(score, -100, 100) - score = 1 / (1 + np.exp(-score)) - - # get boxes - cxy_delta = box_delta[:, :2] / self.input_size - wh_delta = box_delta[:, 2:] / self.input_size - xy1 = (cxy_delta - wh_delta / 2 + self.anchors) * scale - xy2 = (cxy_delta + wh_delta / 2 + self.anchors) * scale - boxes = np.concatenate([xy1, xy2], axis=1) - boxes -= [pad_bias[0], pad_bias[1], pad_bias[0], pad_bias[1]] - # NMS - keep_idx = cv.dnn.NMSBoxes(boxes, score, self.score_threshold, self.nms_threshold, top_k=self.topK) - if len(keep_idx) == 0: - return np.empty(shape=(0, 13)) - selected_score = score[keep_idx] - selected_box = boxes[keep_idx] - - # get landmarks - selected_landmarks = landmark_delta[keep_idx].reshape(-1, 4, 2) - selected_landmarks = selected_landmarks / self.input_size - selected_anchors = self.anchors[keep_idx] - for idx, landmark in enumerate(selected_landmarks): - landmark += selected_anchors[idx] - selected_landmarks *= scale - selected_landmarks -= pad_bias - - # TODO: still don't know the meaning of face bbox - # each landmark: hip center point; full body point; shoulder center point; upper body point; - # - # [ - # [face_bbox, landmarks, score] - # ... 
- # [face_bbox, landmarks, score] - # ] - return np.c_[selected_box.reshape(-1, 4), selected_landmarks.reshape(-1, 8), selected_score.reshape(-1, 1)] - - def _load_anchors(self): - return np.array([[0.017857142857142856, 0.017857142857142856], - [0.017857142857142856, 0.017857142857142856], - [0.05357142857142857, 0.017857142857142856], - [0.05357142857142857, 0.017857142857142856], - [0.08928571428571429, 0.017857142857142856], - [0.08928571428571429, 0.017857142857142856], - [0.125, 0.017857142857142856], - [0.125, 0.017857142857142856], - [0.16071428571428573, 0.017857142857142856], - [0.16071428571428573, 0.017857142857142856], - [0.19642857142857142, 0.017857142857142856], - [0.19642857142857142, 0.017857142857142856], - [0.23214285714285715, 0.017857142857142856], - [0.23214285714285715, 0.017857142857142856], - [0.26785714285714285, 0.017857142857142856], - [0.26785714285714285, 0.017857142857142856], - [0.30357142857142855, 0.017857142857142856], - [0.30357142857142855, 0.017857142857142856], - [0.3392857142857143, 0.017857142857142856], - [0.3392857142857143, 0.017857142857142856], - [0.375, 0.017857142857142856], - [0.375, 0.017857142857142856], - [0.4107142857142857, 0.017857142857142856], - [0.4107142857142857, 0.017857142857142856], - [0.44642857142857145, 0.017857142857142856], - [0.44642857142857145, 0.017857142857142856], - [0.48214285714285715, 0.017857142857142856], - [0.48214285714285715, 0.017857142857142856], - [0.5178571428571429, 0.017857142857142856], - [0.5178571428571429, 0.017857142857142856], - [0.5535714285714286, 0.017857142857142856], - [0.5535714285714286, 0.017857142857142856], - [0.5892857142857143, 0.017857142857142856], - [0.5892857142857143, 0.017857142857142856], - [0.625, 0.017857142857142856], - [0.625, 0.017857142857142856], - [0.6607142857142857, 0.017857142857142856], - [0.6607142857142857, 0.017857142857142856], - [0.6964285714285714, 0.017857142857142856], - [0.6964285714285714, 0.017857142857142856], - 
[0.7321428571428571, 0.017857142857142856], - [0.7321428571428571, 0.017857142857142856], - [0.7678571428571429, 0.017857142857142856], - [0.7678571428571429, 0.017857142857142856], - [0.8035714285714286, 0.017857142857142856], - [0.8035714285714286, 0.017857142857142856], - [0.8392857142857143, 0.017857142857142856], - [0.8392857142857143, 0.017857142857142856], - [0.875, 0.017857142857142856], - [0.875, 0.017857142857142856], - [0.9107142857142857, 0.017857142857142856], - [0.9107142857142857, 0.017857142857142856], - [0.9464285714285714, 0.017857142857142856], - [0.9464285714285714, 0.017857142857142856], - [0.9821428571428571, 0.017857142857142856], - [0.9821428571428571, 0.017857142857142856], - [0.017857142857142856, 0.05357142857142857], - [0.017857142857142856, 0.05357142857142857], - [0.05357142857142857, 0.05357142857142857], - [0.05357142857142857, 0.05357142857142857], - [0.08928571428571429, 0.05357142857142857], - [0.08928571428571429, 0.05357142857142857], - [0.125, 0.05357142857142857], - [0.125, 0.05357142857142857], - [0.16071428571428573, 0.05357142857142857], - [0.16071428571428573, 0.05357142857142857], - [0.19642857142857142, 0.05357142857142857], - [0.19642857142857142, 0.05357142857142857], - [0.23214285714285715, 0.05357142857142857], - [0.23214285714285715, 0.05357142857142857], - [0.26785714285714285, 0.05357142857142857], - [0.26785714285714285, 0.05357142857142857], - [0.30357142857142855, 0.05357142857142857], - [0.30357142857142855, 0.05357142857142857], - [0.3392857142857143, 0.05357142857142857], - [0.3392857142857143, 0.05357142857142857], - [0.375, 0.05357142857142857], - [0.375, 0.05357142857142857], - [0.4107142857142857, 0.05357142857142857], - [0.4107142857142857, 0.05357142857142857], - [0.44642857142857145, 0.05357142857142857], - [0.44642857142857145, 0.05357142857142857], - [0.48214285714285715, 0.05357142857142857], - [0.48214285714285715, 0.05357142857142857], - [0.5178571428571429, 0.05357142857142857], - 
[0.5178571428571429, 0.05357142857142857], - [0.5535714285714286, 0.05357142857142857], - [0.5535714285714286, 0.05357142857142857], - [0.5892857142857143, 0.05357142857142857], - [0.5892857142857143, 0.05357142857142857], - [0.625, 0.05357142857142857], - [0.625, 0.05357142857142857], - [0.6607142857142857, 0.05357142857142857], - [0.6607142857142857, 0.05357142857142857], - [0.6964285714285714, 0.05357142857142857], - [0.6964285714285714, 0.05357142857142857], - [0.7321428571428571, 0.05357142857142857], - [0.7321428571428571, 0.05357142857142857], - [0.7678571428571429, 0.05357142857142857], - [0.7678571428571429, 0.05357142857142857], - [0.8035714285714286, 0.05357142857142857], - [0.8035714285714286, 0.05357142857142857], - [0.8392857142857143, 0.05357142857142857], - [0.8392857142857143, 0.05357142857142857], - [0.875, 0.05357142857142857], - [0.875, 0.05357142857142857], - [0.9107142857142857, 0.05357142857142857], - [0.9107142857142857, 0.05357142857142857], - [0.9464285714285714, 0.05357142857142857], - [0.9464285714285714, 0.05357142857142857], - [0.9821428571428571, 0.05357142857142857], - [0.9821428571428571, 0.05357142857142857], - [0.017857142857142856, 0.08928571428571429], - [0.017857142857142856, 0.08928571428571429], - [0.05357142857142857, 0.08928571428571429], - [0.05357142857142857, 0.08928571428571429], - [0.08928571428571429, 0.08928571428571429], - [0.08928571428571429, 0.08928571428571429], - [0.125, 0.08928571428571429], - [0.125, 0.08928571428571429], - [0.16071428571428573, 0.08928571428571429], - [0.16071428571428573, 0.08928571428571429], - [0.19642857142857142, 0.08928571428571429], - [0.19642857142857142, 0.08928571428571429], - [0.23214285714285715, 0.08928571428571429], - [0.23214285714285715, 0.08928571428571429], - [0.26785714285714285, 0.08928571428571429], - [0.26785714285714285, 0.08928571428571429], - [0.30357142857142855, 0.08928571428571429], - [0.30357142857142855, 0.08928571428571429], - [0.3392857142857143, 
0.08928571428571429], - [0.3392857142857143, 0.08928571428571429], - [0.375, 0.08928571428571429], - [0.375, 0.08928571428571429], - [0.4107142857142857, 0.08928571428571429], - [0.4107142857142857, 0.08928571428571429], - [0.44642857142857145, 0.08928571428571429], - [0.44642857142857145, 0.08928571428571429], - [0.48214285714285715, 0.08928571428571429], - [0.48214285714285715, 0.08928571428571429], - [0.5178571428571429, 0.08928571428571429], - [0.5178571428571429, 0.08928571428571429], - [0.5535714285714286, 0.08928571428571429], - [0.5535714285714286, 0.08928571428571429], - [0.5892857142857143, 0.08928571428571429], - [0.5892857142857143, 0.08928571428571429], - [0.625, 0.08928571428571429], - [0.625, 0.08928571428571429], - [0.6607142857142857, 0.08928571428571429], - [0.6607142857142857, 0.08928571428571429], - [0.6964285714285714, 0.08928571428571429], - [0.6964285714285714, 0.08928571428571429], - [0.7321428571428571, 0.08928571428571429], - [0.7321428571428571, 0.08928571428571429], - [0.7678571428571429, 0.08928571428571429], - [0.7678571428571429, 0.08928571428571429], - [0.8035714285714286, 0.08928571428571429], - [0.8035714285714286, 0.08928571428571429], - [0.8392857142857143, 0.08928571428571429], - [0.8392857142857143, 0.08928571428571429], - [0.875, 0.08928571428571429], - [0.875, 0.08928571428571429], - [0.9107142857142857, 0.08928571428571429], - [0.9107142857142857, 0.08928571428571429], - [0.9464285714285714, 0.08928571428571429], - [0.9464285714285714, 0.08928571428571429], - [0.9821428571428571, 0.08928571428571429], - [0.9821428571428571, 0.08928571428571429], - [0.017857142857142856, 0.125], - [0.017857142857142856, 0.125], - [0.05357142857142857, 0.125], - [0.05357142857142857, 0.125], - [0.08928571428571429, 0.125], - [0.08928571428571429, 0.125], - [0.125, 0.125], - [0.125, 0.125], - [0.16071428571428573, 0.125], - [0.16071428571428573, 0.125], - [0.19642857142857142, 0.125], - [0.19642857142857142, 0.125], - [0.23214285714285715, 
0.125], - [0.23214285714285715, 0.125], - [0.26785714285714285, 0.125], - [0.26785714285714285, 0.125], - [0.30357142857142855, 0.125], - [0.30357142857142855, 0.125], - [0.3392857142857143, 0.125], - [0.3392857142857143, 0.125], - [0.375, 0.125], - [0.375, 0.125], - [0.4107142857142857, 0.125], - [0.4107142857142857, 0.125], - [0.44642857142857145, 0.125], - [0.44642857142857145, 0.125], - [0.48214285714285715, 0.125], - [0.48214285714285715, 0.125], - [0.5178571428571429, 0.125], - [0.5178571428571429, 0.125], - [0.5535714285714286, 0.125], - [0.5535714285714286, 0.125], - [0.5892857142857143, 0.125], - [0.5892857142857143, 0.125], - [0.625, 0.125], - [0.625, 0.125], - [0.6607142857142857, 0.125], - [0.6607142857142857, 0.125], - [0.6964285714285714, 0.125], - [0.6964285714285714, 0.125], - [0.7321428571428571, 0.125], - [0.7321428571428571, 0.125], - [0.7678571428571429, 0.125], - [0.7678571428571429, 0.125], - [0.8035714285714286, 0.125], - [0.8035714285714286, 0.125], - [0.8392857142857143, 0.125], - [0.8392857142857143, 0.125], - [0.875, 0.125], - [0.875, 0.125], - [0.9107142857142857, 0.125], - [0.9107142857142857, 0.125], - [0.9464285714285714, 0.125], - [0.9464285714285714, 0.125], - [0.9821428571428571, 0.125], - [0.9821428571428571, 0.125], - [0.017857142857142856, 0.16071428571428573], - [0.017857142857142856, 0.16071428571428573], - [0.05357142857142857, 0.16071428571428573], - [0.05357142857142857, 0.16071428571428573], - [0.08928571428571429, 0.16071428571428573], - [0.08928571428571429, 0.16071428571428573], - [0.125, 0.16071428571428573], - [0.125, 0.16071428571428573], - [0.16071428571428573, 0.16071428571428573], - [0.16071428571428573, 0.16071428571428573], - [0.19642857142857142, 0.16071428571428573], - [0.19642857142857142, 0.16071428571428573], - [0.23214285714285715, 0.16071428571428573], - [0.23214285714285715, 0.16071428571428573], - [0.26785714285714285, 0.16071428571428573], - [0.26785714285714285, 0.16071428571428573], - 
[0.30357142857142855, 0.16071428571428573], - [0.30357142857142855, 0.16071428571428573], - [0.3392857142857143, 0.16071428571428573], - [0.3392857142857143, 0.16071428571428573], - [0.375, 0.16071428571428573], - [0.375, 0.16071428571428573], - [0.4107142857142857, 0.16071428571428573], - [0.4107142857142857, 0.16071428571428573], - [0.44642857142857145, 0.16071428571428573], - [0.44642857142857145, 0.16071428571428573], - [0.48214285714285715, 0.16071428571428573], - [0.48214285714285715, 0.16071428571428573], - [0.5178571428571429, 0.16071428571428573], - [0.5178571428571429, 0.16071428571428573], - [0.5535714285714286, 0.16071428571428573], - [0.5535714285714286, 0.16071428571428573], - [0.5892857142857143, 0.16071428571428573], - [0.5892857142857143, 0.16071428571428573], - [0.625, 0.16071428571428573], - [0.625, 0.16071428571428573], - [0.6607142857142857, 0.16071428571428573], - [0.6607142857142857, 0.16071428571428573], - [0.6964285714285714, 0.16071428571428573], - [0.6964285714285714, 0.16071428571428573], - [0.7321428571428571, 0.16071428571428573], - [0.7321428571428571, 0.16071428571428573], - [0.7678571428571429, 0.16071428571428573], - [0.7678571428571429, 0.16071428571428573], - [0.8035714285714286, 0.16071428571428573], - [0.8035714285714286, 0.16071428571428573], - [0.8392857142857143, 0.16071428571428573], - [0.8392857142857143, 0.16071428571428573], - [0.875, 0.16071428571428573], - [0.875, 0.16071428571428573], - [0.9107142857142857, 0.16071428571428573], - [0.9107142857142857, 0.16071428571428573], - [0.9464285714285714, 0.16071428571428573], - [0.9464285714285714, 0.16071428571428573], - [0.9821428571428571, 0.16071428571428573], - [0.9821428571428571, 0.16071428571428573], - [0.017857142857142856, 0.19642857142857142], - [0.017857142857142856, 0.19642857142857142], - [0.05357142857142857, 0.19642857142857142], - [0.05357142857142857, 0.19642857142857142], - [0.08928571428571429, 0.19642857142857142], - [0.08928571428571429, 
0.19642857142857142], - [0.125, 0.19642857142857142], - [0.125, 0.19642857142857142], - [0.16071428571428573, 0.19642857142857142], - [0.16071428571428573, 0.19642857142857142], - [0.19642857142857142, 0.19642857142857142], - [0.19642857142857142, 0.19642857142857142], - [0.23214285714285715, 0.19642857142857142], - [0.23214285714285715, 0.19642857142857142], - [0.26785714285714285, 0.19642857142857142], - [0.26785714285714285, 0.19642857142857142], - [0.30357142857142855, 0.19642857142857142], - [0.30357142857142855, 0.19642857142857142], - [0.3392857142857143, 0.19642857142857142], - [0.3392857142857143, 0.19642857142857142], - [0.375, 0.19642857142857142], - [0.375, 0.19642857142857142], - [0.4107142857142857, 0.19642857142857142], - [0.4107142857142857, 0.19642857142857142], - [0.44642857142857145, 0.19642857142857142], - [0.44642857142857145, 0.19642857142857142], - [0.48214285714285715, 0.19642857142857142], - [0.48214285714285715, 0.19642857142857142], - [0.5178571428571429, 0.19642857142857142], - [0.5178571428571429, 0.19642857142857142], - [0.5535714285714286, 0.19642857142857142], - [0.5535714285714286, 0.19642857142857142], - [0.5892857142857143, 0.19642857142857142], - [0.5892857142857143, 0.19642857142857142], - [0.625, 0.19642857142857142], - [0.625, 0.19642857142857142], - [0.6607142857142857, 0.19642857142857142], - [0.6607142857142857, 0.19642857142857142], - [0.6964285714285714, 0.19642857142857142], - [0.6964285714285714, 0.19642857142857142], - [0.7321428571428571, 0.19642857142857142], - [0.7321428571428571, 0.19642857142857142], - [0.7678571428571429, 0.19642857142857142], - [0.7678571428571429, 0.19642857142857142], - [0.8035714285714286, 0.19642857142857142], - [0.8035714285714286, 0.19642857142857142], - [0.8392857142857143, 0.19642857142857142], - [0.8392857142857143, 0.19642857142857142], - [0.875, 0.19642857142857142], - [0.875, 0.19642857142857142], - [0.9107142857142857, 0.19642857142857142], - [0.9107142857142857, 
0.19642857142857142], - [0.9464285714285714, 0.19642857142857142], - [0.9464285714285714, 0.19642857142857142], - [0.9821428571428571, 0.19642857142857142], - [0.9821428571428571, 0.19642857142857142], - [0.017857142857142856, 0.23214285714285715], - [0.017857142857142856, 0.23214285714285715], - [0.05357142857142857, 0.23214285714285715], - [0.05357142857142857, 0.23214285714285715], - [0.08928571428571429, 0.23214285714285715], - [0.08928571428571429, 0.23214285714285715], - [0.125, 0.23214285714285715], - [0.125, 0.23214285714285715], - [0.16071428571428573, 0.23214285714285715], - [0.16071428571428573, 0.23214285714285715], - [0.19642857142857142, 0.23214285714285715], - [0.19642857142857142, 0.23214285714285715], - [0.23214285714285715, 0.23214285714285715], - [0.23214285714285715, 0.23214285714285715], - [0.26785714285714285, 0.23214285714285715], - [0.26785714285714285, 0.23214285714285715], - [0.30357142857142855, 0.23214285714285715], - [0.30357142857142855, 0.23214285714285715], - [0.3392857142857143, 0.23214285714285715], - [0.3392857142857143, 0.23214285714285715], - [0.375, 0.23214285714285715], - [0.375, 0.23214285714285715], - [0.4107142857142857, 0.23214285714285715], - [0.4107142857142857, 0.23214285714285715], - [0.44642857142857145, 0.23214285714285715], - [0.44642857142857145, 0.23214285714285715], - [0.48214285714285715, 0.23214285714285715], - [0.48214285714285715, 0.23214285714285715], - [0.5178571428571429, 0.23214285714285715], - [0.5178571428571429, 0.23214285714285715], - [0.5535714285714286, 0.23214285714285715], - [0.5535714285714286, 0.23214285714285715], - [0.5892857142857143, 0.23214285714285715], - [0.5892857142857143, 0.23214285714285715], - [0.625, 0.23214285714285715], - [0.625, 0.23214285714285715], - [0.6607142857142857, 0.23214285714285715], - [0.6607142857142857, 0.23214285714285715], - [0.6964285714285714, 0.23214285714285715], - [0.6964285714285714, 0.23214285714285715], - [0.7321428571428571, 0.23214285714285715], - 
[0.7321428571428571, 0.23214285714285715], - [0.7678571428571429, 0.23214285714285715], - [0.7678571428571429, 0.23214285714285715], - [0.8035714285714286, 0.23214285714285715], - [0.8035714285714286, 0.23214285714285715], - [0.8392857142857143, 0.23214285714285715], - [0.8392857142857143, 0.23214285714285715], - [0.875, 0.23214285714285715], - [0.875, 0.23214285714285715], - [0.9107142857142857, 0.23214285714285715], - [0.9107142857142857, 0.23214285714285715], - [0.9464285714285714, 0.23214285714285715], - [0.9464285714285714, 0.23214285714285715], - [0.9821428571428571, 0.23214285714285715], - [0.9821428571428571, 0.23214285714285715], - [0.017857142857142856, 0.26785714285714285], - [0.017857142857142856, 0.26785714285714285], - [0.05357142857142857, 0.26785714285714285], - [0.05357142857142857, 0.26785714285714285], - [0.08928571428571429, 0.26785714285714285], - [0.08928571428571429, 0.26785714285714285], - [0.125, 0.26785714285714285], - [0.125, 0.26785714285714285], - [0.16071428571428573, 0.26785714285714285], - [0.16071428571428573, 0.26785714285714285], - [0.19642857142857142, 0.26785714285714285], - [0.19642857142857142, 0.26785714285714285], - [0.23214285714285715, 0.26785714285714285], - [0.23214285714285715, 0.26785714285714285], - [0.26785714285714285, 0.26785714285714285], - [0.26785714285714285, 0.26785714285714285], - [0.30357142857142855, 0.26785714285714285], - [0.30357142857142855, 0.26785714285714285], - [0.3392857142857143, 0.26785714285714285], - [0.3392857142857143, 0.26785714285714285], - [0.375, 0.26785714285714285], - [0.375, 0.26785714285714285], - [0.4107142857142857, 0.26785714285714285], - [0.4107142857142857, 0.26785714285714285], - [0.44642857142857145, 0.26785714285714285], - [0.44642857142857145, 0.26785714285714285], - [0.48214285714285715, 0.26785714285714285], - [0.48214285714285715, 0.26785714285714285], - [0.5178571428571429, 0.26785714285714285], - [0.5178571428571429, 0.26785714285714285], - [0.5535714285714286, 
0.26785714285714285], - [0.5535714285714286, 0.26785714285714285], - [0.5892857142857143, 0.26785714285714285], - [0.5892857142857143, 0.26785714285714285], - [0.625, 0.26785714285714285], - [0.625, 0.26785714285714285], - [0.6607142857142857, 0.26785714285714285], - [0.6607142857142857, 0.26785714285714285], - [0.6964285714285714, 0.26785714285714285], - [0.6964285714285714, 0.26785714285714285], - [0.7321428571428571, 0.26785714285714285], - [0.7321428571428571, 0.26785714285714285], - [0.7678571428571429, 0.26785714285714285], - [0.7678571428571429, 0.26785714285714285], - [0.8035714285714286, 0.26785714285714285], - [0.8035714285714286, 0.26785714285714285], - [0.8392857142857143, 0.26785714285714285], - [0.8392857142857143, 0.26785714285714285], - [0.875, 0.26785714285714285], - [0.875, 0.26785714285714285], - [0.9107142857142857, 0.26785714285714285], - [0.9107142857142857, 0.26785714285714285], - [0.9464285714285714, 0.26785714285714285], - [0.9464285714285714, 0.26785714285714285], - [0.9821428571428571, 0.26785714285714285], - [0.9821428571428571, 0.26785714285714285], - [0.017857142857142856, 0.30357142857142855], - [0.017857142857142856, 0.30357142857142855], - [0.05357142857142857, 0.30357142857142855], - [0.05357142857142857, 0.30357142857142855], - [0.08928571428571429, 0.30357142857142855], - [0.08928571428571429, 0.30357142857142855], - [0.125, 0.30357142857142855], - [0.125, 0.30357142857142855], - [0.16071428571428573, 0.30357142857142855], - [0.16071428571428573, 0.30357142857142855], - [0.19642857142857142, 0.30357142857142855], - [0.19642857142857142, 0.30357142857142855], - [0.23214285714285715, 0.30357142857142855], - [0.23214285714285715, 0.30357142857142855], - [0.26785714285714285, 0.30357142857142855], - [0.26785714285714285, 0.30357142857142855], - [0.30357142857142855, 0.30357142857142855], - [0.30357142857142855, 0.30357142857142855], - [0.3392857142857143, 0.30357142857142855], - [0.3392857142857143, 0.30357142857142855], - [0.375, 
0.30357142857142855], - [0.375, 0.30357142857142855], - [0.4107142857142857, 0.30357142857142855], - [0.4107142857142857, 0.30357142857142855], - [0.44642857142857145, 0.30357142857142855], - [0.44642857142857145, 0.30357142857142855], - [0.48214285714285715, 0.30357142857142855], - [0.48214285714285715, 0.30357142857142855], - [0.5178571428571429, 0.30357142857142855], - [0.5178571428571429, 0.30357142857142855], - [0.5535714285714286, 0.30357142857142855], - [0.5535714285714286, 0.30357142857142855], - [0.5892857142857143, 0.30357142857142855], - [0.5892857142857143, 0.30357142857142855], - [0.625, 0.30357142857142855], - [0.625, 0.30357142857142855], - [0.6607142857142857, 0.30357142857142855], - [0.6607142857142857, 0.30357142857142855], - [0.6964285714285714, 0.30357142857142855], - [0.6964285714285714, 0.30357142857142855], - [0.7321428571428571, 0.30357142857142855], - [0.7321428571428571, 0.30357142857142855], - [0.7678571428571429, 0.30357142857142855], - [0.7678571428571429, 0.30357142857142855], - [0.8035714285714286, 0.30357142857142855], - [0.8035714285714286, 0.30357142857142855], - [0.8392857142857143, 0.30357142857142855], - [0.8392857142857143, 0.30357142857142855], - [0.875, 0.30357142857142855], - [0.875, 0.30357142857142855], - [0.9107142857142857, 0.30357142857142855], - [0.9107142857142857, 0.30357142857142855], - [0.9464285714285714, 0.30357142857142855], - [0.9464285714285714, 0.30357142857142855], - [0.9821428571428571, 0.30357142857142855], - [0.9821428571428571, 0.30357142857142855], - [0.017857142857142856, 0.3392857142857143], - [0.017857142857142856, 0.3392857142857143], - [0.05357142857142857, 0.3392857142857143], - [0.05357142857142857, 0.3392857142857143], - [0.08928571428571429, 0.3392857142857143], - [0.08928571428571429, 0.3392857142857143], - [0.125, 0.3392857142857143], - [0.125, 0.3392857142857143], - [0.16071428571428573, 0.3392857142857143], - [0.16071428571428573, 0.3392857142857143], - [0.19642857142857142, 
0.3392857142857143], - [0.19642857142857142, 0.3392857142857143], - [0.23214285714285715, 0.3392857142857143], - [0.23214285714285715, 0.3392857142857143], - [0.26785714285714285, 0.3392857142857143], - [0.26785714285714285, 0.3392857142857143], - [0.30357142857142855, 0.3392857142857143], - [0.30357142857142855, 0.3392857142857143], - [0.3392857142857143, 0.3392857142857143], - [0.3392857142857143, 0.3392857142857143], - [0.375, 0.3392857142857143], - [0.375, 0.3392857142857143], - [0.4107142857142857, 0.3392857142857143], - [0.4107142857142857, 0.3392857142857143], - [0.44642857142857145, 0.3392857142857143], - [0.44642857142857145, 0.3392857142857143], - [0.48214285714285715, 0.3392857142857143], - [0.48214285714285715, 0.3392857142857143], - [0.5178571428571429, 0.3392857142857143], - [0.5178571428571429, 0.3392857142857143], - [0.5535714285714286, 0.3392857142857143], - [0.5535714285714286, 0.3392857142857143], - [0.5892857142857143, 0.3392857142857143], - [0.5892857142857143, 0.3392857142857143], - [0.625, 0.3392857142857143], - [0.625, 0.3392857142857143], - [0.6607142857142857, 0.3392857142857143], - [0.6607142857142857, 0.3392857142857143], - [0.6964285714285714, 0.3392857142857143], - [0.6964285714285714, 0.3392857142857143], - [0.7321428571428571, 0.3392857142857143], - [0.7321428571428571, 0.3392857142857143], - [0.7678571428571429, 0.3392857142857143], - [0.7678571428571429, 0.3392857142857143], - [0.8035714285714286, 0.3392857142857143], - [0.8035714285714286, 0.3392857142857143], - [0.8392857142857143, 0.3392857142857143], - [0.8392857142857143, 0.3392857142857143], - [0.875, 0.3392857142857143], - [0.875, 0.3392857142857143], - [0.9107142857142857, 0.3392857142857143], - [0.9107142857142857, 0.3392857142857143], - [0.9464285714285714, 0.3392857142857143], - [0.9464285714285714, 0.3392857142857143], - [0.9821428571428571, 0.3392857142857143], - [0.9821428571428571, 0.3392857142857143], - [0.017857142857142856, 0.375], - [0.017857142857142856, 0.375], 
- [0.05357142857142857, 0.375], - [0.05357142857142857, 0.375], - [0.08928571428571429, 0.375], - [0.08928571428571429, 0.375], - [0.125, 0.375], - [0.125, 0.375], - [0.16071428571428573, 0.375], - [0.16071428571428573, 0.375], - [0.19642857142857142, 0.375], - [0.19642857142857142, 0.375], - [0.23214285714285715, 0.375], - [0.23214285714285715, 0.375], - [0.26785714285714285, 0.375], - [0.26785714285714285, 0.375], - [0.30357142857142855, 0.375], - [0.30357142857142855, 0.375], - [0.3392857142857143, 0.375], - [0.3392857142857143, 0.375], - [0.375, 0.375], - [0.375, 0.375], - [0.4107142857142857, 0.375], - [0.4107142857142857, 0.375], - [0.44642857142857145, 0.375], - [0.44642857142857145, 0.375], - [0.48214285714285715, 0.375], - [0.48214285714285715, 0.375], - [0.5178571428571429, 0.375], - [0.5178571428571429, 0.375], - [0.5535714285714286, 0.375], - [0.5535714285714286, 0.375], - [0.5892857142857143, 0.375], - [0.5892857142857143, 0.375], - [0.625, 0.375], - [0.625, 0.375], - [0.6607142857142857, 0.375], - [0.6607142857142857, 0.375], - [0.6964285714285714, 0.375], - [0.6964285714285714, 0.375], - [0.7321428571428571, 0.375], - [0.7321428571428571, 0.375], - [0.7678571428571429, 0.375], - [0.7678571428571429, 0.375], - [0.8035714285714286, 0.375], - [0.8035714285714286, 0.375], - [0.8392857142857143, 0.375], - [0.8392857142857143, 0.375], - [0.875, 0.375], - [0.875, 0.375], - [0.9107142857142857, 0.375], - [0.9107142857142857, 0.375], - [0.9464285714285714, 0.375], - [0.9464285714285714, 0.375], - [0.9821428571428571, 0.375], - [0.9821428571428571, 0.375], - [0.017857142857142856, 0.4107142857142857], - [0.017857142857142856, 0.4107142857142857], - [0.05357142857142857, 0.4107142857142857], - [0.05357142857142857, 0.4107142857142857], - [0.08928571428571429, 0.4107142857142857], - [0.08928571428571429, 0.4107142857142857], - [0.125, 0.4107142857142857], - [0.125, 0.4107142857142857], - [0.16071428571428573, 0.4107142857142857], - [0.16071428571428573, 
0.4107142857142857], - [0.19642857142857142, 0.4107142857142857], - [0.19642857142857142, 0.4107142857142857], - [0.23214285714285715, 0.4107142857142857], - [0.23214285714285715, 0.4107142857142857], - [0.26785714285714285, 0.4107142857142857], - [0.26785714285714285, 0.4107142857142857], - [0.30357142857142855, 0.4107142857142857], - [0.30357142857142855, 0.4107142857142857], - [0.3392857142857143, 0.4107142857142857], - [0.3392857142857143, 0.4107142857142857], - [0.375, 0.4107142857142857], - [0.375, 0.4107142857142857], - [0.4107142857142857, 0.4107142857142857], - [0.4107142857142857, 0.4107142857142857], - [0.44642857142857145, 0.4107142857142857], - [0.44642857142857145, 0.4107142857142857], - [0.48214285714285715, 0.4107142857142857], - [0.48214285714285715, 0.4107142857142857], - [0.5178571428571429, 0.4107142857142857], - [0.5178571428571429, 0.4107142857142857], - [0.5535714285714286, 0.4107142857142857], - [0.5535714285714286, 0.4107142857142857], - [0.5892857142857143, 0.4107142857142857], - [0.5892857142857143, 0.4107142857142857], - [0.625, 0.4107142857142857], - [0.625, 0.4107142857142857], - [0.6607142857142857, 0.4107142857142857], - [0.6607142857142857, 0.4107142857142857], - [0.6964285714285714, 0.4107142857142857], - [0.6964285714285714, 0.4107142857142857], - [0.7321428571428571, 0.4107142857142857], - [0.7321428571428571, 0.4107142857142857], - [0.7678571428571429, 0.4107142857142857], - [0.7678571428571429, 0.4107142857142857], - [0.8035714285714286, 0.4107142857142857], - [0.8035714285714286, 0.4107142857142857], - [0.8392857142857143, 0.4107142857142857], - [0.8392857142857143, 0.4107142857142857], - [0.875, 0.4107142857142857], - [0.875, 0.4107142857142857], - [0.9107142857142857, 0.4107142857142857], - [0.9107142857142857, 0.4107142857142857], - [0.9464285714285714, 0.4107142857142857], - [0.9464285714285714, 0.4107142857142857], - [0.9821428571428571, 0.4107142857142857], - [0.9821428571428571, 0.4107142857142857], - 
[0.017857142857142856, 0.44642857142857145], - [0.017857142857142856, 0.44642857142857145], - [0.05357142857142857, 0.44642857142857145], - [0.05357142857142857, 0.44642857142857145], - [0.08928571428571429, 0.44642857142857145], - [0.08928571428571429, 0.44642857142857145], - [0.125, 0.44642857142857145], - [0.125, 0.44642857142857145], - [0.16071428571428573, 0.44642857142857145], - [0.16071428571428573, 0.44642857142857145], - [0.19642857142857142, 0.44642857142857145], - [0.19642857142857142, 0.44642857142857145], - [0.23214285714285715, 0.44642857142857145], - [0.23214285714285715, 0.44642857142857145], - [0.26785714285714285, 0.44642857142857145], - [0.26785714285714285, 0.44642857142857145], - [0.30357142857142855, 0.44642857142857145], - [0.30357142857142855, 0.44642857142857145], - [0.3392857142857143, 0.44642857142857145], - [0.3392857142857143, 0.44642857142857145], - [0.375, 0.44642857142857145], - [0.375, 0.44642857142857145], - [0.4107142857142857, 0.44642857142857145], - [0.4107142857142857, 0.44642857142857145], - [0.44642857142857145, 0.44642857142857145], - [0.44642857142857145, 0.44642857142857145], - [0.48214285714285715, 0.44642857142857145], - [0.48214285714285715, 0.44642857142857145], - [0.5178571428571429, 0.44642857142857145], - [0.5178571428571429, 0.44642857142857145], - [0.5535714285714286, 0.44642857142857145], - [0.5535714285714286, 0.44642857142857145], - [0.5892857142857143, 0.44642857142857145], - [0.5892857142857143, 0.44642857142857145], - [0.625, 0.44642857142857145], - [0.625, 0.44642857142857145], - [0.6607142857142857, 0.44642857142857145], - [0.6607142857142857, 0.44642857142857145], - [0.6964285714285714, 0.44642857142857145], - [0.6964285714285714, 0.44642857142857145], - [0.7321428571428571, 0.44642857142857145], - [0.7321428571428571, 0.44642857142857145], - [0.7678571428571429, 0.44642857142857145], - [0.7678571428571429, 0.44642857142857145], - [0.8035714285714286, 0.44642857142857145], - [0.8035714285714286, 
0.44642857142857145], - [0.8392857142857143, 0.44642857142857145], - [0.8392857142857143, 0.44642857142857145], - [0.875, 0.44642857142857145], - [0.875, 0.44642857142857145], - [0.9107142857142857, 0.44642857142857145], - [0.9107142857142857, 0.44642857142857145], - [0.9464285714285714, 0.44642857142857145], - [0.9464285714285714, 0.44642857142857145], - [0.9821428571428571, 0.44642857142857145], - [0.9821428571428571, 0.44642857142857145], - [0.017857142857142856, 0.48214285714285715], - [0.017857142857142856, 0.48214285714285715], - [0.05357142857142857, 0.48214285714285715], - [0.05357142857142857, 0.48214285714285715], - [0.08928571428571429, 0.48214285714285715], - [0.08928571428571429, 0.48214285714285715], - [0.125, 0.48214285714285715], - [0.125, 0.48214285714285715], - [0.16071428571428573, 0.48214285714285715], - [0.16071428571428573, 0.48214285714285715], - [0.19642857142857142, 0.48214285714285715], - [0.19642857142857142, 0.48214285714285715], - [0.23214285714285715, 0.48214285714285715], - [0.23214285714285715, 0.48214285714285715], - [0.26785714285714285, 0.48214285714285715], - [0.26785714285714285, 0.48214285714285715], - [0.30357142857142855, 0.48214285714285715], - [0.30357142857142855, 0.48214285714285715], - [0.3392857142857143, 0.48214285714285715], - [0.3392857142857143, 0.48214285714285715], - [0.375, 0.48214285714285715], - [0.375, 0.48214285714285715], - [0.4107142857142857, 0.48214285714285715], - [0.4107142857142857, 0.48214285714285715], - [0.44642857142857145, 0.48214285714285715], - [0.44642857142857145, 0.48214285714285715], - [0.48214285714285715, 0.48214285714285715], - [0.48214285714285715, 0.48214285714285715], - [0.5178571428571429, 0.48214285714285715], - [0.5178571428571429, 0.48214285714285715], - [0.5535714285714286, 0.48214285714285715], - [0.5535714285714286, 0.48214285714285715], - [0.5892857142857143, 0.48214285714285715], - [0.5892857142857143, 0.48214285714285715], - [0.625, 0.48214285714285715], - [0.625, 
0.48214285714285715], - [0.6607142857142857, 0.48214285714285715], - [0.6607142857142857, 0.48214285714285715], - [0.6964285714285714, 0.48214285714285715], - [0.6964285714285714, 0.48214285714285715], - [0.7321428571428571, 0.48214285714285715], - [0.7321428571428571, 0.48214285714285715], - [0.7678571428571429, 0.48214285714285715], - [0.7678571428571429, 0.48214285714285715], - [0.8035714285714286, 0.48214285714285715], - [0.8035714285714286, 0.48214285714285715], - [0.8392857142857143, 0.48214285714285715], - [0.8392857142857143, 0.48214285714285715], - [0.875, 0.48214285714285715], - [0.875, 0.48214285714285715], - [0.9107142857142857, 0.48214285714285715], - [0.9107142857142857, 0.48214285714285715], - [0.9464285714285714, 0.48214285714285715], - [0.9464285714285714, 0.48214285714285715], - [0.9821428571428571, 0.48214285714285715], - [0.9821428571428571, 0.48214285714285715], - [0.017857142857142856, 0.5178571428571429], - [0.017857142857142856, 0.5178571428571429], - [0.05357142857142857, 0.5178571428571429], - [0.05357142857142857, 0.5178571428571429], - [0.08928571428571429, 0.5178571428571429], - [0.08928571428571429, 0.5178571428571429], - [0.125, 0.5178571428571429], - [0.125, 0.5178571428571429], - [0.16071428571428573, 0.5178571428571429], - [0.16071428571428573, 0.5178571428571429], - [0.19642857142857142, 0.5178571428571429], - [0.19642857142857142, 0.5178571428571429], - [0.23214285714285715, 0.5178571428571429], - [0.23214285714285715, 0.5178571428571429], - [0.26785714285714285, 0.5178571428571429], - [0.26785714285714285, 0.5178571428571429], - [0.30357142857142855, 0.5178571428571429], - [0.30357142857142855, 0.5178571428571429], - [0.3392857142857143, 0.5178571428571429], - [0.3392857142857143, 0.5178571428571429], - [0.375, 0.5178571428571429], - [0.375, 0.5178571428571429], - [0.4107142857142857, 0.5178571428571429], - [0.4107142857142857, 0.5178571428571429], - [0.44642857142857145, 0.5178571428571429], - [0.44642857142857145, 
0.5178571428571429], - [0.48214285714285715, 0.5178571428571429], - [0.48214285714285715, 0.5178571428571429], - [0.5178571428571429, 0.5178571428571429], - [0.5178571428571429, 0.5178571428571429], - [0.5535714285714286, 0.5178571428571429], - [0.5535714285714286, 0.5178571428571429], - [0.5892857142857143, 0.5178571428571429], - [0.5892857142857143, 0.5178571428571429], - [0.625, 0.5178571428571429], - [0.625, 0.5178571428571429], - [0.6607142857142857, 0.5178571428571429], - [0.6607142857142857, 0.5178571428571429], - [0.6964285714285714, 0.5178571428571429], - [0.6964285714285714, 0.5178571428571429], - [0.7321428571428571, 0.5178571428571429], - [0.7321428571428571, 0.5178571428571429], - [0.7678571428571429, 0.5178571428571429], - [0.7678571428571429, 0.5178571428571429], - [0.8035714285714286, 0.5178571428571429], - [0.8035714285714286, 0.5178571428571429], - [0.8392857142857143, 0.5178571428571429], - [0.8392857142857143, 0.5178571428571429], - [0.875, 0.5178571428571429], - [0.875, 0.5178571428571429], - [0.9107142857142857, 0.5178571428571429], - [0.9107142857142857, 0.5178571428571429], - [0.9464285714285714, 0.5178571428571429], - [0.9464285714285714, 0.5178571428571429], - [0.9821428571428571, 0.5178571428571429], - [0.9821428571428571, 0.5178571428571429], - [0.017857142857142856, 0.5535714285714286], - [0.017857142857142856, 0.5535714285714286], - [0.05357142857142857, 0.5535714285714286], - [0.05357142857142857, 0.5535714285714286], - [0.08928571428571429, 0.5535714285714286], - [0.08928571428571429, 0.5535714285714286], - [0.125, 0.5535714285714286], - [0.125, 0.5535714285714286], - [0.16071428571428573, 0.5535714285714286], - [0.16071428571428573, 0.5535714285714286], - [0.19642857142857142, 0.5535714285714286], - [0.19642857142857142, 0.5535714285714286], - [0.23214285714285715, 0.5535714285714286], - [0.23214285714285715, 0.5535714285714286], - [0.26785714285714285, 0.5535714285714286], - [0.26785714285714285, 0.5535714285714286], - 
[0.30357142857142855, 0.5535714285714286], - [0.30357142857142855, 0.5535714285714286], - [0.3392857142857143, 0.5535714285714286], - [0.3392857142857143, 0.5535714285714286], - [0.375, 0.5535714285714286], - [0.375, 0.5535714285714286], - [0.4107142857142857, 0.5535714285714286], - [0.4107142857142857, 0.5535714285714286], - [0.44642857142857145, 0.5535714285714286], - [0.44642857142857145, 0.5535714285714286], - [0.48214285714285715, 0.5535714285714286], - [0.48214285714285715, 0.5535714285714286], - [0.5178571428571429, 0.5535714285714286], - [0.5178571428571429, 0.5535714285714286], - [0.5535714285714286, 0.5535714285714286], - [0.5535714285714286, 0.5535714285714286], - [0.5892857142857143, 0.5535714285714286], - [0.5892857142857143, 0.5535714285714286], - [0.625, 0.5535714285714286], - [0.625, 0.5535714285714286], - [0.6607142857142857, 0.5535714285714286], - [0.6607142857142857, 0.5535714285714286], - [0.6964285714285714, 0.5535714285714286], - [0.6964285714285714, 0.5535714285714286], - [0.7321428571428571, 0.5535714285714286], - [0.7321428571428571, 0.5535714285714286], - [0.7678571428571429, 0.5535714285714286], - [0.7678571428571429, 0.5535714285714286], - [0.8035714285714286, 0.5535714285714286], - [0.8035714285714286, 0.5535714285714286], - [0.8392857142857143, 0.5535714285714286], - [0.8392857142857143, 0.5535714285714286], - [0.875, 0.5535714285714286], - [0.875, 0.5535714285714286], - [0.9107142857142857, 0.5535714285714286], - [0.9107142857142857, 0.5535714285714286], - [0.9464285714285714, 0.5535714285714286], - [0.9464285714285714, 0.5535714285714286], - [0.9821428571428571, 0.5535714285714286], - [0.9821428571428571, 0.5535714285714286], - [0.017857142857142856, 0.5892857142857143], - [0.017857142857142856, 0.5892857142857143], - [0.05357142857142857, 0.5892857142857143], - [0.05357142857142857, 0.5892857142857143], - [0.08928571428571429, 0.5892857142857143], - [0.08928571428571429, 0.5892857142857143], - [0.125, 0.5892857142857143], - [0.125, 
0.5892857142857143], - [0.16071428571428573, 0.5892857142857143], - [0.16071428571428573, 0.5892857142857143], - [0.19642857142857142, 0.5892857142857143], - [0.19642857142857142, 0.5892857142857143], - [0.23214285714285715, 0.5892857142857143], - [0.23214285714285715, 0.5892857142857143], - [0.26785714285714285, 0.5892857142857143], - [0.26785714285714285, 0.5892857142857143], - [0.30357142857142855, 0.5892857142857143], - [0.30357142857142855, 0.5892857142857143], - [0.3392857142857143, 0.5892857142857143], - [0.3392857142857143, 0.5892857142857143], - [0.375, 0.5892857142857143], - [0.375, 0.5892857142857143], - [0.4107142857142857, 0.5892857142857143], - [0.4107142857142857, 0.5892857142857143], - [0.44642857142857145, 0.5892857142857143], - [0.44642857142857145, 0.5892857142857143], - [0.48214285714285715, 0.5892857142857143], - [0.48214285714285715, 0.5892857142857143], - [0.5178571428571429, 0.5892857142857143], - [0.5178571428571429, 0.5892857142857143], - [0.5535714285714286, 0.5892857142857143], - [0.5535714285714286, 0.5892857142857143], - [0.5892857142857143, 0.5892857142857143], - [0.5892857142857143, 0.5892857142857143], - [0.625, 0.5892857142857143], - [0.625, 0.5892857142857143], - [0.6607142857142857, 0.5892857142857143], - [0.6607142857142857, 0.5892857142857143], - [0.6964285714285714, 0.5892857142857143], - [0.6964285714285714, 0.5892857142857143], - [0.7321428571428571, 0.5892857142857143], - [0.7321428571428571, 0.5892857142857143], - [0.7678571428571429, 0.5892857142857143], - [0.7678571428571429, 0.5892857142857143], - [0.8035714285714286, 0.5892857142857143], - [0.8035714285714286, 0.5892857142857143], - [0.8392857142857143, 0.5892857142857143], - [0.8392857142857143, 0.5892857142857143], - [0.875, 0.5892857142857143], - [0.875, 0.5892857142857143], - [0.9107142857142857, 0.5892857142857143], - [0.9107142857142857, 0.5892857142857143], - [0.9464285714285714, 0.5892857142857143], - [0.9464285714285714, 0.5892857142857143], - 
[0.9821428571428571, 0.5892857142857143], - [0.9821428571428571, 0.5892857142857143], - [0.017857142857142856, 0.625], - [0.017857142857142856, 0.625], - [0.05357142857142857, 0.625], - [0.05357142857142857, 0.625], - [0.08928571428571429, 0.625], - [0.08928571428571429, 0.625], - [0.125, 0.625], - [0.125, 0.625], - [0.16071428571428573, 0.625], - [0.16071428571428573, 0.625], - [0.19642857142857142, 0.625], - [0.19642857142857142, 0.625], - [0.23214285714285715, 0.625], - [0.23214285714285715, 0.625], - [0.26785714285714285, 0.625], - [0.26785714285714285, 0.625], - [0.30357142857142855, 0.625], - [0.30357142857142855, 0.625], - [0.3392857142857143, 0.625], - [0.3392857142857143, 0.625], - [0.375, 0.625], - [0.375, 0.625], - [0.4107142857142857, 0.625], - [0.4107142857142857, 0.625], - [0.44642857142857145, 0.625], - [0.44642857142857145, 0.625], - [0.48214285714285715, 0.625], - [0.48214285714285715, 0.625], - [0.5178571428571429, 0.625], - [0.5178571428571429, 0.625], - [0.5535714285714286, 0.625], - [0.5535714285714286, 0.625], - [0.5892857142857143, 0.625], - [0.5892857142857143, 0.625], - [0.625, 0.625], - [0.625, 0.625], - [0.6607142857142857, 0.625], - [0.6607142857142857, 0.625], - [0.6964285714285714, 0.625], - [0.6964285714285714, 0.625], - [0.7321428571428571, 0.625], - [0.7321428571428571, 0.625], - [0.7678571428571429, 0.625], - [0.7678571428571429, 0.625], - [0.8035714285714286, 0.625], - [0.8035714285714286, 0.625], - [0.8392857142857143, 0.625], - [0.8392857142857143, 0.625], - [0.875, 0.625], - [0.875, 0.625], - [0.9107142857142857, 0.625], - [0.9107142857142857, 0.625], - [0.9464285714285714, 0.625], - [0.9464285714285714, 0.625], - [0.9821428571428571, 0.625], - [0.9821428571428571, 0.625], - [0.017857142857142856, 0.6607142857142857], - [0.017857142857142856, 0.6607142857142857], - [0.05357142857142857, 0.6607142857142857], - [0.05357142857142857, 0.6607142857142857], - [0.08928571428571429, 0.6607142857142857], - [0.08928571428571429, 
0.6607142857142857], - [0.125, 0.6607142857142857], - [0.125, 0.6607142857142857], - [0.16071428571428573, 0.6607142857142857], - [0.16071428571428573, 0.6607142857142857], - [0.19642857142857142, 0.6607142857142857], - [0.19642857142857142, 0.6607142857142857], - [0.23214285714285715, 0.6607142857142857], - [0.23214285714285715, 0.6607142857142857], - [0.26785714285714285, 0.6607142857142857], - [0.26785714285714285, 0.6607142857142857], - [0.30357142857142855, 0.6607142857142857], - [0.30357142857142855, 0.6607142857142857], - [0.3392857142857143, 0.6607142857142857], - [0.3392857142857143, 0.6607142857142857], - [0.375, 0.6607142857142857], - [0.375, 0.6607142857142857], - [0.4107142857142857, 0.6607142857142857], - [0.4107142857142857, 0.6607142857142857], - [0.44642857142857145, 0.6607142857142857], - [0.44642857142857145, 0.6607142857142857], - [0.48214285714285715, 0.6607142857142857], - [0.48214285714285715, 0.6607142857142857], - [0.5178571428571429, 0.6607142857142857], - [0.5178571428571429, 0.6607142857142857], - [0.5535714285714286, 0.6607142857142857], - [0.5535714285714286, 0.6607142857142857], - [0.5892857142857143, 0.6607142857142857], - [0.5892857142857143, 0.6607142857142857], - [0.625, 0.6607142857142857], - [0.625, 0.6607142857142857], - [0.6607142857142857, 0.6607142857142857], - [0.6607142857142857, 0.6607142857142857], - [0.6964285714285714, 0.6607142857142857], - [0.6964285714285714, 0.6607142857142857], - [0.7321428571428571, 0.6607142857142857], - [0.7321428571428571, 0.6607142857142857], - [0.7678571428571429, 0.6607142857142857], - [0.7678571428571429, 0.6607142857142857], - [0.8035714285714286, 0.6607142857142857], - [0.8035714285714286, 0.6607142857142857], - [0.8392857142857143, 0.6607142857142857], - [0.8392857142857143, 0.6607142857142857], - [0.875, 0.6607142857142857], - [0.875, 0.6607142857142857], - [0.9107142857142857, 0.6607142857142857], - [0.9107142857142857, 0.6607142857142857], - [0.9464285714285714, 0.6607142857142857], 
- [0.9464285714285714, 0.6607142857142857], - [0.9821428571428571, 0.6607142857142857], - [0.9821428571428571, 0.6607142857142857], - [0.017857142857142856, 0.6964285714285714], - [0.017857142857142856, 0.6964285714285714], - [0.05357142857142857, 0.6964285714285714], - [0.05357142857142857, 0.6964285714285714], - [0.08928571428571429, 0.6964285714285714], - [0.08928571428571429, 0.6964285714285714], - [0.125, 0.6964285714285714], - [0.125, 0.6964285714285714], - [0.16071428571428573, 0.6964285714285714], - [0.16071428571428573, 0.6964285714285714], - [0.19642857142857142, 0.6964285714285714], - [0.19642857142857142, 0.6964285714285714], - [0.23214285714285715, 0.6964285714285714], - [0.23214285714285715, 0.6964285714285714], - [0.26785714285714285, 0.6964285714285714], - [0.26785714285714285, 0.6964285714285714], - [0.30357142857142855, 0.6964285714285714], - [0.30357142857142855, 0.6964285714285714], - [0.3392857142857143, 0.6964285714285714], - [0.3392857142857143, 0.6964285714285714], - [0.375, 0.6964285714285714], - [0.375, 0.6964285714285714], - [0.4107142857142857, 0.6964285714285714], - [0.4107142857142857, 0.6964285714285714], - [0.44642857142857145, 0.6964285714285714], - [0.44642857142857145, 0.6964285714285714], - [0.48214285714285715, 0.6964285714285714], - [0.48214285714285715, 0.6964285714285714], - [0.5178571428571429, 0.6964285714285714], - [0.5178571428571429, 0.6964285714285714], - [0.5535714285714286, 0.6964285714285714], - [0.5535714285714286, 0.6964285714285714], - [0.5892857142857143, 0.6964285714285714], - [0.5892857142857143, 0.6964285714285714], - [0.625, 0.6964285714285714], - [0.625, 0.6964285714285714], - [0.6607142857142857, 0.6964285714285714], - [0.6607142857142857, 0.6964285714285714], - [0.6964285714285714, 0.6964285714285714], - [0.6964285714285714, 0.6964285714285714], - [0.7321428571428571, 0.6964285714285714], - [0.7321428571428571, 0.6964285714285714], - [0.7678571428571429, 0.6964285714285714], - [0.7678571428571429, 
0.6964285714285714], - [0.8035714285714286, 0.6964285714285714], - [0.8035714285714286, 0.6964285714285714], - [0.8392857142857143, 0.6964285714285714], - [0.8392857142857143, 0.6964285714285714], - [0.875, 0.6964285714285714], - [0.875, 0.6964285714285714], - [0.9107142857142857, 0.6964285714285714], - [0.9107142857142857, 0.6964285714285714], - [0.9464285714285714, 0.6964285714285714], - [0.9464285714285714, 0.6964285714285714], - [0.9821428571428571, 0.6964285714285714], - [0.9821428571428571, 0.6964285714285714], - [0.017857142857142856, 0.7321428571428571], - [0.017857142857142856, 0.7321428571428571], - [0.05357142857142857, 0.7321428571428571], - [0.05357142857142857, 0.7321428571428571], - [0.08928571428571429, 0.7321428571428571], - [0.08928571428571429, 0.7321428571428571], - [0.125, 0.7321428571428571], - [0.125, 0.7321428571428571], - [0.16071428571428573, 0.7321428571428571], - [0.16071428571428573, 0.7321428571428571], - [0.19642857142857142, 0.7321428571428571], - [0.19642857142857142, 0.7321428571428571], - [0.23214285714285715, 0.7321428571428571], - [0.23214285714285715, 0.7321428571428571], - [0.26785714285714285, 0.7321428571428571], - [0.26785714285714285, 0.7321428571428571], - [0.30357142857142855, 0.7321428571428571], - [0.30357142857142855, 0.7321428571428571], - [0.3392857142857143, 0.7321428571428571], - [0.3392857142857143, 0.7321428571428571], - [0.375, 0.7321428571428571], - [0.375, 0.7321428571428571], - [0.4107142857142857, 0.7321428571428571], - [0.4107142857142857, 0.7321428571428571], - [0.44642857142857145, 0.7321428571428571], - [0.44642857142857145, 0.7321428571428571], - [0.48214285714285715, 0.7321428571428571], - [0.48214285714285715, 0.7321428571428571], - [0.5178571428571429, 0.7321428571428571], - [0.5178571428571429, 0.7321428571428571], - [0.5535714285714286, 0.7321428571428571], - [0.5535714285714286, 0.7321428571428571], - [0.5892857142857143, 0.7321428571428571], - [0.5892857142857143, 0.7321428571428571], - [0.625, 
0.7321428571428571], - [0.625, 0.7321428571428571], - [0.6607142857142857, 0.7321428571428571], - [0.6607142857142857, 0.7321428571428571], - [0.6964285714285714, 0.7321428571428571], - [0.6964285714285714, 0.7321428571428571], - [0.7321428571428571, 0.7321428571428571], - [0.7321428571428571, 0.7321428571428571], - [0.7678571428571429, 0.7321428571428571], - [0.7678571428571429, 0.7321428571428571], - [0.8035714285714286, 0.7321428571428571], - [0.8035714285714286, 0.7321428571428571], - [0.8392857142857143, 0.7321428571428571], - [0.8392857142857143, 0.7321428571428571], - [0.875, 0.7321428571428571], - [0.875, 0.7321428571428571], - [0.9107142857142857, 0.7321428571428571], - [0.9107142857142857, 0.7321428571428571], - [0.9464285714285714, 0.7321428571428571], - [0.9464285714285714, 0.7321428571428571], - [0.9821428571428571, 0.7321428571428571], - [0.9821428571428571, 0.7321428571428571], - [0.017857142857142856, 0.7678571428571429], - [0.017857142857142856, 0.7678571428571429], - [0.05357142857142857, 0.7678571428571429], - [0.05357142857142857, 0.7678571428571429], - [0.08928571428571429, 0.7678571428571429], - [0.08928571428571429, 0.7678571428571429], - [0.125, 0.7678571428571429], - [0.125, 0.7678571428571429], - [0.16071428571428573, 0.7678571428571429], - [0.16071428571428573, 0.7678571428571429], - [0.19642857142857142, 0.7678571428571429], - [0.19642857142857142, 0.7678571428571429], - [0.23214285714285715, 0.7678571428571429], - [0.23214285714285715, 0.7678571428571429], - [0.26785714285714285, 0.7678571428571429], - [0.26785714285714285, 0.7678571428571429], - [0.30357142857142855, 0.7678571428571429], - [0.30357142857142855, 0.7678571428571429], - [0.3392857142857143, 0.7678571428571429], - [0.3392857142857143, 0.7678571428571429], - [0.375, 0.7678571428571429], - [0.375, 0.7678571428571429], - [0.4107142857142857, 0.7678571428571429], - [0.4107142857142857, 0.7678571428571429], - [0.44642857142857145, 0.7678571428571429], - [0.44642857142857145, 
0.7678571428571429], - [0.48214285714285715, 0.7678571428571429], - [0.48214285714285715, 0.7678571428571429], - [0.5178571428571429, 0.7678571428571429], - [0.5178571428571429, 0.7678571428571429], - [0.5535714285714286, 0.7678571428571429], - [0.5535714285714286, 0.7678571428571429], - [0.5892857142857143, 0.7678571428571429], - [0.5892857142857143, 0.7678571428571429], - [0.625, 0.7678571428571429], - [0.625, 0.7678571428571429], - [0.6607142857142857, 0.7678571428571429], - [0.6607142857142857, 0.7678571428571429], - [0.6964285714285714, 0.7678571428571429], - [0.6964285714285714, 0.7678571428571429], - [0.7321428571428571, 0.7678571428571429], - [0.7321428571428571, 0.7678571428571429], - [0.7678571428571429, 0.7678571428571429], - [0.7678571428571429, 0.7678571428571429], - [0.8035714285714286, 0.7678571428571429], - [0.8035714285714286, 0.7678571428571429], - [0.8392857142857143, 0.7678571428571429], - [0.8392857142857143, 0.7678571428571429], - [0.875, 0.7678571428571429], - [0.875, 0.7678571428571429], - [0.9107142857142857, 0.7678571428571429], - [0.9107142857142857, 0.7678571428571429], - [0.9464285714285714, 0.7678571428571429], - [0.9464285714285714, 0.7678571428571429], - [0.9821428571428571, 0.7678571428571429], - [0.9821428571428571, 0.7678571428571429], - [0.017857142857142856, 0.8035714285714286], - [0.017857142857142856, 0.8035714285714286], - [0.05357142857142857, 0.8035714285714286], - [0.05357142857142857, 0.8035714285714286], - [0.08928571428571429, 0.8035714285714286], - [0.08928571428571429, 0.8035714285714286], - [0.125, 0.8035714285714286], - [0.125, 0.8035714285714286], - [0.16071428571428573, 0.8035714285714286], - [0.16071428571428573, 0.8035714285714286], - [0.19642857142857142, 0.8035714285714286], - [0.19642857142857142, 0.8035714285714286], - [0.23214285714285715, 0.8035714285714286], - [0.23214285714285715, 0.8035714285714286], - [0.26785714285714285, 0.8035714285714286], - [0.26785714285714285, 0.8035714285714286], - 
[0.30357142857142855, 0.8035714285714286], - [0.30357142857142855, 0.8035714285714286], - [0.3392857142857143, 0.8035714285714286], - [0.3392857142857143, 0.8035714285714286], - [0.375, 0.8035714285714286], - [0.375, 0.8035714285714286], - [0.4107142857142857, 0.8035714285714286], - [0.4107142857142857, 0.8035714285714286], - [0.44642857142857145, 0.8035714285714286], - [0.44642857142857145, 0.8035714285714286], - [0.48214285714285715, 0.8035714285714286], - [0.48214285714285715, 0.8035714285714286], - [0.5178571428571429, 0.8035714285714286], - [0.5178571428571429, 0.8035714285714286], - [0.5535714285714286, 0.8035714285714286], - [0.5535714285714286, 0.8035714285714286], - [0.5892857142857143, 0.8035714285714286], - [0.5892857142857143, 0.8035714285714286], - [0.625, 0.8035714285714286], - [0.625, 0.8035714285714286], - [0.6607142857142857, 0.8035714285714286], - [0.6607142857142857, 0.8035714285714286], - [0.6964285714285714, 0.8035714285714286], - [0.6964285714285714, 0.8035714285714286], - [0.7321428571428571, 0.8035714285714286], - [0.7321428571428571, 0.8035714285714286], - [0.7678571428571429, 0.8035714285714286], - [0.7678571428571429, 0.8035714285714286], - [0.8035714285714286, 0.8035714285714286], - [0.8035714285714286, 0.8035714285714286], - [0.8392857142857143, 0.8035714285714286], - [0.8392857142857143, 0.8035714285714286], - [0.875, 0.8035714285714286], - [0.875, 0.8035714285714286], - [0.9107142857142857, 0.8035714285714286], - [0.9107142857142857, 0.8035714285714286], - [0.9464285714285714, 0.8035714285714286], - [0.9464285714285714, 0.8035714285714286], - [0.9821428571428571, 0.8035714285714286], - [0.9821428571428571, 0.8035714285714286], - [0.017857142857142856, 0.8392857142857143], - [0.017857142857142856, 0.8392857142857143], - [0.05357142857142857, 0.8392857142857143], - [0.05357142857142857, 0.8392857142857143], - [0.08928571428571429, 0.8392857142857143], - [0.08928571428571429, 0.8392857142857143], - [0.125, 0.8392857142857143], - [0.125, 
0.8392857142857143], - [0.16071428571428573, 0.8392857142857143], - [0.16071428571428573, 0.8392857142857143], - [0.19642857142857142, 0.8392857142857143], - [0.19642857142857142, 0.8392857142857143], - [0.23214285714285715, 0.8392857142857143], - [0.23214285714285715, 0.8392857142857143], - [0.26785714285714285, 0.8392857142857143], - [0.26785714285714285, 0.8392857142857143], - [0.30357142857142855, 0.8392857142857143], - [0.30357142857142855, 0.8392857142857143], - [0.3392857142857143, 0.8392857142857143], - [0.3392857142857143, 0.8392857142857143], - [0.375, 0.8392857142857143], - [0.375, 0.8392857142857143], - [0.4107142857142857, 0.8392857142857143], - [0.4107142857142857, 0.8392857142857143], - [0.44642857142857145, 0.8392857142857143], - [0.44642857142857145, 0.8392857142857143], - [0.48214285714285715, 0.8392857142857143], - [0.48214285714285715, 0.8392857142857143], - [0.5178571428571429, 0.8392857142857143], - [0.5178571428571429, 0.8392857142857143], - [0.5535714285714286, 0.8392857142857143], - [0.5535714285714286, 0.8392857142857143], - [0.5892857142857143, 0.8392857142857143], - [0.5892857142857143, 0.8392857142857143], - [0.625, 0.8392857142857143], - [0.625, 0.8392857142857143], - [0.6607142857142857, 0.8392857142857143], - [0.6607142857142857, 0.8392857142857143], - [0.6964285714285714, 0.8392857142857143], - [0.6964285714285714, 0.8392857142857143], - [0.7321428571428571, 0.8392857142857143], - [0.7321428571428571, 0.8392857142857143], - [0.7678571428571429, 0.8392857142857143], - [0.7678571428571429, 0.8392857142857143], - [0.8035714285714286, 0.8392857142857143], - [0.8035714285714286, 0.8392857142857143], - [0.8392857142857143, 0.8392857142857143], - [0.8392857142857143, 0.8392857142857143], - [0.875, 0.8392857142857143], - [0.875, 0.8392857142857143], - [0.9107142857142857, 0.8392857142857143], - [0.9107142857142857, 0.8392857142857143], - [0.9464285714285714, 0.8392857142857143], - [0.9464285714285714, 0.8392857142857143], - 
[0.9821428571428571, 0.8392857142857143], - [0.9821428571428571, 0.8392857142857143], - [0.017857142857142856, 0.875], - [0.017857142857142856, 0.875], - [0.05357142857142857, 0.875], - [0.05357142857142857, 0.875], - [0.08928571428571429, 0.875], - [0.08928571428571429, 0.875], - [0.125, 0.875], - [0.125, 0.875], - [0.16071428571428573, 0.875], - [0.16071428571428573, 0.875], - [0.19642857142857142, 0.875], - [0.19642857142857142, 0.875], - [0.23214285714285715, 0.875], - [0.23214285714285715, 0.875], - [0.26785714285714285, 0.875], - [0.26785714285714285, 0.875], - [0.30357142857142855, 0.875], - [0.30357142857142855, 0.875], - [0.3392857142857143, 0.875], - [0.3392857142857143, 0.875], - [0.375, 0.875], - [0.375, 0.875], - [0.4107142857142857, 0.875], - [0.4107142857142857, 0.875], - [0.44642857142857145, 0.875], - [0.44642857142857145, 0.875], - [0.48214285714285715, 0.875], - [0.48214285714285715, 0.875], - [0.5178571428571429, 0.875], - [0.5178571428571429, 0.875], - [0.5535714285714286, 0.875], - [0.5535714285714286, 0.875], - [0.5892857142857143, 0.875], - [0.5892857142857143, 0.875], - [0.625, 0.875], - [0.625, 0.875], - [0.6607142857142857, 0.875], - [0.6607142857142857, 0.875], - [0.6964285714285714, 0.875], - [0.6964285714285714, 0.875], - [0.7321428571428571, 0.875], - [0.7321428571428571, 0.875], - [0.7678571428571429, 0.875], - [0.7678571428571429, 0.875], - [0.8035714285714286, 0.875], - [0.8035714285714286, 0.875], - [0.8392857142857143, 0.875], - [0.8392857142857143, 0.875], - [0.875, 0.875], - [0.875, 0.875], - [0.9107142857142857, 0.875], - [0.9107142857142857, 0.875], - [0.9464285714285714, 0.875], - [0.9464285714285714, 0.875], - [0.9821428571428571, 0.875], - [0.9821428571428571, 0.875], - [0.017857142857142856, 0.9107142857142857], - [0.017857142857142856, 0.9107142857142857], - [0.05357142857142857, 0.9107142857142857], - [0.05357142857142857, 0.9107142857142857], - [0.08928571428571429, 0.9107142857142857], - [0.08928571428571429, 
0.9107142857142857], - [0.125, 0.9107142857142857], - [0.125, 0.9107142857142857], - [0.16071428571428573, 0.9107142857142857], - [0.16071428571428573, 0.9107142857142857], - [0.19642857142857142, 0.9107142857142857], - [0.19642857142857142, 0.9107142857142857], - [0.23214285714285715, 0.9107142857142857], - [0.23214285714285715, 0.9107142857142857], - [0.26785714285714285, 0.9107142857142857], - [0.26785714285714285, 0.9107142857142857], - [0.30357142857142855, 0.9107142857142857], - [0.30357142857142855, 0.9107142857142857], - [0.3392857142857143, 0.9107142857142857], - [0.3392857142857143, 0.9107142857142857], - [0.375, 0.9107142857142857], - [0.375, 0.9107142857142857], - [0.4107142857142857, 0.9107142857142857], - [0.4107142857142857, 0.9107142857142857], - [0.44642857142857145, 0.9107142857142857], - [0.44642857142857145, 0.9107142857142857], - [0.48214285714285715, 0.9107142857142857], - [0.48214285714285715, 0.9107142857142857], - [0.5178571428571429, 0.9107142857142857], - [0.5178571428571429, 0.9107142857142857], - [0.5535714285714286, 0.9107142857142857], - [0.5535714285714286, 0.9107142857142857], - [0.5892857142857143, 0.9107142857142857], - [0.5892857142857143, 0.9107142857142857], - [0.625, 0.9107142857142857], - [0.625, 0.9107142857142857], - [0.6607142857142857, 0.9107142857142857], - [0.6607142857142857, 0.9107142857142857], - [0.6964285714285714, 0.9107142857142857], - [0.6964285714285714, 0.9107142857142857], - [0.7321428571428571, 0.9107142857142857], - [0.7321428571428571, 0.9107142857142857], - [0.7678571428571429, 0.9107142857142857], - [0.7678571428571429, 0.9107142857142857], - [0.8035714285714286, 0.9107142857142857], - [0.8035714285714286, 0.9107142857142857], - [0.8392857142857143, 0.9107142857142857], - [0.8392857142857143, 0.9107142857142857], - [0.875, 0.9107142857142857], - [0.875, 0.9107142857142857], - [0.9107142857142857, 0.9107142857142857], - [0.9107142857142857, 0.9107142857142857], - [0.9464285714285714, 0.9107142857142857], 
- [0.9464285714285714, 0.9107142857142857], - [0.9821428571428571, 0.9107142857142857], - [0.9821428571428571, 0.9107142857142857], - [0.017857142857142856, 0.9464285714285714], - [0.017857142857142856, 0.9464285714285714], - [0.05357142857142857, 0.9464285714285714], - [0.05357142857142857, 0.9464285714285714], - [0.08928571428571429, 0.9464285714285714], - [0.08928571428571429, 0.9464285714285714], - [0.125, 0.9464285714285714], - [0.125, 0.9464285714285714], - [0.16071428571428573, 0.9464285714285714], - [0.16071428571428573, 0.9464285714285714], - [0.19642857142857142, 0.9464285714285714], - [0.19642857142857142, 0.9464285714285714], - [0.23214285714285715, 0.9464285714285714], - [0.23214285714285715, 0.9464285714285714], - [0.26785714285714285, 0.9464285714285714], - [0.26785714285714285, 0.9464285714285714], - [0.30357142857142855, 0.9464285714285714], - [0.30357142857142855, 0.9464285714285714], - [0.3392857142857143, 0.9464285714285714], - [0.3392857142857143, 0.9464285714285714], - [0.375, 0.9464285714285714], - [0.375, 0.9464285714285714], - [0.4107142857142857, 0.9464285714285714], - [0.4107142857142857, 0.9464285714285714], - [0.44642857142857145, 0.9464285714285714], - [0.44642857142857145, 0.9464285714285714], - [0.48214285714285715, 0.9464285714285714], - [0.48214285714285715, 0.9464285714285714], - [0.5178571428571429, 0.9464285714285714], - [0.5178571428571429, 0.9464285714285714], - [0.5535714285714286, 0.9464285714285714], - [0.5535714285714286, 0.9464285714285714], - [0.5892857142857143, 0.9464285714285714], - [0.5892857142857143, 0.9464285714285714], - [0.625, 0.9464285714285714], - [0.625, 0.9464285714285714], - [0.6607142857142857, 0.9464285714285714], - [0.6607142857142857, 0.9464285714285714], - [0.6964285714285714, 0.9464285714285714], - [0.6964285714285714, 0.9464285714285714], - [0.7321428571428571, 0.9464285714285714], - [0.7321428571428571, 0.9464285714285714], - [0.7678571428571429, 0.9464285714285714], - [0.7678571428571429, 
0.9464285714285714], - [0.8035714285714286, 0.9464285714285714], - [0.8035714285714286, 0.9464285714285714], - [0.8392857142857143, 0.9464285714285714], - [0.8392857142857143, 0.9464285714285714], - [0.875, 0.9464285714285714], - [0.875, 0.9464285714285714], - [0.9107142857142857, 0.9464285714285714], - [0.9107142857142857, 0.9464285714285714], - [0.9464285714285714, 0.9464285714285714], - [0.9464285714285714, 0.9464285714285714], - [0.9821428571428571, 0.9464285714285714], - [0.9821428571428571, 0.9464285714285714], - [0.017857142857142856, 0.9821428571428571], - [0.017857142857142856, 0.9821428571428571], - [0.05357142857142857, 0.9821428571428571], - [0.05357142857142857, 0.9821428571428571], - [0.08928571428571429, 0.9821428571428571], - [0.08928571428571429, 0.9821428571428571], - [0.125, 0.9821428571428571], - [0.125, 0.9821428571428571], - [0.16071428571428573, 0.9821428571428571], - [0.16071428571428573, 0.9821428571428571], - [0.19642857142857142, 0.9821428571428571], - [0.19642857142857142, 0.9821428571428571], - [0.23214285714285715, 0.9821428571428571], - [0.23214285714285715, 0.9821428571428571], - [0.26785714285714285, 0.9821428571428571], - [0.26785714285714285, 0.9821428571428571], - [0.30357142857142855, 0.9821428571428571], - [0.30357142857142855, 0.9821428571428571], - [0.3392857142857143, 0.9821428571428571], - [0.3392857142857143, 0.9821428571428571], - [0.375, 0.9821428571428571], - [0.375, 0.9821428571428571], - [0.4107142857142857, 0.9821428571428571], - [0.4107142857142857, 0.9821428571428571], - [0.44642857142857145, 0.9821428571428571], - [0.44642857142857145, 0.9821428571428571], - [0.48214285714285715, 0.9821428571428571], - [0.48214285714285715, 0.9821428571428571], - [0.5178571428571429, 0.9821428571428571], - [0.5178571428571429, 0.9821428571428571], - [0.5535714285714286, 0.9821428571428571], - [0.5535714285714286, 0.9821428571428571], - [0.5892857142857143, 0.9821428571428571], - [0.5892857142857143, 0.9821428571428571], - [0.625, 
0.9821428571428571], - [0.625, 0.9821428571428571], - [0.6607142857142857, 0.9821428571428571], - [0.6607142857142857, 0.9821428571428571], - [0.6964285714285714, 0.9821428571428571], - [0.6964285714285714, 0.9821428571428571], - [0.7321428571428571, 0.9821428571428571], - [0.7321428571428571, 0.9821428571428571], - [0.7678571428571429, 0.9821428571428571], - [0.7678571428571429, 0.9821428571428571], - [0.8035714285714286, 0.9821428571428571], - [0.8035714285714286, 0.9821428571428571], - [0.8392857142857143, 0.9821428571428571], - [0.8392857142857143, 0.9821428571428571], - [0.875, 0.9821428571428571], - [0.875, 0.9821428571428571], - [0.9107142857142857, 0.9821428571428571], - [0.9107142857142857, 0.9821428571428571], - [0.9464285714285714, 0.9821428571428571], - [0.9464285714285714, 0.9821428571428571], - [0.9821428571428571, 0.9821428571428571], - [0.9821428571428571, 0.9821428571428571], - [0.03571428571428571, 0.03571428571428571], - [0.03571428571428571, 0.03571428571428571], - [0.10714285714285714, 0.03571428571428571], - [0.10714285714285714, 0.03571428571428571], - [0.17857142857142858, 0.03571428571428571], - [0.17857142857142858, 0.03571428571428571], - [0.25, 0.03571428571428571], - [0.25, 0.03571428571428571], - [0.32142857142857145, 0.03571428571428571], - [0.32142857142857145, 0.03571428571428571], - [0.39285714285714285, 0.03571428571428571], - [0.39285714285714285, 0.03571428571428571], - [0.4642857142857143, 0.03571428571428571], - [0.4642857142857143, 0.03571428571428571], - [0.5357142857142857, 0.03571428571428571], - [0.5357142857142857, 0.03571428571428571], - [0.6071428571428571, 0.03571428571428571], - [0.6071428571428571, 0.03571428571428571], - [0.6785714285714286, 0.03571428571428571], - [0.6785714285714286, 0.03571428571428571], - [0.75, 0.03571428571428571], - [0.75, 0.03571428571428571], - [0.8214285714285714, 0.03571428571428571], - [0.8214285714285714, 0.03571428571428571], - [0.8928571428571429, 0.03571428571428571], - 
[0.8928571428571429, 0.03571428571428571], - [0.9642857142857143, 0.03571428571428571], - [0.9642857142857143, 0.03571428571428571], - [0.03571428571428571, 0.10714285714285714], - [0.03571428571428571, 0.10714285714285714], - [0.10714285714285714, 0.10714285714285714], - [0.10714285714285714, 0.10714285714285714], - [0.17857142857142858, 0.10714285714285714], - [0.17857142857142858, 0.10714285714285714], - [0.25, 0.10714285714285714], - [0.25, 0.10714285714285714], - [0.32142857142857145, 0.10714285714285714], - [0.32142857142857145, 0.10714285714285714], - [0.39285714285714285, 0.10714285714285714], - [0.39285714285714285, 0.10714285714285714], - [0.4642857142857143, 0.10714285714285714], - [0.4642857142857143, 0.10714285714285714], - [0.5357142857142857, 0.10714285714285714], - [0.5357142857142857, 0.10714285714285714], - [0.6071428571428571, 0.10714285714285714], - [0.6071428571428571, 0.10714285714285714], - [0.6785714285714286, 0.10714285714285714], - [0.6785714285714286, 0.10714285714285714], - [0.75, 0.10714285714285714], - [0.75, 0.10714285714285714], - [0.8214285714285714, 0.10714285714285714], - [0.8214285714285714, 0.10714285714285714], - [0.8928571428571429, 0.10714285714285714], - [0.8928571428571429, 0.10714285714285714], - [0.9642857142857143, 0.10714285714285714], - [0.9642857142857143, 0.10714285714285714], - [0.03571428571428571, 0.17857142857142858], - [0.03571428571428571, 0.17857142857142858], - [0.10714285714285714, 0.17857142857142858], - [0.10714285714285714, 0.17857142857142858], - [0.17857142857142858, 0.17857142857142858], - [0.17857142857142858, 0.17857142857142858], - [0.25, 0.17857142857142858], - [0.25, 0.17857142857142858], - [0.32142857142857145, 0.17857142857142858], - [0.32142857142857145, 0.17857142857142858], - [0.39285714285714285, 0.17857142857142858], - [0.39285714285714285, 0.17857142857142858], - [0.4642857142857143, 0.17857142857142858], - [0.4642857142857143, 0.17857142857142858], - [0.5357142857142857, 
0.17857142857142858], - [0.5357142857142857, 0.17857142857142858], - [0.6071428571428571, 0.17857142857142858], - [0.6071428571428571, 0.17857142857142858], - [0.6785714285714286, 0.17857142857142858], - [0.6785714285714286, 0.17857142857142858], - [0.75, 0.17857142857142858], - [0.75, 0.17857142857142858], - [0.8214285714285714, 0.17857142857142858], - [0.8214285714285714, 0.17857142857142858], - [0.8928571428571429, 0.17857142857142858], - [0.8928571428571429, 0.17857142857142858], - [0.9642857142857143, 0.17857142857142858], - [0.9642857142857143, 0.17857142857142858], - [0.03571428571428571, 0.25], - [0.03571428571428571, 0.25], - [0.10714285714285714, 0.25], - [0.10714285714285714, 0.25], - [0.17857142857142858, 0.25], - [0.17857142857142858, 0.25], - [0.25, 0.25], - [0.25, 0.25], - [0.32142857142857145, 0.25], - [0.32142857142857145, 0.25], - [0.39285714285714285, 0.25], - [0.39285714285714285, 0.25], - [0.4642857142857143, 0.25], - [0.4642857142857143, 0.25], - [0.5357142857142857, 0.25], - [0.5357142857142857, 0.25], - [0.6071428571428571, 0.25], - [0.6071428571428571, 0.25], - [0.6785714285714286, 0.25], - [0.6785714285714286, 0.25], - [0.75, 0.25], - [0.75, 0.25], - [0.8214285714285714, 0.25], - [0.8214285714285714, 0.25], - [0.8928571428571429, 0.25], - [0.8928571428571429, 0.25], - [0.9642857142857143, 0.25], - [0.9642857142857143, 0.25], - [0.03571428571428571, 0.32142857142857145], - [0.03571428571428571, 0.32142857142857145], - [0.10714285714285714, 0.32142857142857145], - [0.10714285714285714, 0.32142857142857145], - [0.17857142857142858, 0.32142857142857145], - [0.17857142857142858, 0.32142857142857145], - [0.25, 0.32142857142857145], - [0.25, 0.32142857142857145], - [0.32142857142857145, 0.32142857142857145], - [0.32142857142857145, 0.32142857142857145], - [0.39285714285714285, 0.32142857142857145], - [0.39285714285714285, 0.32142857142857145], - [0.4642857142857143, 0.32142857142857145], - [0.4642857142857143, 0.32142857142857145], - 
[0.5357142857142857, 0.32142857142857145], - [0.5357142857142857, 0.32142857142857145], - [0.6071428571428571, 0.32142857142857145], - [0.6071428571428571, 0.32142857142857145], - [0.6785714285714286, 0.32142857142857145], - [0.6785714285714286, 0.32142857142857145], - [0.75, 0.32142857142857145], - [0.75, 0.32142857142857145], - [0.8214285714285714, 0.32142857142857145], - [0.8214285714285714, 0.32142857142857145], - [0.8928571428571429, 0.32142857142857145], - [0.8928571428571429, 0.32142857142857145], - [0.9642857142857143, 0.32142857142857145], - [0.9642857142857143, 0.32142857142857145], - [0.03571428571428571, 0.39285714285714285], - [0.03571428571428571, 0.39285714285714285], - [0.10714285714285714, 0.39285714285714285], - [0.10714285714285714, 0.39285714285714285], - [0.17857142857142858, 0.39285714285714285], - [0.17857142857142858, 0.39285714285714285], - [0.25, 0.39285714285714285], - [0.25, 0.39285714285714285], - [0.32142857142857145, 0.39285714285714285], - [0.32142857142857145, 0.39285714285714285], - [0.39285714285714285, 0.39285714285714285], - [0.39285714285714285, 0.39285714285714285], - [0.4642857142857143, 0.39285714285714285], - [0.4642857142857143, 0.39285714285714285], - [0.5357142857142857, 0.39285714285714285], - [0.5357142857142857, 0.39285714285714285], - [0.6071428571428571, 0.39285714285714285], - [0.6071428571428571, 0.39285714285714285], - [0.6785714285714286, 0.39285714285714285], - [0.6785714285714286, 0.39285714285714285], - [0.75, 0.39285714285714285], - [0.75, 0.39285714285714285], - [0.8214285714285714, 0.39285714285714285], - [0.8214285714285714, 0.39285714285714285], - [0.8928571428571429, 0.39285714285714285], - [0.8928571428571429, 0.39285714285714285], - [0.9642857142857143, 0.39285714285714285], - [0.9642857142857143, 0.39285714285714285], - [0.03571428571428571, 0.4642857142857143], - [0.03571428571428571, 0.4642857142857143], - [0.10714285714285714, 0.4642857142857143], - [0.10714285714285714, 0.4642857142857143], - 
[0.17857142857142858, 0.4642857142857143], - [0.17857142857142858, 0.4642857142857143], - [0.25, 0.4642857142857143], - [0.25, 0.4642857142857143], - [0.32142857142857145, 0.4642857142857143], - [0.32142857142857145, 0.4642857142857143], - [0.39285714285714285, 0.4642857142857143], - [0.39285714285714285, 0.4642857142857143], - [0.4642857142857143, 0.4642857142857143], - [0.4642857142857143, 0.4642857142857143], - [0.5357142857142857, 0.4642857142857143], - [0.5357142857142857, 0.4642857142857143], - [0.6071428571428571, 0.4642857142857143], - [0.6071428571428571, 0.4642857142857143], - [0.6785714285714286, 0.4642857142857143], - [0.6785714285714286, 0.4642857142857143], - [0.75, 0.4642857142857143], - [0.75, 0.4642857142857143], - [0.8214285714285714, 0.4642857142857143], - [0.8214285714285714, 0.4642857142857143], - [0.8928571428571429, 0.4642857142857143], - [0.8928571428571429, 0.4642857142857143], - [0.9642857142857143, 0.4642857142857143], - [0.9642857142857143, 0.4642857142857143], - [0.03571428571428571, 0.5357142857142857], - [0.03571428571428571, 0.5357142857142857], - [0.10714285714285714, 0.5357142857142857], - [0.10714285714285714, 0.5357142857142857], - [0.17857142857142858, 0.5357142857142857], - [0.17857142857142858, 0.5357142857142857], - [0.25, 0.5357142857142857], - [0.25, 0.5357142857142857], - [0.32142857142857145, 0.5357142857142857], - [0.32142857142857145, 0.5357142857142857], - [0.39285714285714285, 0.5357142857142857], - [0.39285714285714285, 0.5357142857142857], - [0.4642857142857143, 0.5357142857142857], - [0.4642857142857143, 0.5357142857142857], - [0.5357142857142857, 0.5357142857142857], - [0.5357142857142857, 0.5357142857142857], - [0.6071428571428571, 0.5357142857142857], - [0.6071428571428571, 0.5357142857142857], - [0.6785714285714286, 0.5357142857142857], - [0.6785714285714286, 0.5357142857142857], - [0.75, 0.5357142857142857], - [0.75, 0.5357142857142857], - [0.8214285714285714, 0.5357142857142857], - [0.8214285714285714, 
0.5357142857142857], - [0.8928571428571429, 0.5357142857142857], - [0.8928571428571429, 0.5357142857142857], - [0.9642857142857143, 0.5357142857142857], - [0.9642857142857143, 0.5357142857142857], - [0.03571428571428571, 0.6071428571428571], - [0.03571428571428571, 0.6071428571428571], - [0.10714285714285714, 0.6071428571428571], - [0.10714285714285714, 0.6071428571428571], - [0.17857142857142858, 0.6071428571428571], - [0.17857142857142858, 0.6071428571428571], - [0.25, 0.6071428571428571], - [0.25, 0.6071428571428571], - [0.32142857142857145, 0.6071428571428571], - [0.32142857142857145, 0.6071428571428571], - [0.39285714285714285, 0.6071428571428571], - [0.39285714285714285, 0.6071428571428571], - [0.4642857142857143, 0.6071428571428571], - [0.4642857142857143, 0.6071428571428571], - [0.5357142857142857, 0.6071428571428571], - [0.5357142857142857, 0.6071428571428571], - [0.6071428571428571, 0.6071428571428571], - [0.6071428571428571, 0.6071428571428571], - [0.6785714285714286, 0.6071428571428571], - [0.6785714285714286, 0.6071428571428571], - [0.75, 0.6071428571428571], - [0.75, 0.6071428571428571], - [0.8214285714285714, 0.6071428571428571], - [0.8214285714285714, 0.6071428571428571], - [0.8928571428571429, 0.6071428571428571], - [0.8928571428571429, 0.6071428571428571], - [0.9642857142857143, 0.6071428571428571], - [0.9642857142857143, 0.6071428571428571], - [0.03571428571428571, 0.6785714285714286], - [0.03571428571428571, 0.6785714285714286], - [0.10714285714285714, 0.6785714285714286], - [0.10714285714285714, 0.6785714285714286], - [0.17857142857142858, 0.6785714285714286], - [0.17857142857142858, 0.6785714285714286], - [0.25, 0.6785714285714286], - [0.25, 0.6785714285714286], - [0.32142857142857145, 0.6785714285714286], - [0.32142857142857145, 0.6785714285714286], - [0.39285714285714285, 0.6785714285714286], - [0.39285714285714285, 0.6785714285714286], - [0.4642857142857143, 0.6785714285714286], - [0.4642857142857143, 0.6785714285714286], - 
[0.5357142857142857, 0.6785714285714286], - [0.5357142857142857, 0.6785714285714286], - [0.6071428571428571, 0.6785714285714286], - [0.6071428571428571, 0.6785714285714286], - [0.6785714285714286, 0.6785714285714286], - [0.6785714285714286, 0.6785714285714286], - [0.75, 0.6785714285714286], - [0.75, 0.6785714285714286], - [0.8214285714285714, 0.6785714285714286], - [0.8214285714285714, 0.6785714285714286], - [0.8928571428571429, 0.6785714285714286], - [0.8928571428571429, 0.6785714285714286], - [0.9642857142857143, 0.6785714285714286], - [0.9642857142857143, 0.6785714285714286], - [0.03571428571428571, 0.75], - [0.03571428571428571, 0.75], - [0.10714285714285714, 0.75], - [0.10714285714285714, 0.75], - [0.17857142857142858, 0.75], - [0.17857142857142858, 0.75], - [0.25, 0.75], - [0.25, 0.75], - [0.32142857142857145, 0.75], - [0.32142857142857145, 0.75], - [0.39285714285714285, 0.75], - [0.39285714285714285, 0.75], - [0.4642857142857143, 0.75], - [0.4642857142857143, 0.75], - [0.5357142857142857, 0.75], - [0.5357142857142857, 0.75], - [0.6071428571428571, 0.75], - [0.6071428571428571, 0.75], - [0.6785714285714286, 0.75], - [0.6785714285714286, 0.75], - [0.75, 0.75], - [0.75, 0.75], - [0.8214285714285714, 0.75], - [0.8214285714285714, 0.75], - [0.8928571428571429, 0.75], - [0.8928571428571429, 0.75], - [0.9642857142857143, 0.75], - [0.9642857142857143, 0.75], - [0.03571428571428571, 0.8214285714285714], - [0.03571428571428571, 0.8214285714285714], - [0.10714285714285714, 0.8214285714285714], - [0.10714285714285714, 0.8214285714285714], - [0.17857142857142858, 0.8214285714285714], - [0.17857142857142858, 0.8214285714285714], - [0.25, 0.8214285714285714], - [0.25, 0.8214285714285714], - [0.32142857142857145, 0.8214285714285714], - [0.32142857142857145, 0.8214285714285714], - [0.39285714285714285, 0.8214285714285714], - [0.39285714285714285, 0.8214285714285714], - [0.4642857142857143, 0.8214285714285714], - [0.4642857142857143, 0.8214285714285714], - 
[0.5357142857142857, 0.8214285714285714], - [0.5357142857142857, 0.8214285714285714], - [0.6071428571428571, 0.8214285714285714], - [0.6071428571428571, 0.8214285714285714], - [0.6785714285714286, 0.8214285714285714], - [0.6785714285714286, 0.8214285714285714], - [0.75, 0.8214285714285714], - [0.75, 0.8214285714285714], - [0.8214285714285714, 0.8214285714285714], - [0.8214285714285714, 0.8214285714285714], - [0.8928571428571429, 0.8214285714285714], - [0.8928571428571429, 0.8214285714285714], - [0.9642857142857143, 0.8214285714285714], - [0.9642857142857143, 0.8214285714285714], - [0.03571428571428571, 0.8928571428571429], - [0.03571428571428571, 0.8928571428571429], - [0.10714285714285714, 0.8928571428571429], - [0.10714285714285714, 0.8928571428571429], - [0.17857142857142858, 0.8928571428571429], - [0.17857142857142858, 0.8928571428571429], - [0.25, 0.8928571428571429], - [0.25, 0.8928571428571429], - [0.32142857142857145, 0.8928571428571429], - [0.32142857142857145, 0.8928571428571429], - [0.39285714285714285, 0.8928571428571429], - [0.39285714285714285, 0.8928571428571429], - [0.4642857142857143, 0.8928571428571429], - [0.4642857142857143, 0.8928571428571429], - [0.5357142857142857, 0.8928571428571429], - [0.5357142857142857, 0.8928571428571429], - [0.6071428571428571, 0.8928571428571429], - [0.6071428571428571, 0.8928571428571429], - [0.6785714285714286, 0.8928571428571429], - [0.6785714285714286, 0.8928571428571429], - [0.75, 0.8928571428571429], - [0.75, 0.8928571428571429], - [0.8214285714285714, 0.8928571428571429], - [0.8214285714285714, 0.8928571428571429], - [0.8928571428571429, 0.8928571428571429], - [0.8928571428571429, 0.8928571428571429], - [0.9642857142857143, 0.8928571428571429], - [0.9642857142857143, 0.8928571428571429], - [0.03571428571428571, 0.9642857142857143], - [0.03571428571428571, 0.9642857142857143], - [0.10714285714285714, 0.9642857142857143], - [0.10714285714285714, 0.9642857142857143], - [0.17857142857142858, 0.9642857142857143], - 
[0.17857142857142858, 0.9642857142857143], - [0.25, 0.9642857142857143], - [0.25, 0.9642857142857143], - [0.32142857142857145, 0.9642857142857143], - [0.32142857142857145, 0.9642857142857143], - [0.39285714285714285, 0.9642857142857143], - [0.39285714285714285, 0.9642857142857143], - [0.4642857142857143, 0.9642857142857143], - [0.4642857142857143, 0.9642857142857143], - [0.5357142857142857, 0.9642857142857143], - [0.5357142857142857, 0.9642857142857143], - [0.6071428571428571, 0.9642857142857143], - [0.6071428571428571, 0.9642857142857143], - [0.6785714285714286, 0.9642857142857143], - [0.6785714285714286, 0.9642857142857143], - [0.75, 0.9642857142857143], - [0.75, 0.9642857142857143], - [0.8214285714285714, 0.9642857142857143], - [0.8214285714285714, 0.9642857142857143], - [0.8928571428571429, 0.9642857142857143], - [0.8928571428571429, 0.9642857142857143], - [0.9642857142857143, 0.9642857142857143], - [0.9642857142857143, 0.9642857142857143], - [0.07142857142857142, 0.07142857142857142], - [0.07142857142857142, 0.07142857142857142], - [0.07142857142857142, 0.07142857142857142], - [0.07142857142857142, 0.07142857142857142], - [0.07142857142857142, 0.07142857142857142], - [0.07142857142857142, 0.07142857142857142], - [0.21428571428571427, 0.07142857142857142], - [0.21428571428571427, 0.07142857142857142], - [0.21428571428571427, 0.07142857142857142], - [0.21428571428571427, 0.07142857142857142], - [0.21428571428571427, 0.07142857142857142], - [0.21428571428571427, 0.07142857142857142], - [0.35714285714285715, 0.07142857142857142], - [0.35714285714285715, 0.07142857142857142], - [0.35714285714285715, 0.07142857142857142], - [0.35714285714285715, 0.07142857142857142], - [0.35714285714285715, 0.07142857142857142], - [0.35714285714285715, 0.07142857142857142], - [0.5, 0.07142857142857142], - [0.5, 0.07142857142857142], - [0.5, 0.07142857142857142], - [0.5, 0.07142857142857142], - [0.5, 0.07142857142857142], - [0.5, 0.07142857142857142], - [0.6428571428571429, 
0.07142857142857142], - [0.6428571428571429, 0.07142857142857142], - [0.6428571428571429, 0.07142857142857142], - [0.6428571428571429, 0.07142857142857142], - [0.6428571428571429, 0.07142857142857142], - [0.6428571428571429, 0.07142857142857142], - [0.7857142857142857, 0.07142857142857142], - [0.7857142857142857, 0.07142857142857142], - [0.7857142857142857, 0.07142857142857142], - [0.7857142857142857, 0.07142857142857142], - [0.7857142857142857, 0.07142857142857142], - [0.7857142857142857, 0.07142857142857142], - [0.9285714285714286, 0.07142857142857142], - [0.9285714285714286, 0.07142857142857142], - [0.9285714285714286, 0.07142857142857142], - [0.9285714285714286, 0.07142857142857142], - [0.9285714285714286, 0.07142857142857142], - [0.9285714285714286, 0.07142857142857142], - [0.07142857142857142, 0.21428571428571427], - [0.07142857142857142, 0.21428571428571427], - [0.07142857142857142, 0.21428571428571427], - [0.07142857142857142, 0.21428571428571427], - [0.07142857142857142, 0.21428571428571427], - [0.07142857142857142, 0.21428571428571427], - [0.21428571428571427, 0.21428571428571427], - [0.21428571428571427, 0.21428571428571427], - [0.21428571428571427, 0.21428571428571427], - [0.21428571428571427, 0.21428571428571427], - [0.21428571428571427, 0.21428571428571427], - [0.21428571428571427, 0.21428571428571427], - [0.35714285714285715, 0.21428571428571427], - [0.35714285714285715, 0.21428571428571427], - [0.35714285714285715, 0.21428571428571427], - [0.35714285714285715, 0.21428571428571427], - [0.35714285714285715, 0.21428571428571427], - [0.35714285714285715, 0.21428571428571427], - [0.5, 0.21428571428571427], - [0.5, 0.21428571428571427], - [0.5, 0.21428571428571427], - [0.5, 0.21428571428571427], - [0.5, 0.21428571428571427], - [0.5, 0.21428571428571427], - [0.6428571428571429, 0.21428571428571427], - [0.6428571428571429, 0.21428571428571427], - [0.6428571428571429, 0.21428571428571427], - [0.6428571428571429, 0.21428571428571427], - [0.6428571428571429, 
0.21428571428571427], - [0.6428571428571429, 0.21428571428571427], - [0.7857142857142857, 0.21428571428571427], - [0.7857142857142857, 0.21428571428571427], - [0.7857142857142857, 0.21428571428571427], - [0.7857142857142857, 0.21428571428571427], - [0.7857142857142857, 0.21428571428571427], - [0.7857142857142857, 0.21428571428571427], - [0.9285714285714286, 0.21428571428571427], - [0.9285714285714286, 0.21428571428571427], - [0.9285714285714286, 0.21428571428571427], - [0.9285714285714286, 0.21428571428571427], - [0.9285714285714286, 0.21428571428571427], - [0.9285714285714286, 0.21428571428571427], - [0.07142857142857142, 0.35714285714285715], - [0.07142857142857142, 0.35714285714285715], - [0.07142857142857142, 0.35714285714285715], - [0.07142857142857142, 0.35714285714285715], - [0.07142857142857142, 0.35714285714285715], - [0.07142857142857142, 0.35714285714285715], - [0.21428571428571427, 0.35714285714285715], - [0.21428571428571427, 0.35714285714285715], - [0.21428571428571427, 0.35714285714285715], - [0.21428571428571427, 0.35714285714285715], - [0.21428571428571427, 0.35714285714285715], - [0.21428571428571427, 0.35714285714285715], - [0.35714285714285715, 0.35714285714285715], - [0.35714285714285715, 0.35714285714285715], - [0.35714285714285715, 0.35714285714285715], - [0.35714285714285715, 0.35714285714285715], - [0.35714285714285715, 0.35714285714285715], - [0.35714285714285715, 0.35714285714285715], - [0.5, 0.35714285714285715], - [0.5, 0.35714285714285715], - [0.5, 0.35714285714285715], - [0.5, 0.35714285714285715], - [0.5, 0.35714285714285715], - [0.5, 0.35714285714285715], - [0.6428571428571429, 0.35714285714285715], - [0.6428571428571429, 0.35714285714285715], - [0.6428571428571429, 0.35714285714285715], - [0.6428571428571429, 0.35714285714285715], - [0.6428571428571429, 0.35714285714285715], - [0.6428571428571429, 0.35714285714285715], - [0.7857142857142857, 0.35714285714285715], - [0.7857142857142857, 0.35714285714285715], - [0.7857142857142857, 
0.35714285714285715], - [0.7857142857142857, 0.35714285714285715], - [0.7857142857142857, 0.35714285714285715], - [0.7857142857142857, 0.35714285714285715], - [0.9285714285714286, 0.35714285714285715], - [0.9285714285714286, 0.35714285714285715], - [0.9285714285714286, 0.35714285714285715], - [0.9285714285714286, 0.35714285714285715], - [0.9285714285714286, 0.35714285714285715], - [0.9285714285714286, 0.35714285714285715], - [0.07142857142857142, 0.5], - [0.07142857142857142, 0.5], - [0.07142857142857142, 0.5], - [0.07142857142857142, 0.5], - [0.07142857142857142, 0.5], - [0.07142857142857142, 0.5], - [0.21428571428571427, 0.5], - [0.21428571428571427, 0.5], - [0.21428571428571427, 0.5], - [0.21428571428571427, 0.5], - [0.21428571428571427, 0.5], - [0.21428571428571427, 0.5], - [0.35714285714285715, 0.5], - [0.35714285714285715, 0.5], - [0.35714285714285715, 0.5], - [0.35714285714285715, 0.5], - [0.35714285714285715, 0.5], - [0.35714285714285715, 0.5], - [0.5, 0.5], - [0.5, 0.5], - [0.5, 0.5], - [0.5, 0.5], - [0.5, 0.5], - [0.5, 0.5], - [0.6428571428571429, 0.5], - [0.6428571428571429, 0.5], - [0.6428571428571429, 0.5], - [0.6428571428571429, 0.5], - [0.6428571428571429, 0.5], - [0.6428571428571429, 0.5], - [0.7857142857142857, 0.5], - [0.7857142857142857, 0.5], - [0.7857142857142857, 0.5], - [0.7857142857142857, 0.5], - [0.7857142857142857, 0.5], - [0.7857142857142857, 0.5], - [0.9285714285714286, 0.5], - [0.9285714285714286, 0.5], - [0.9285714285714286, 0.5], - [0.9285714285714286, 0.5], - [0.9285714285714286, 0.5], - [0.9285714285714286, 0.5], - [0.07142857142857142, 0.6428571428571429], - [0.07142857142857142, 0.6428571428571429], - [0.07142857142857142, 0.6428571428571429], - [0.07142857142857142, 0.6428571428571429], - [0.07142857142857142, 0.6428571428571429], - [0.07142857142857142, 0.6428571428571429], - [0.21428571428571427, 0.6428571428571429], - [0.21428571428571427, 0.6428571428571429], - [0.21428571428571427, 0.6428571428571429], - 
[0.21428571428571427, 0.6428571428571429], - [0.21428571428571427, 0.6428571428571429], - [0.21428571428571427, 0.6428571428571429], - [0.35714285714285715, 0.6428571428571429], - [0.35714285714285715, 0.6428571428571429], - [0.35714285714285715, 0.6428571428571429], - [0.35714285714285715, 0.6428571428571429], - [0.35714285714285715, 0.6428571428571429], - [0.35714285714285715, 0.6428571428571429], - [0.5, 0.6428571428571429], - [0.5, 0.6428571428571429], - [0.5, 0.6428571428571429], - [0.5, 0.6428571428571429], - [0.5, 0.6428571428571429], - [0.5, 0.6428571428571429], - [0.6428571428571429, 0.6428571428571429], - [0.6428571428571429, 0.6428571428571429], - [0.6428571428571429, 0.6428571428571429], - [0.6428571428571429, 0.6428571428571429], - [0.6428571428571429, 0.6428571428571429], - [0.6428571428571429, 0.6428571428571429], - [0.7857142857142857, 0.6428571428571429], - [0.7857142857142857, 0.6428571428571429], - [0.7857142857142857, 0.6428571428571429], - [0.7857142857142857, 0.6428571428571429], - [0.7857142857142857, 0.6428571428571429], - [0.7857142857142857, 0.6428571428571429], - [0.9285714285714286, 0.6428571428571429], - [0.9285714285714286, 0.6428571428571429], - [0.9285714285714286, 0.6428571428571429], - [0.9285714285714286, 0.6428571428571429], - [0.9285714285714286, 0.6428571428571429], - [0.9285714285714286, 0.6428571428571429], - [0.07142857142857142, 0.7857142857142857], - [0.07142857142857142, 0.7857142857142857], - [0.07142857142857142, 0.7857142857142857], - [0.07142857142857142, 0.7857142857142857], - [0.07142857142857142, 0.7857142857142857], - [0.07142857142857142, 0.7857142857142857], - [0.21428571428571427, 0.7857142857142857], - [0.21428571428571427, 0.7857142857142857], - [0.21428571428571427, 0.7857142857142857], - [0.21428571428571427, 0.7857142857142857], - [0.21428571428571427, 0.7857142857142857], - [0.21428571428571427, 0.7857142857142857], - [0.35714285714285715, 0.7857142857142857], - [0.35714285714285715, 0.7857142857142857], 
- [0.35714285714285715, 0.7857142857142857], - [0.35714285714285715, 0.7857142857142857], - [0.35714285714285715, 0.7857142857142857], - [0.35714285714285715, 0.7857142857142857], - [0.5, 0.7857142857142857], - [0.5, 0.7857142857142857], - [0.5, 0.7857142857142857], - [0.5, 0.7857142857142857], - [0.5, 0.7857142857142857], - [0.5, 0.7857142857142857], - [0.6428571428571429, 0.7857142857142857], - [0.6428571428571429, 0.7857142857142857], - [0.6428571428571429, 0.7857142857142857], - [0.6428571428571429, 0.7857142857142857], - [0.6428571428571429, 0.7857142857142857], - [0.6428571428571429, 0.7857142857142857], - [0.7857142857142857, 0.7857142857142857], - [0.7857142857142857, 0.7857142857142857], - [0.7857142857142857, 0.7857142857142857], - [0.7857142857142857, 0.7857142857142857], - [0.7857142857142857, 0.7857142857142857], - [0.7857142857142857, 0.7857142857142857], - [0.9285714285714286, 0.7857142857142857], - [0.9285714285714286, 0.7857142857142857], - [0.9285714285714286, 0.7857142857142857], - [0.9285714285714286, 0.7857142857142857], - [0.9285714285714286, 0.7857142857142857], - [0.9285714285714286, 0.7857142857142857], - [0.07142857142857142, 0.9285714285714286], - [0.07142857142857142, 0.9285714285714286], - [0.07142857142857142, 0.9285714285714286], - [0.07142857142857142, 0.9285714285714286], - [0.07142857142857142, 0.9285714285714286], - [0.07142857142857142, 0.9285714285714286], - [0.21428571428571427, 0.9285714285714286], - [0.21428571428571427, 0.9285714285714286], - [0.21428571428571427, 0.9285714285714286], - [0.21428571428571427, 0.9285714285714286], - [0.21428571428571427, 0.9285714285714286], - [0.21428571428571427, 0.9285714285714286], - [0.35714285714285715, 0.9285714285714286], - [0.35714285714285715, 0.9285714285714286], - [0.35714285714285715, 0.9285714285714286], - [0.35714285714285715, 0.9285714285714286], - [0.35714285714285715, 0.9285714285714286], - [0.35714285714285715, 0.9285714285714286], - [0.5, 0.9285714285714286], - [0.5, 
0.9285714285714286], - [0.5, 0.9285714285714286], - [0.5, 0.9285714285714286], - [0.5, 0.9285714285714286], - [0.5, 0.9285714285714286], - [0.6428571428571429, 0.9285714285714286], - [0.6428571428571429, 0.9285714285714286], - [0.6428571428571429, 0.9285714285714286], - [0.6428571428571429, 0.9285714285714286], - [0.6428571428571429, 0.9285714285714286], - [0.6428571428571429, 0.9285714285714286], - [0.7857142857142857, 0.9285714285714286], - [0.7857142857142857, 0.9285714285714286], - [0.7857142857142857, 0.9285714285714286], - [0.7857142857142857, 0.9285714285714286], - [0.7857142857142857, 0.9285714285714286], - [0.7857142857142857, 0.9285714285714286], - [0.9285714285714286, 0.9285714285714286], - [0.9285714285714286, 0.9285714285714286], - [0.9285714285714286, 0.9285714285714286], - [0.9285714285714286, 0.9285714285714286], - [0.9285714285714286, 0.9285714285714286], - [0.9285714285714286, 0.9285714285714286]], dtype=np.float32) diff --git a/models/person_detection_mediapipe/person_detection_mediapipe_2023mar.onnx b/models/person_detection_mediapipe/person_detection_mediapipe_2023mar.onnx deleted file mode 100644 index 1780b51b..00000000 --- a/models/person_detection_mediapipe/person_detection_mediapipe_2023mar.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:47fd5599d6fa17608f03e0eb0ae230baa6e597d7e8a2c8199fe00abea55a701f -size 11990159 diff --git a/models/person_detection_mediapipe/person_detection_mediapipe_2023mar_int8bq.onnx b/models/person_detection_mediapipe/person_detection_mediapipe_2023mar_int8bq.onnx deleted file mode 100644 index 39078315..00000000 --- a/models/person_detection_mediapipe/person_detection_mediapipe_2023mar_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c5ed8c00c028b98e5d2c55b920a6e975af6c4cd538cfeea7c054f4fbbd8b9075 -size 3482053 diff --git a/models/person_reid_youtureid/CMakeLists.txt b/models/person_reid_youtureid/CMakeLists.txt deleted 
file mode 100644 index b8745c63..00000000 --- a/models/person_reid_youtureid/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.24.0) -project(opencv_zoo_person_reid_youtureid) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") - -# Find OpenCV -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) - -add_executable(demo demo.cpp) -target_link_libraries(demo ${OpenCV_LIBS}) \ No newline at end of file diff --git a/models/person_reid_youtureid/LICENSE b/models/person_reid_youtureid/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/models/person_reid_youtureid/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/person_reid_youtureid/README.md b/models/person_reid_youtureid/README.md deleted file mode 100644 index 650b1ab3..00000000 --- a/models/person_reid_youtureid/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# Youtu ReID Baseline - -This model is provided by Tencent Youtu Lab [[Credits]](https://github.com/opencv/opencv/blob/394e640909d5d8edf9c1f578f8216d513373698c/samples/dnn/person_reid.py#L6-L11). - -**Note**: -- Model source: https://github.com/ReID-Team/ReID_extra_testdata -- `person_reid_youtu_2021nov_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -## Demo - -Run the following command to try the demo: - -### Python -```shell -python demo.py --query_dir /path/to/query --gallery_dir /path/to/gallery -v - -# get help regarding various parameters -python demo.py --help -``` - -### C++ -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -./build/demo --query_dir=/path/to/query --gallery_dir=/path/to/gallery -v - -# get help regarding various parameters -./build/demo --help -``` - -### License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference: - -- OpenCV DNN Sample: https://github.com/opencv/opencv/blob/4.x/samples/dnn/person_reid.py -- Model source: https://github.com/ReID-Team/ReID_extra_testdata diff --git a/models/person_reid_youtureid/demo.cpp b/models/person_reid_youtureid/demo.cpp deleted file mode 100644 index bac7cc1f..00000000 --- a/models/person_reid_youtureid/demo.cpp +++ /dev/null @@ -1,308 +0,0 @@ -#include -#include "opencv2/dnn.hpp" -#include -#include -#include -#include -#include - - -// YoutuReID class for person re-identification -class YoutuReID { -public: - YoutuReID(const std::string& model_path, - const cv::Size& input_size = cv::Size(128, 256), - int output_dim = 768, - const cv::Scalar& mean = cv::Scalar(0.485, 0.456, 0.406), - const cv::Scalar& std = cv::Scalar(0.229, 0.224, 0.225), - int backend_id = 0, - int target_id = 0) - : model_path_(model_path), input_size_(input_size), - output_dim_(output_dim), mean_(mean), std_(std), - backend_id_(backend_id), target_id_(target_id) - { - - model_ = cv::dnn::readNet(model_path_); - model_.setPreferableBackend(backend_id_); - model_.setPreferableTarget(target_id_); - } - - void setBackendAndTarget(int backend_id, int target_id) { - backend_id_ = backend_id; - target_id_ = target_id; - model_.setPreferableBackend(backend_id_); - model_.setPreferableTarget(target_id_); - } - - void setInputSize(const cv::Size& input_size) { - input_size_ = input_size; - } - - // Preprocess image by resizing, normalizing, and creating a blob - cv::Mat preprocess(const cv::Mat& image) { - cv::Mat img; - cv::cvtColor(image, img, cv::COLOR_BGR2RGB); - img.convertTo(img, CV_32F, 1.0 / 255.0); - - // Normalize each channel separately - std::vector channels(3); - cv::split(img, channels); - channels[0] = (channels[0] - mean_[0]) / std_[0]; - channels[1] = (channels[1] - mean_[1]) / std_[1]; - channels[2] = (channels[2] - mean_[2]) / std_[2]; - cv::merge(channels, img); - - return cv::dnn::blobFromImage(img); - } - - // Run inference to extract 
feature vector - cv::Mat infer(const cv::Mat& image) { - cv::Mat input_blob = preprocess(image); - model_.setInput(input_blob); - cv::Mat features = model_.forward(); - - if (features.dims == 4 && features.size[2] == 1 && features.size[3] == 1) { - features = features.reshape(1, {1, features.size[1]}); - } - - return features; - } - - // Perform query, comparing each query image to each gallery image - std::vector> query(const std::vector& query_img_list, - const std::vector& gallery_img_list, - int topK = 5) { - std::vector query_features_list, gallery_features_list; - cv::Mat query_features, gallery_features; - - for (size_t i = 0; i < query_img_list.size(); ++i) { - cv::Mat feature = infer(query_img_list[i]); - query_features_list.push_back(feature.clone()); - } - cv::vconcat(query_features_list, query_features); - normalizeFeatures(query_features); - - for (size_t i = 0; i < gallery_img_list.size(); ++i) { - cv::Mat feature = infer(gallery_img_list[i]); - gallery_features_list.push_back(feature.clone()); - } - cv::vconcat(gallery_features_list, gallery_features); - normalizeFeatures(gallery_features); - - cv::Mat dist = query_features * gallery_features.t(); - return getTopK(dist, topK); - } - -private: - // Normalize feature vectors row-wise to unit length - void normalizeFeatures(cv::Mat& features) { - const float epsilon = 1e-6; - for (int i = 0; i < features.rows; ++i) { - cv::Mat featureRow = features.row(i); - float norm = cv::norm(featureRow, cv::NORM_L2); - if (norm < epsilon) { - norm = epsilon; - } - featureRow /= norm; - } - } - - // Retrieve Top-K indices from similarity matrix - std::vector> getTopK(const cv::Mat& dist, int topK) { - std::vector> indices(dist.rows); - - for (int i = 0; i < dist.rows; ++i) { - std::vector> sim_index_pairs; - for (int j = 0; j < dist.cols; ++j) { - sim_index_pairs.emplace_back(dist.at(i, j), j); - } - std::sort(sim_index_pairs.begin(), sim_index_pairs.end(), - [](const std::pair& a, const std::pair& b) { - return 
a.first > b.first; - }); - - for (int k = 0; k < topK && k < sim_index_pairs.size(); ++k) { - indices[i].push_back(sim_index_pairs[k].second); - } - } - return indices; - } - - std::string model_path_; - cv::Size input_size_; - int output_dim_; - cv::Scalar mean_, std_; - int backend_id_; - int target_id_; - cv::dnn::Net model_; -}; - -// Read images from directory and return a pair of image list and file list -std::pair, std::vector> readImagesFromDirectory(const std::string& img_dir, int w = 128, int h = 256) { - std::vector img_list; - std::vector file_list; - - std::vector file_names; - cv::glob(img_dir + "/*", file_names, false); - - for (size_t i = 0; i < file_names.size(); ++i) { - std::string file_name = file_names[i].substr(file_names[i].find_last_of("/\\") + 1); - cv::Mat img = cv::imread(file_names[i]); - if (!img.empty()) { - cv::resize(img, img, cv::Size(w, h)); - img_list.push_back(img); - file_list.push_back(file_name); - } - } - return std::make_pair(img_list, file_list); -} - -// Visualize query and gallery results by creating concatenated images -std::map visualize( - const std::map>& results, - const std::string& query_dir, - const std::string& gallery_dir, - const cv::Size& output_size = cv::Size(128, 384)) { - - std::map results_vis; - - for (std::map>::const_iterator it = results.begin(); it != results.end(); ++it) { - const std::string& query_file = it->first; - const std::vector& top_matches = it->second; - - cv::Mat query_img = cv::imread(query_dir + "/" + query_file); - if (query_img.empty()) continue; - - cv::resize(query_img, query_img, output_size); - cv::copyMakeBorder(query_img, query_img, 5, 5, 5, 5, - cv::BORDER_CONSTANT, cv::Scalar(0, 0, 0)); - cv::putText(query_img, "Query", cv::Point(10, 30), - cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(0, 255, 0), 2); - - cv::Mat concat_img = query_img; - - for (size_t i = 0; i < top_matches.size(); ++i) { - cv::Mat gallery_img = cv::imread(gallery_dir + "/" + top_matches[i]); - if 
(gallery_img.empty()) continue; - - cv::resize(gallery_img, gallery_img, output_size); - cv::copyMakeBorder(gallery_img, gallery_img, 5, 5, 5, 5, - cv::BORDER_CONSTANT, cv::Scalar(255, 255, 255)); - cv::putText(gallery_img, "G" + std::to_string(i), cv::Point(10, 30), - cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(0, 255, 0), 2); - - cv::hconcat(concat_img, gallery_img, concat_img); - } - results_vis[query_file] = concat_img; - } - return results_vis; -} - -void printHelpMessage() { - std::cout << "usage: demo.cpp [-h] [--query_dir QUERY_DIR] [--gallery_dir GALLERY_DIR] " - << "[--backend_target BACKEND_TARGET] [--topk TOPK] [--model MODEL] [--save] [--vis]\n\n" - << "ReID baseline models from Tencent Youtu Lab\n\n" - << "optional arguments:\n" - << " -h, --help show this help message and exit\n" - << " --query_dir QUERY_DIR, -q QUERY_DIR\n" - << " Query directory.\n" - << " --gallery_dir GALLERY_DIR, -g GALLERY_DIR\n" - << " Gallery directory.\n" - << " --backend_target BACKEND_TARGET, -bt BACKEND_TARGET\n" - << " Choose one of the backend-target pair to run this demo: 0: (default) OpenCV implementation + " - "CPU, 1: CUDA + GPU (CUDA), 2: CUDA + GPU (CUDA FP16), 3: TIM-VX + NPU, 4: CANN + NPU\n" - << " --topk TOPK Top-K closest from gallery for each query.\n" - << " --model MODEL, -m MODEL\n" - << " Path to the model.\n" - << " --save, -s Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in " - "case of camera input.\n" - << " --vis, -v Usage: Specify to open a new window to show results. 
Invalid in case of camera input.\n"; -} - -int main(int argc, char** argv) { - // CommandLineParser setup - cv::CommandLineParser parser(argc, argv, - "{help h | | Show help message.}" - "{query_dir q | | Query directory.}" - "{gallery_dir g | | Gallery directory.}" - "{backend_target bt | 0 | Choose one of the backend-target pair to run this demo: 0: (default) OpenCV implementation + CPU, " - "1: CUDA + GPU (CUDA), 2: CUDA + GPU (CUDA FP16), 3: TIM-VX + NPU, 4: CANN + NPU}" - "{topk k | 10 | Top-K closest from gallery for each query.}" - "{model m | person_reid_youtu_2021nov.onnx | Path to the model.}" - "{save s | false | Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.}" - "{vis v | false | Usage: Specify to open a new window to show results. Invalid in case of camera input.}"); - - if (parser.has("help")) { - printHelpMessage(); - return 0; - } - - std::string query_dir = parser.get("query_dir"); - std::string gallery_dir = parser.get("gallery_dir"); - int backend_target = parser.get("backend_target"); - int topK = parser.get("topk"); - std::string model_path = parser.get("model"); - bool save_flag = parser.get("save"); - bool vis_flag = parser.get("vis"); - - if (!parser.check()) { - parser.printErrors(); - return 1; - } - - const std::vector> backend_target_pairs = { - {cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_CPU}, - {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA}, - {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA_FP16}, - {cv::dnn::DNN_BACKEND_TIMVX, cv::dnn::DNN_TARGET_NPU}, - {cv::dnn::DNN_BACKEND_CANN, cv::dnn::DNN_TARGET_NPU} - }; - - int backend_id = backend_target_pairs[backend_target].first; - int target_id = backend_target_pairs[backend_target].second; - - YoutuReID reid(model_path, cv::Size(128, 256), 768, - cv::Scalar(0.485, 0.456, 0.406), - cv::Scalar(0.229, 0.224, 0.225), - backend_id, target_id); - - std::pair, std::vector> query_data = 
readImagesFromDirectory(query_dir); - std::pair, std::vector> gallery_data = readImagesFromDirectory(gallery_dir); - - std::vector> indices = reid.query(query_data.first, gallery_data.first, topK); - - std::map> results; - for (size_t i = 0; i < query_data.second.size(); ++i) { - std::vector top_matches; - for (int idx : indices[i]) { - top_matches.push_back(gallery_data.second[idx]); - } - results[query_data.second[i]] = top_matches; - std::cout << "Query: " << query_data.second[i] << "\n"; - std::cout << "\tTop-" << topK << " from gallery: "; - for (size_t j = 0; j < top_matches.size(); ++j) { - std::cout << top_matches[j] << " "; - } - std::cout << std::endl; - } - - std::map results_vis = visualize(results, query_dir, gallery_dir); - - if (save_flag) { - for (std::map::iterator it = results_vis.begin(); it != results_vis.end(); ++it) { - std::string save_path = "result-" + it->first; - cv::imwrite(save_path, it->second); - } - } - - if (vis_flag) { - for (std::map::iterator it = results_vis.begin(); it != results_vis.end(); ++it) { - cv::namedWindow("result-" + it->first, cv::WINDOW_AUTOSIZE); - cv::imshow("result-" + it->first, it->second); - cv::waitKey(0); - cv::destroyAllWindows(); - } - } - - return 0; -} diff --git a/models/person_reid_youtureid/demo.py b/models/person_reid_youtureid/demo.py deleted file mode 100644 index 5160b918..00000000 --- a/models/person_reid_youtureid/demo.py +++ /dev/null @@ -1,124 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -import os -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from youtureid import YoutuReID - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser( - description="ReID baseline models from Tencent Youtu Lab") -parser.add_argument('--query_dir', '-q', type=str, - help='Query directory.') -parser.add_argument('--gallery_dir', '-g', type=str, - help='Gallery directory.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--topk', type=int, default=10, - help='Top-K closest from gallery for each query.') -parser.add_argument('--model', '-m', type=str, default='person_reid_youtu_2021nov.onnx', - help='Path to the model.') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. 
Invalid in case of camera input.') -args = parser.parse_args() - -def readImageFromDirectory(img_dir, w=128, h=256): - img_list = [] - file_list = os.listdir(img_dir) - for f in file_list: - img = cv.imread(os.path.join(img_dir, f)) - img = cv.resize(img, (w, h)) - img_list.append(img) - return img_list, file_list - -def visualize(results, query_dir, gallery_dir, output_size=(128, 384)): - def addBorder(img, color, borderSize=5): - border = cv.copyMakeBorder(img, top=borderSize, bottom=borderSize, left=borderSize, right=borderSize, borderType=cv.BORDER_CONSTANT, value=color) - return border - - results_vis = dict.fromkeys(results.keys(), None) - for f, topk_f in results.items(): - query_img = cv.imread(os.path.join(query_dir, f)) - query_img = cv.resize(query_img, output_size) - query_img = addBorder(query_img, [0, 0, 0]) - cv.putText(query_img, 'Query', (10, 30), cv.FONT_HERSHEY_COMPLEX, 1., (0, 255, 0), 2) - - gallery_img_list = [] - for idx, gallery_f in enumerate(topk_f): - gallery_img = cv.imread(os.path.join(gallery_dir, gallery_f)) - gallery_img = cv.resize(gallery_img, output_size) - gallery_img = addBorder(gallery_img, [255, 255, 255]) - cv.putText(gallery_img, 'G{:02d}'.format(idx), (10, 30), cv.FONT_HERSHEY_COMPLEX, 1., (0, 255, 0), 2) - gallery_img_list.append(gallery_img) - - results_vis[f] = np.concatenate([query_img] + gallery_img_list, axis=1) - - return results_vis - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # Instantiate YoutuReID for person ReID - net = YoutuReID(modelPath=args.model, backendId=backend_id, targetId=target_id) - - # Read images from dir - query_img_list, query_file_list = readImageFromDirectory(args.query_dir) - gallery_img_list, gallery_file_list = readImageFromDirectory(args.gallery_dir) - - # Query - topk_indices = net.query(query_img_list, gallery_img_list, args.topk) - - # Index to filename - results = 
dict.fromkeys(query_file_list, None) - for f, indices in zip(query_file_list, topk_indices): - topk_matches = [] - for idx in indices: - topk_matches.append(gallery_file_list[idx]) - results[f] = topk_matches - # Print - print('Query: {}'.format(f)) - print('\tTop-{} from gallery: {}'.format(args.topk, str(topk_matches))) - - # Visualize - results_vis = visualize(results, args.query_dir, args.gallery_dir) - - if args.save: - for f, img in results_vis.items(): - cv.imwrite('result-{}'.format(f), img) - - if args.vis: - for f, img in results_vis.items(): - cv.namedWindow('result-{}'.format(f), cv.WINDOW_AUTOSIZE) - cv.imshow('result-{}'.format(f), img) - cv.waitKey(0) - cv.destroyAllWindows() - diff --git a/models/person_reid_youtureid/person_reid_youtu_2021nov.onnx b/models/person_reid_youtureid/person_reid_youtu_2021nov.onnx deleted file mode 100644 index 1e642cac..00000000 --- a/models/person_reid_youtureid/person_reid_youtu_2021nov.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0579683334d4b9440221606dcb461656dd0dc64143b18f48faedaced9b4f580d -size 106878407 diff --git a/models/person_reid_youtureid/person_reid_youtu_2021nov_int8.onnx b/models/person_reid_youtureid/person_reid_youtu_2021nov_int8.onnx deleted file mode 100644 index f405ab6a..00000000 --- a/models/person_reid_youtureid/person_reid_youtu_2021nov_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4757c4cb759b79030a9870abf29c064c2ee51e079a05700690800c81b16cf245 -size 26763574 diff --git a/models/person_reid_youtureid/person_reid_youtu_2021nov_int8bq.onnx b/models/person_reid_youtureid/person_reid_youtu_2021nov_int8bq.onnx deleted file mode 100644 index 95aa550a..00000000 --- a/models/person_reid_youtureid/person_reid_youtu_2021nov_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2b88597426335e6cd625119bdda090f9d3497bc80ba5b8a8910f65b8ccc09471 -size 29203236 diff 
--git a/models/person_reid_youtureid/youtureid.py b/models/person_reid_youtureid/youtureid.py deleted file mode 100644 index b2fafe16..00000000 --- a/models/person_reid_youtureid/youtureid.py +++ /dev/null @@ -1,67 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. - -import numpy as np -import cv2 as cv - -class YoutuReID: - def __init__(self, modelPath, backendId=0, targetId=0): - self._modelPath = modelPath - self._backendId = backendId - self._targetId = targetId - - self._model = cv.dnn.readNet(modelPath) - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - self._input_size = (128, 256) # fixed - self._output_dim = 768 - self._mean = (0.485, 0.456, 0.406) - self._std = (0.229, 0.224, 0.225) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def _preprocess(self, image): - image = image[:, :, ::-1] - image = (image / 255.0 - self._mean) / self._std - return cv.dnn.blobFromImage(image.astype(np.float32)) - # return cv.dnn.blobFromImage(image, scalefactor=(1.0/255.0), size=self._input_size, mean=self._mean) / self._std - - def infer(self, image): - # Preprocess - inputBlob = self._preprocess(image) - - # Forward - self._model.setInput(inputBlob) - features = self._model.forward() - return np.reshape(features, (features.shape[0], features.shape[1])) - - def query(self, query_img_list, gallery_img_list, topK=5): - query_features_list = [] - for q in query_img_list: - query_features_list.append(self.infer(q)) - 
query_features = np.concatenate(query_features_list, axis=0) - query_norm = np.linalg.norm(query_features, ord=2, axis=1, keepdims=True) - query_arr = query_features / (query_norm + np.finfo(np.float32).eps) - - gallery_features_list = [] - for g in gallery_img_list: - gallery_features_list.append(self.infer(g)) - gallery_features = np.concatenate(gallery_features_list, axis=0) - gallery_norm = np.linalg.norm(gallery_features, ord=2, axis=1, keepdims=True) - gallery_arr = gallery_features / (gallery_norm + np.finfo(np.float32).eps) - - dist = np.matmul(query_arr, gallery_arr.T) - idx = np.argsort(-dist, axis=1) - return [i[0:topK] for i in idx] diff --git a/models/pose_estimation_mediapipe/CMakeLists.txt b/models/pose_estimation_mediapipe/CMakeLists.txt deleted file mode 100644 index e49645ac..00000000 --- a/models/pose_estimation_mediapipe/CMakeLists.txt +++ /dev/null @@ -1,29 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(project_name "opencv_zoo_pose_estimation_mediapipe") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. 
-# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/pose_estimation_mediapipe/LICENSE b/models/pose_estimation_mediapipe/LICENSE deleted file mode 100644 index 7a4a3ea2..00000000 --- a/models/pose_estimation_mediapipe/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/models/pose_estimation_mediapipe/README.md b/models/pose_estimation_mediapipe/README.md deleted file mode 100644 index 30d92ac9..00000000 --- a/models/pose_estimation_mediapipe/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# Pose estimation from MediaPipe Pose - -This model estimates 33 pose keypoints and person segmentation mask per detected person from [person detector](../person_detection_mediapipe). (The image below is referenced from [MediaPipe Pose Keypoints](https://github.com/tensorflow/tfjs-models/tree/master/pose-detection#blazepose-keypoints-used-in-mediapipe-blazepose)) - -![MediaPipe Pose Landmark](examples/pose_landmarks.png) - -This model is converted from TFlite to ONNX using following tools: -- TFLite model to ONNX: https://github.com/onnx/tensorflow-onnx -- simplified by [onnx-simplifier](https://github.com/daquexian/onnx-simplifier) - -**Note**: -- Visit https://github.com/google/mediapipe/blob/master/docs/solutions/models.md#pose for models of larger scale. -- `pose_estimation_mediapipe_2023mar_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. 
- -## Demo - -### python - -Run the following commands to try the demo: -```bash -# detect on camera input -python demo.py -# detect on an image -python demo.py -i /path/to/image -v -``` -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/opencv_zoo_pose_estimation_mediapipe -# detect on an image -./build/opencv_zoo_pose_estimation_mediapipe -m=/path/to/model -i=/path/to/image -v -# get help messages -./build/opencv_zoo_pose_estimation_mediapipe -h -``` - -### Example outputs - -![webcam demo](./example_outputs/mpposeest_demo.webp) - -## License - -All files in this directory are licensed under [Apache 2.0 License](LICENSE). - -## Reference -- MediaPipe Pose: https://developers.google.com/mediapipe/solutions/vision/pose_landmarker -- MediaPipe pose model and model card: https://github.com/google/mediapipe/blob/master/docs/solutions/models.md#pose -- BlazePose TFJS: https://github.com/tensorflow/tfjs-models/tree/master/pose-detection/src/blazepose_tfjs diff --git a/models/pose_estimation_mediapipe/demo.cpp b/models/pose_estimation_mediapipe/demo.cpp deleted file mode 100644 index f5b97deb..00000000 --- a/models/pose_estimation_mediapipe/demo.cpp +++ /dev/null @@ -1,2850 +0,0 @@ -#include -#include -#include -#include - -#include - -const long double _M_PI = 3.141592653589793238L; -using namespace std; -using namespace cv; -using namespace dnn; - -vector< pair > backendTargetPairs = { - std::make_pair(dnn::DNN_BACKEND_OPENCV, dnn::DNN_TARGET_CPU), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA_FP16), - std::make_pair(dnn::DNN_BACKEND_TIMVX, dnn::DNN_TARGET_NPU), - std::make_pair(dnn::DNN_BACKEND_CANN, dnn::DNN_TARGET_NPU) }; - - -Mat getMediapipeAnchor(); - 
-class MPPersonDet { -private: - Net net; - string modelPath; - Size inputSize; - float scoreThreshold; - float nmsThreshold; - dnn::Backend backendId; - dnn::Target targetId; - int topK; - Mat anchors; - -public: - MPPersonDet(string modPath, float nmsThresh = 0.3, float scoreThresh = 0.5, int tok=5000 , dnn::Backend bId = DNN_BACKEND_DEFAULT, dnn::Target tId = DNN_TARGET_CPU) : - modelPath(modPath), nmsThreshold(nmsThresh), - scoreThreshold(scoreThresh), topK(tok), - backendId(bId), targetId(tId) - { - this->inputSize = Size(224, 224); - this->net = readNet(this->modelPath); - this->net.setPreferableBackend(this->backendId); - this->net.setPreferableTarget(this->targetId); - this->anchors = getMediapipeAnchor(); - } - - pair preprocess(Mat img) - { - Mat blob; - Image2BlobParams paramMediapipe; - paramMediapipe.datalayout = DNN_LAYOUT_NCHW; - paramMediapipe.ddepth = CV_32F; - paramMediapipe.mean = Scalar::all(127.5); - paramMediapipe.scalefactor = Scalar::all(1/127.5); - paramMediapipe.size = this->inputSize; - paramMediapipe.swapRB = true; - paramMediapipe.paddingmode = DNN_PMODE_LETTERBOX; - - double ratio = min(this->inputSize.height / double(img.rows), this->inputSize.width / double(img.cols)); - Size padBias(0, 0); - if (img.rows != this->inputSize.height || img.cols != this->inputSize.width) - { - // keep aspect ratio when resize - Size ratioSize(int(img.cols * ratio), int(img.rows* ratio)); - int padH = this->inputSize.height - ratioSize.height; - int padW = this->inputSize.width - ratioSize.width; - padBias.width = padW / 2; - padBias.height = padH / 2; - } - blob = blobFromImageWithParams(img, paramMediapipe); - padBias = Size(int(padBias.width / ratio), int(padBias.height / ratio)); - return pair(blob, padBias); - } - - Mat infer(Mat srcimg) - { - pair w = this->preprocess(srcimg); - Mat inputBlob = get<0>(w); - Size padBias = get<1>(w); - this->net.setInput(inputBlob); - vector outs; - this->net.forward(outs, this->net.getUnconnectedOutLayersNames()); 
- Mat predictions = this->postprocess(outs, Size(srcimg.cols, srcimg.rows), padBias); - return predictions; - } - - Mat postprocess(vector outputs, Size orgSize, Size padBias) - { - Mat score = outputs[1].reshape(0, outputs[1].size[0]); - Mat boxLandDelta = outputs[0].reshape(outputs[0].size[0], outputs[0].size[1]); - Mat boxDelta = boxLandDelta.colRange(0, 4); - Mat landmarkDelta = boxLandDelta.colRange(4, boxLandDelta.cols); - float scale = float(max(orgSize.height, orgSize.width)); - Mat mask = score < -100; - score.setTo(-100, mask); - mask = score > 100; - score.setTo(100, mask); - Mat deno; - exp(-score, deno); - divide(1.0, 1+deno, score); - boxDelta.colRange(0, 1) = boxDelta.colRange(0, 1) / this->inputSize.width; - boxDelta.colRange(1, 2) = boxDelta.colRange(1, 2) / this->inputSize.height; - boxDelta.colRange(2, 3) = boxDelta.colRange(2, 3) / this->inputSize.width; - boxDelta.colRange(3, 4) = boxDelta.colRange(3, 4) / this->inputSize.height; - Mat xy1 = (boxDelta.colRange(0, 2) - boxDelta.colRange(2, 4) / 2 + this->anchors) * scale; - Mat xy2 = (boxDelta.colRange(0, 2) + boxDelta.colRange(2, 4) / 2 + this->anchors) * scale; - Mat boxes; - hconcat(xy1, xy2, boxes); - vector< Rect2d > rotBoxes(boxes.rows); - boxes.colRange(0, 1) = boxes.colRange(0, 1) - padBias.width; - boxes.colRange(1, 2) = boxes.colRange(1, 2) - padBias.height; - boxes.colRange(2, 3) = boxes.colRange(2, 3) - padBias.width; - boxes.colRange(3, 4) = boxes.colRange(3, 4) - padBias.height; - for (int i = 0; i < boxes.rows; i++) - { - rotBoxes[i] = Rect2d(Point2d(boxes.at(i, 0), boxes.at(i, 1)), Point2d(boxes.at(i, 2), boxes.at(i, 3))); - } - vector keep; - NMSBoxes(rotBoxes, score, this->scoreThreshold, this->nmsThreshold, keep, 1.0f, this->topK); - if (keep.size() == 0) - return Mat(); - int nbCols = landmarkDelta.cols + boxes.cols + 1; - Mat candidates(int(keep.size()), nbCols, CV_32FC1); - int row = 0; - for (auto idx : keep) - { - candidates.at(row, nbCols - 1) = score.at(idx); - 
boxes.row(idx).copyTo(candidates.row(row).colRange(0, 4)); - candidates.at(row, 4) = (landmarkDelta.at(idx, 0) / this->inputSize.width + this->anchors.at(idx,0)) * scale - padBias.width; - candidates.at(row, 5) = (landmarkDelta.at(idx, 1) / this->inputSize.height + this->anchors.at(idx, 1))* scale - padBias.height; - candidates.at(row, 6) = (landmarkDelta.at(idx, 2) / this->inputSize.width + this->anchors.at(idx, 0))* scale - padBias.width; - candidates.at(row, 7) = (landmarkDelta.at(idx, 3) / this->inputSize.height + this->anchors.at(idx, 1))* scale - padBias.height; - candidates.at(row, 8) = (landmarkDelta.at(idx, 4) / this->inputSize.width + this->anchors.at(idx, 0))* scale - padBias.width; - candidates.at(row, 9) = (landmarkDelta.at(idx, 5) / this->inputSize.height + this->anchors.at(idx, 1))* scale - padBias.height; - candidates.at(row, 10) = (landmarkDelta.at(idx, 6) / this->inputSize.width + this->anchors.at(idx, 0))* scale - padBias.width; - candidates.at(row, 11) = (landmarkDelta.at(idx, 7) / this->inputSize.height + this->anchors.at(idx, 1))* scale - padBias.height; - row++; - } - return candidates; - - } - - -}; - -class MPPose { -private: - Net net; - string modelPath; - Size inputSize; - float confThreshold; - dnn::Backend backendId; - dnn::Target targetId; - float personBoxPreEnlargeFactor; - float personBoxEnlargeFactor; - Mat anchors; - -public: - MPPose(string modPath, float confThresh = 0.5, dnn::Backend bId = DNN_BACKEND_DEFAULT, dnn::Target tId = DNN_TARGET_CPU) : - modelPath(modPath), confThreshold(confThresh), - backendId(bId), targetId(tId) - { - this->inputSize = Size(256, 256); - this->net = readNet(this->modelPath); - this->net.setPreferableBackend(this->backendId); - this->net.setPreferableTarget(this->targetId); - this->anchors = getMediapipeAnchor(); - // RoI will be larger so the performance will be better, but preprocess will be slower.Default to 1. 
- this->personBoxPreEnlargeFactor = 1; - this->personBoxEnlargeFactor = 1.25; - } - - tuple preprocess(Mat image, Mat person) - { - /*** - Rotate input for inference. - Parameters: - image - input image of BGR channel order - face_bbox - human face bounding box found in image of format [[x1, y1], [x2, y2]] (top-left and bottom-right points) - person_landmarks - 4 landmarks (2 full body points, 2 upper body points) of shape [4, 2] - Returns: - rotated_person - rotated person image for inference - rotate_person_bbox - person box of interest range - angle - rotate angle for person - rotation_matrix - matrix for rotation and de-rotation - pad_bias - pad pixels of interest range - */ - // crop and pad image to interest range - Size padBias(0, 0); // left, top - Mat personKeypoints = person.colRange(4, 12).reshape(0, 4); - Point2f midHipPoint = Point2f(personKeypoints.row(0)); - Point2f fullBodyPoint = Point2f(personKeypoints.row(1)); - // # get RoI - double fullDist = norm(midHipPoint - fullBodyPoint); - Mat fullBoxf,fullBox; - Mat v1 = Mat(midHipPoint) - fullDist, v2 = Mat(midHipPoint); - vector vmat = { Mat(midHipPoint) - fullDist, Mat(midHipPoint) + fullDist }; - hconcat(vmat, fullBoxf); - // enlarge to make sure full body can be cover - Mat cBox, centerBox, whBox; - reduce(fullBoxf, centerBox, 1, REDUCE_AVG, CV_32F); - whBox = fullBoxf.col(1) - fullBoxf.col(0); - Mat newHalfSize = whBox * this->personBoxPreEnlargeFactor / 2; - vmat[0] = centerBox - newHalfSize; - vmat[1] = centerBox + newHalfSize; - hconcat(vmat, fullBox); - Mat personBox; - fullBox.convertTo(personBox, CV_32S); - // refine person bbox - Mat idx = personBox.row(0) < 0; - personBox.row(0).setTo(0, idx); - idx = personBox.row(0) >= image.cols; - personBox.row(0).setTo(image.cols , idx); - idx = personBox.row(1) < 0; - personBox.row(1).setTo(0, idx); - idx = personBox.row(1) >= image.rows; - personBox.row(1).setTo(image.rows, idx); // crop to the size of interest - - image = image(Rect(personBox.at(0, 
0), personBox.at(1, 0), personBox.at(0, 1) - personBox.at(0, 0), personBox.at(1, 1) - personBox.at(1, 0))); - // pad to square - int top = int(personBox.at(1, 0) - fullBox.at(1, 0)); - int left = int(personBox.at(0, 0) - fullBox.at(0, 0)); - int bottom = int(fullBox.at(1, 1) - personBox.at(1, 1)); - int right = int(fullBox.at(0, 1) - personBox.at(0, 1)); - copyMakeBorder(image, image, top, bottom, left, right, BORDER_CONSTANT, Scalar(0, 0, 0)); - padBias = Point(padBias) + Point(personBox.col(0)) - Point(left, top); - // compute rotation - midHipPoint -= Point2f(padBias); - fullBodyPoint -= Point2f(padBias); - float radians = float(_M_PI / 2 - atan2(-(fullBodyPoint.y - midHipPoint.y), fullBodyPoint.x - midHipPoint.x)); - radians = radians - 2 * float(_M_PI) * int((radians + _M_PI) / (2 * _M_PI)); - float angle = (radians * 180 / float(_M_PI)); - // get rotation matrix* - Mat rotationMatrix = getRotationMatrix2D(midHipPoint, angle, 1.0); - // get rotated image - Mat rotatedImage; - warpAffine(image, rotatedImage, rotationMatrix, Size(image.cols, image.rows)); - // get landmark bounding box - Mat blob; - Image2BlobParams paramPoseMediapipe; - paramPoseMediapipe.datalayout = DNN_LAYOUT_NHWC; - paramPoseMediapipe.ddepth = CV_32F; - paramPoseMediapipe.mean = Scalar::all(0); - paramPoseMediapipe.scalefactor = Scalar::all(1 / 255.); - paramPoseMediapipe.size = this->inputSize; - paramPoseMediapipe.swapRB = true; - paramPoseMediapipe.paddingmode = DNN_PMODE_NULL; - blob = blobFromImageWithParams(rotatedImage, paramPoseMediapipe); // resize INTER_AREA becomes INTER_LINEAR in blobFromImage - Mat rotatedPersonBox = (Mat_(2, 2) << 0, 0, image.cols, image.rows); - - return tuple(blob, rotatedPersonBox, angle, rotationMatrix, padBias); - } - - tuple infer(Mat image, Mat person) - { - int h = image.rows; - int w = image.cols; - // Preprocess - tuple tw; - tw = this->preprocess(image, person); - Mat inputBlob = get<0>(tw); - Mat rotatedPersonBbox = get<1>(tw); - float angle = 
get<2>(tw); - Mat rotationMatrix = get<3>(tw); - Size padBias = get<4>(tw); - - // Forward - this->net.setInput(inputBlob); - vector outputBlob; - this->net.forward(outputBlob, this->net.getUnconnectedOutLayersNames()); - - // Postprocess - tuple results; - results = this->postprocess(outputBlob, rotatedPersonBbox, angle, rotationMatrix, padBias, Size(w, h)); - return results;// # [bbox_coords, landmarks_coords, conf] - } - - tuple postprocess(vector blob, Mat rotatedPersonBox, float angle, Mat rotationMatrix, Size padBias, Size imgSize) - { - float valConf = blob[1].at(0); - if (valConf < this->confThreshold) - return tuple(Mat(), Mat(), Mat(), Mat(), Mat(), valConf); - Mat landmarks = blob[0].reshape(0, 39); - Mat mask = blob[2]; - Mat heatmap = blob[3]; - Mat landmarksWorld = blob[4].reshape(0, 39); - - Mat deno; - // recover sigmoid score - exp(-landmarks.colRange(3, landmarks.cols), deno); - divide(1.0, 1 + deno, landmarks.colRange(3, landmarks.cols)); - // TODO: refine landmarks with heatmap. 
reference: https://github.com/tensorflow/tfjs-models/blob/master/pose-detection/src/blazepose_tfjs/detector.ts#L577-L582 - heatmap = heatmap.reshape(0, heatmap.size[0]); - // transform coords back to the input coords - Mat whRotatedPersonPbox = rotatedPersonBox.row(1) - rotatedPersonBox.row(0); - Mat scaleFactor = whRotatedPersonPbox.clone(); - scaleFactor.col(0) /= this->inputSize.width; - scaleFactor.col(1) /= this->inputSize.height; - landmarks.col(0) = (landmarks.col(0) - this->inputSize.width / 2) * scaleFactor.at(0); - landmarks.col(1) = (landmarks.col(1) - this->inputSize.height / 2) * scaleFactor.at(1); - landmarks.col(2) = landmarks.col(2) * max(scaleFactor.at(1), scaleFactor.at(0)); - Mat coordsRotationMatrix; - getRotationMatrix2D(Point(0, 0), angle, 1.0).convertTo(coordsRotationMatrix, CV_32F); - Mat rotatedLandmarks = landmarks.colRange(0, 2) * coordsRotationMatrix.colRange(0, 2); - hconcat(rotatedLandmarks, landmarks.colRange(2, landmarks.cols), rotatedLandmarks); - Mat rotatedLandmarksWorld = landmarksWorld.colRange(0, 2) * coordsRotationMatrix.colRange(0, 2); - hconcat(rotatedLandmarksWorld, landmarksWorld.col(2), rotatedLandmarksWorld); - // invert rotation - Mat rotationComponent = (Mat_(2, 2) <(0,0), rotationMatrix.at(1, 0), rotationMatrix.at(0, 1), rotationMatrix.at(1, 1)); - Mat translationComponent = rotationMatrix(Rect(2, 0, 1, 2)).clone(); - Mat invertedTranslation = -rotationComponent * translationComponent; - Mat inverseRotationMatrix; - hconcat(rotationComponent, invertedTranslation, inverseRotationMatrix); - Mat center, rc; - reduce(rotatedPersonBox, rc, 0, REDUCE_AVG, CV_64F); - hconcat(rc, Mat(1, 1, CV_64FC1, 1) , center); - // get box center - Mat originalCenter(2, 1, CV_64FC1); - originalCenter.at(0) = center.dot(inverseRotationMatrix.row(0)); - originalCenter.at(1) = center.dot(inverseRotationMatrix.row(1)); - for (int idxRow = 0; idxRow < rotatedLandmarks.rows; idxRow++) - { - landmarks.at(idxRow, 0) = 
float(rotatedLandmarks.at(idxRow, 0) + originalCenter.at(0) + padBias.width); // - landmarks.at(idxRow, 1) = float(rotatedLandmarks.at(idxRow, 1) + originalCenter.at(1) + padBias.height); // - } - // get bounding box from rotated_landmarks - double vmin0, vmin1, vmax0, vmax1; - minMaxLoc(landmarks.col(0), &vmin0, &vmax0); - minMaxLoc(landmarks.col(1), &vmin1, &vmax1); - Mat bbox = (Mat_(2, 2) << vmin0, vmin1, vmax0, vmax1); - Mat centerBox; - reduce(bbox, centerBox, 0, REDUCE_AVG, CV_32F); - Mat whBox = bbox.row(1) - bbox.row(0); - Mat newHalfSize = whBox * this->personBoxEnlargeFactor / 2; - vector vmat(2); - vmat[0] = centerBox - newHalfSize; - vmat[1] = centerBox + newHalfSize; - vconcat(vmat, bbox); - // invert rotation for mask - mask = mask.reshape(1, 256); - Mat invertRotationMatrix = getRotationMatrix2D(Point(mask.cols / 2, mask.rows / 2), -angle, 1.0); - Mat invertRotationMask; - warpAffine(mask, invertRotationMask, invertRotationMatrix, Size(mask.cols, mask.rows)); - // enlarge mask - resize(invertRotationMask, invertRotationMask, Size(int(whRotatedPersonPbox.at(0)), int(whRotatedPersonPbox.at(1)))); - // crop and pad mask - int minW = -min(padBias.width, 0); - int minH= -min(padBias.height, 0); - int left = max(padBias.width, 0); - int top = max(padBias.height, 0); - Size padOver = imgSize - Size(invertRotationMask.cols, invertRotationMask.rows) - padBias; - int maxW = min(padOver.width, 0) + invertRotationMask.cols; - int maxH = min(padOver.height, 0) + invertRotationMask.rows; - int right = max(padOver.width, 0); - int bottom = max(padOver.height, 0); - invertRotationMask = invertRotationMask(Rect(minW, minH, maxW - minW, maxH - minH)).clone(); - copyMakeBorder(invertRotationMask, invertRotationMask, top, bottom, left, right, BORDER_CONSTANT, Scalar::all(0)); - // binarize mask - threshold(invertRotationMask, invertRotationMask, 1, 255, THRESH_BINARY); - - /* 2*2 person bbox: [[x1, y1], [x2, y2]] - # 39*5 screen landmarks: 33 keypoints and 6 auxiliary 
points with [x, y, z, visibility, presence], z value is relative to HIP - # Visibility is probability that a keypoint is located within the frame and not occluded by another bigger body part or another object - # Presence is probability that a keypoint is located within the frame - # 39*3 world landmarks: 33 keypoints and 6 auxiliary points with [x, y, z] 3D metric x, y, z coordinate - # img_height*img_width mask: gray mask, where 255 indicates the full body of a person and 0 means background - # 64*64*39 heatmap: currently only used for refining landmarks, requires sigmod processing before use - # conf: confidence of prediction*/ - return tuple(bbox, landmarks, rotatedLandmarksWorld, invertRotationMask, heatmap, valConf); - } -}; - -std::string keys = -"{ help h | | Print help message. }" -"{ model m | pose_estimation_mediapipe_2023mar.onnx | Usage: Path to the model, defaults to person_detection_mediapipe_2023mar.onnx }" -"{ input i | | Path to input image or video file. Skip this argument to capture frames from a camera.}" -"{ conf_threshold | 0.5 | Usage: Filter out hands of confidence < conf_threshold. }" -"{ top_k | 1 | Usage: Keep top_k bounding boxes before NMS. }" -"{ save s | true | Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input. }" -"{ vis v | true | Usage: Specify to open a new window to show results. Invalid in case of camera input. 
}" -"{ backend bt | 0 | Choose one of computation backends: " -"0: (default) OpenCV implementation + CPU, " -"1: CUDA + GPU (CUDA), " -"2: CUDA + GPU (CUDA FP16), " -"3: TIM-VX + NPU, " -"4: CANN + NPU}"; - - -void drawLines(Mat image, Mat landmarks, Mat keeplandmarks, bool isDrawPoint = true, int thickness = 2) -{ - - vector> segment = { - make_pair(0, 1), make_pair(1, 2), make_pair(2, 3), make_pair(3, 7), - make_pair(0, 4), make_pair(4, 5), make_pair(5, 6), make_pair(6, 8), - make_pair(9, 10), - make_pair(12, 14), make_pair(14, 16), make_pair(16, 22), make_pair(16, 18), make_pair(16, 20), make_pair(18, 20), - make_pair(11, 13), make_pair(13, 15), make_pair(15, 21), make_pair(15, 19), make_pair(15, 17), make_pair(17, 19), - make_pair(11, 12), make_pair(11, 23), make_pair(23, 24), make_pair(24, 12), - make_pair(24, 26), make_pair(26, 28), make_pair(28, 30), make_pair(28, 32), make_pair(30, 32), - make_pair(23, 25), make_pair(25, 27),make_pair(27, 31), make_pair(27, 29), make_pair(29, 31) }; - for (auto p : segment) - if (keeplandmarks.at(p.first) && keeplandmarks.at(p.second)) - line(image, Point(landmarks.row(p.first)), Point(landmarks.row(p.second)), Scalar(255, 255, 255), thickness); - if (isDrawPoint) - for (int idxRow = 0; idxRow < landmarks.rows; idxRow++) - if (keeplandmarks.at(idxRow)) - circle(image, Point(landmarks.row(idxRow)), thickness, Scalar(0, 0, 255), -1); -} - - -pair visualize(Mat image, vector> poses, float fps=-1) -{ - Mat displayScreen = image.clone(); - Mat display3d(400, 400, CV_8UC3, Scalar::all(0)); - line(display3d, Point(200, 0), Point(200, 400), Scalar(255, 255, 255), 2); - line(display3d, Point(0, 200), Point(400, 200), Scalar(255, 255, 255), 2); - putText(display3d, "Main View", Point(0, 12), FONT_HERSHEY_DUPLEX, 0.5, Scalar(0, 0, 255)); - putText(display3d, "Top View", Point(200, 12), FONT_HERSHEY_DUPLEX, 0.5, Scalar(0, 0, 255)); - putText(display3d, "Left View", Point(0, 212), FONT_HERSHEY_DUPLEX, 0.5, Scalar(0, 0, 255)); - 
putText(display3d, "Right View", Point(200, 212), FONT_HERSHEY_DUPLEX, 0.5, Scalar(0, 0, 255)); - bool isDraw = false; // ensure only one person is drawn - - for (auto pose : poses) - { - Mat bbox = get<0>(pose); - if (!bbox.empty()) - { - Mat landmarksScreen = get<1>(pose); - Mat landmarksWord = get<2>(pose); - Mat mask; - get<3>(pose).convertTo(mask, CV_8U); - Mat heatmap = get<4>(pose); - float conf = get<5>(pose); - Mat edges; - Canny(mask, edges, 100, 200); - Mat kernel(2, 2, CV_8UC1, Scalar::all(1)); // expansion edge to 2 pixels - dilate(edges, edges, kernel); - Mat edgesBGR; - cvtColor(edges, edgesBGR, COLOR_GRAY2BGR); - Mat idxSelec = edges == 255; - edgesBGR.setTo(Scalar(0, 255, 0), idxSelec); - - add(edgesBGR, displayScreen, displayScreen); - // draw box - Mat box; - bbox.convertTo(box, CV_32S); - - rectangle(displayScreen, Point(box.row(0)), Point(box.row(1)), Scalar(0, 255, 0), 2); - putText(displayScreen, format("Conf = %4f", conf), Point(0, 35), FONT_HERSHEY_DUPLEX, 0.7,Scalar(0, 0, 255), 2); - if (fps > 0) - putText(displayScreen, format("FPS = %.2f", fps), Point(0, 55), FONT_HERSHEY_SIMPLEX, 0.7, Scalar(0, 0, 255), 2); - // Draw line between each key points - landmarksScreen = landmarksScreen.rowRange(0, landmarksScreen.rows - 6); - landmarksWord = landmarksWord.rowRange(0, landmarksWord.rows - 6); - - Mat keepLandmarks = landmarksScreen.col(4) > 0.8; // only show visible keypoints which presence bigger than 0.8 - - Mat landmarksXY; - landmarksScreen.colRange(0, 2).convertTo(landmarksXY, CV_32S); - drawLines(displayScreen, landmarksXY, keepLandmarks, false); - - // z value is relative to HIP, but we use constant to instead - for (int idxRow = 0; idxRow < landmarksScreen.rows; idxRow++) - { - Mat landmark;// p in enumerate(landmarks_screen[:, 0 : 3].astype(np.int32)) - landmarksScreen.row(idxRow).convertTo(landmark, CV_32S); - if (keepLandmarks.at(idxRow)) - circle(displayScreen, Point(landmark.at(0), landmark.at(1)), 2, Scalar(0, 0, 255), -1); - } 
- - if (!isDraw) - { - isDraw = true; - // Main view - Mat landmarksXY = landmarksWord.colRange(0, 2).clone(); - Mat x = landmarksXY * 100 + 100; - x.convertTo(landmarksXY, CV_32S); - drawLines(display3d, landmarksXY, keepLandmarks, true, 2); - - // Top view - Mat landmarksXZ; - hconcat(landmarksWord.col(0), landmarksWord.col(2), landmarksXZ); - landmarksXZ.col(1) = -landmarksXZ.col(1); - x = landmarksXZ * 100; - x.col(0) += 300; - x.col(1) += 100; - x.convertTo(landmarksXZ, CV_32S); - drawLines(display3d, landmarksXZ, keepLandmarks, true, 2); - - // Left view - Mat landmarksYZ; - hconcat(landmarksWord.col(2), landmarksWord.col(1), landmarksYZ); - landmarksYZ.col(0) = -landmarksYZ.col(0); - x = landmarksYZ * 100; - x.col(0) += 100; - x.col(1) += 300; - x.convertTo(landmarksYZ, CV_32S); - drawLines(display3d, landmarksYZ, keepLandmarks, true, 2); - - // Right view - Mat landmarksZY; - hconcat(landmarksWord.col(2), landmarksWord.col(1), landmarksZY); - x = landmarksZY * 100; - x.col(0) += 300; - x.col(1) += 300; - x.convertTo(landmarksZY, CV_32S); - drawLines(display3d, landmarksZY, keepLandmarks, true, 2); - } - } - } - return pair(displayScreen, display3d); -} - - - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("Person Detector from MediaPipe"); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - string model = parser.get("model"); - float confThreshold = parser.get("conf_threshold"); - float scoreThreshold = 0.5f; - float nmsThreshold = 0.3f; - int topK = 5000; - bool vis = parser.get("vis"); - bool save = parser.get("save"); - int backendTargetid = parser.get("backend"); - - if (model.empty()) - { - CV_Error(Error::StsError, "Model file " + model + " not found"); - } - VideoCapture cap; - if (parser.has("input")) - cap.open(samples::findFile(parser.get("input"))); - else - cap.open(0); - Mat frame; - // person detector - MPPersonDet 
modelNet("../person_detection_mediapipe/person_detection_mediapipe_2023mar.onnx", nmsThreshold, scoreThreshold, topK, - backendTargetPairs[backendTargetid].first, backendTargetPairs[backendTargetid].second); - // pose estimator - MPPose poseEstimator(model, confThreshold, backendTargetPairs[backendTargetid].first, backendTargetPairs[backendTargetid].second); - //! [Open a video file or an image file or a camera stream] - if (!cap.isOpened()) - CV_Error(Error::StsError, "Cannot open video or file"); - - static const std::string kWinName = "MPPose Demo"; - while (waitKey(1) < 0) - { - cap >> frame; - if (frame.empty()) - { - if (parser.has("input")) - { - cout << "Frame is empty" << endl; - break; - } - else - continue; - } - TickMeter tm; - tm.start(); - Mat person = modelNet.infer(frame); - tm.stop(); - vector> pose; - for (int idxRow = 0; idxRow < person.rows; idxRow++) - { - tuple re = poseEstimator.infer(frame, person.row(idxRow)); - if (!get<0>(re).empty()) - pose.push_back(re); - } - cout << "Inference time: " << tm.getTimeMilli() << " ms\n"; - pair duoimg = visualize(frame, pose, tm.getFPS()); - if (vis) - { - imshow(kWinName, get<0>(duoimg)); - imshow("3d", get<1>(duoimg)); - } - } - return 0; -} - - -Mat getMediapipeAnchor() -{ - Mat anchor= (Mat_(2254,2) << 0.017857142857142856, 0.017857142857142856, - 0.017857142857142856, 0.017857142857142856, - 0.05357142857142857, 0.017857142857142856, - 0.05357142857142857, 0.017857142857142856, - 0.08928571428571429, 0.017857142857142856, - 0.08928571428571429, 0.017857142857142856, - 0.125, 0.017857142857142856, - 0.125, 0.017857142857142856, - 0.16071428571428573, 0.017857142857142856, - 0.16071428571428573, 0.017857142857142856, - 0.19642857142857142, 0.017857142857142856, - 0.19642857142857142, 0.017857142857142856, - 0.23214285714285715, 0.017857142857142856, - 0.23214285714285715, 0.017857142857142856, - 0.26785714285714285, 0.017857142857142856, - 0.26785714285714285, 0.017857142857142856, - 
0.30357142857142855, 0.017857142857142856, - 0.30357142857142855, 0.017857142857142856, - 0.3392857142857143, 0.017857142857142856, - 0.3392857142857143, 0.017857142857142856, - 0.375, 0.017857142857142856, - 0.375, 0.017857142857142856, - 0.4107142857142857, 0.017857142857142856, - 0.4107142857142857, 0.017857142857142856, - 0.44642857142857145, 0.017857142857142856, - 0.44642857142857145, 0.017857142857142856, - 0.48214285714285715, 0.017857142857142856, - 0.48214285714285715, 0.017857142857142856, - 0.5178571428571429, 0.017857142857142856, - 0.5178571428571429, 0.017857142857142856, - 0.5535714285714286, 0.017857142857142856, - 0.5535714285714286, 0.017857142857142856, - 0.5892857142857143, 0.017857142857142856, - 0.5892857142857143, 0.017857142857142856, - 0.625, 0.017857142857142856, - 0.625, 0.017857142857142856, - 0.6607142857142857, 0.017857142857142856, - 0.6607142857142857, 0.017857142857142856, - 0.6964285714285714, 0.017857142857142856, - 0.6964285714285714, 0.017857142857142856, - 0.7321428571428571, 0.017857142857142856, - 0.7321428571428571, 0.017857142857142856, - 0.7678571428571429, 0.017857142857142856, - 0.7678571428571429, 0.017857142857142856, - 0.8035714285714286, 0.017857142857142856, - 0.8035714285714286, 0.017857142857142856, - 0.8392857142857143, 0.017857142857142856, - 0.8392857142857143, 0.017857142857142856, - 0.875, 0.017857142857142856, - 0.875, 0.017857142857142856, - 0.9107142857142857, 0.017857142857142856, - 0.9107142857142857, 0.017857142857142856, - 0.9464285714285714, 0.017857142857142856, - 0.9464285714285714, 0.017857142857142856, - 0.9821428571428571, 0.017857142857142856, - 0.9821428571428571, 0.017857142857142856, - 0.017857142857142856, 0.05357142857142857, - 0.017857142857142856, 0.05357142857142857, - 0.05357142857142857, 0.05357142857142857, - 0.05357142857142857, 0.05357142857142857, - 0.08928571428571429, 0.05357142857142857, - 0.08928571428571429, 0.05357142857142857, - 0.125, 0.05357142857142857, - 0.125, 
0.05357142857142857, - 0.16071428571428573, 0.05357142857142857, - 0.16071428571428573, 0.05357142857142857, - 0.19642857142857142, 0.05357142857142857, - 0.19642857142857142, 0.05357142857142857, - 0.23214285714285715, 0.05357142857142857, - 0.23214285714285715, 0.05357142857142857, - 0.26785714285714285, 0.05357142857142857, - 0.26785714285714285, 0.05357142857142857, - 0.30357142857142855, 0.05357142857142857, - 0.30357142857142855, 0.05357142857142857, - 0.3392857142857143, 0.05357142857142857, - 0.3392857142857143, 0.05357142857142857, - 0.375, 0.05357142857142857, - 0.375, 0.05357142857142857, - 0.4107142857142857, 0.05357142857142857, - 0.4107142857142857, 0.05357142857142857, - 0.44642857142857145, 0.05357142857142857, - 0.44642857142857145, 0.05357142857142857, - 0.48214285714285715, 0.05357142857142857, - 0.48214285714285715, 0.05357142857142857, - 0.5178571428571429, 0.05357142857142857, - 0.5178571428571429, 0.05357142857142857, - 0.5535714285714286, 0.05357142857142857, - 0.5535714285714286, 0.05357142857142857, - 0.5892857142857143, 0.05357142857142857, - 0.5892857142857143, 0.05357142857142857, - 0.625, 0.05357142857142857, - 0.625, 0.05357142857142857, - 0.6607142857142857, 0.05357142857142857, - 0.6607142857142857, 0.05357142857142857, - 0.6964285714285714, 0.05357142857142857, - 0.6964285714285714, 0.05357142857142857, - 0.7321428571428571, 0.05357142857142857, - 0.7321428571428571, 0.05357142857142857, - 0.7678571428571429, 0.05357142857142857, - 0.7678571428571429, 0.05357142857142857, - 0.8035714285714286, 0.05357142857142857, - 0.8035714285714286, 0.05357142857142857, - 0.8392857142857143, 0.05357142857142857, - 0.8392857142857143, 0.05357142857142857, - 0.875, 0.05357142857142857, - 0.875, 0.05357142857142857, - 0.9107142857142857, 0.05357142857142857, - 0.9107142857142857, 0.05357142857142857, - 0.9464285714285714, 0.05357142857142857, - 0.9464285714285714, 0.05357142857142857, - 0.9821428571428571, 0.05357142857142857, - 0.9821428571428571, 
0.05357142857142857, - 0.017857142857142856, 0.08928571428571429, - 0.017857142857142856, 0.08928571428571429, - 0.05357142857142857, 0.08928571428571429, - 0.05357142857142857, 0.08928571428571429, - 0.08928571428571429, 0.08928571428571429, - 0.08928571428571429, 0.08928571428571429, - 0.125, 0.08928571428571429, - 0.125, 0.08928571428571429, - 0.16071428571428573, 0.08928571428571429, - 0.16071428571428573, 0.08928571428571429, - 0.19642857142857142, 0.08928571428571429, - 0.19642857142857142, 0.08928571428571429, - 0.23214285714285715, 0.08928571428571429, - 0.23214285714285715, 0.08928571428571429, - 0.26785714285714285, 0.08928571428571429, - 0.26785714285714285, 0.08928571428571429, - 0.30357142857142855, 0.08928571428571429, - 0.30357142857142855, 0.08928571428571429, - 0.3392857142857143, 0.08928571428571429, - 0.3392857142857143, 0.08928571428571429, - 0.375, 0.08928571428571429, - 0.375, 0.08928571428571429, - 0.4107142857142857, 0.08928571428571429, - 0.4107142857142857, 0.08928571428571429, - 0.44642857142857145, 0.08928571428571429, - 0.44642857142857145, 0.08928571428571429, - 0.48214285714285715, 0.08928571428571429, - 0.48214285714285715, 0.08928571428571429, - 0.5178571428571429, 0.08928571428571429, - 0.5178571428571429, 0.08928571428571429, - 0.5535714285714286, 0.08928571428571429, - 0.5535714285714286, 0.08928571428571429, - 0.5892857142857143, 0.08928571428571429, - 0.5892857142857143, 0.08928571428571429, - 0.625, 0.08928571428571429, - 0.625, 0.08928571428571429, - 0.6607142857142857, 0.08928571428571429, - 0.6607142857142857, 0.08928571428571429, - 0.6964285714285714, 0.08928571428571429, - 0.6964285714285714, 0.08928571428571429, - 0.7321428571428571, 0.08928571428571429, - 0.7321428571428571, 0.08928571428571429, - 0.7678571428571429, 0.08928571428571429, - 0.7678571428571429, 0.08928571428571429, - 0.8035714285714286, 0.08928571428571429, - 0.8035714285714286, 0.08928571428571429, - 0.8392857142857143, 0.08928571428571429, - 
0.8392857142857143, 0.08928571428571429, - 0.875, 0.08928571428571429, - 0.875, 0.08928571428571429, - 0.9107142857142857, 0.08928571428571429, - 0.9107142857142857, 0.08928571428571429, - 0.9464285714285714, 0.08928571428571429, - 0.9464285714285714, 0.08928571428571429, - 0.9821428571428571, 0.08928571428571429, - 0.9821428571428571, 0.08928571428571429, - 0.017857142857142856, 0.125, - 0.017857142857142856, 0.125, - 0.05357142857142857, 0.125, - 0.05357142857142857, 0.125, - 0.08928571428571429, 0.125, - 0.08928571428571429, 0.125, - 0.125, 0.125, - 0.125, 0.125, - 0.16071428571428573, 0.125, - 0.16071428571428573, 0.125, - 0.19642857142857142, 0.125, - 0.19642857142857142, 0.125, - 0.23214285714285715, 0.125, - 0.23214285714285715, 0.125, - 0.26785714285714285, 0.125, - 0.26785714285714285, 0.125, - 0.30357142857142855, 0.125, - 0.30357142857142855, 0.125, - 0.3392857142857143, 0.125, - 0.3392857142857143, 0.125, - 0.375, 0.125, - 0.375, 0.125, - 0.4107142857142857, 0.125, - 0.4107142857142857, 0.125, - 0.44642857142857145, 0.125, - 0.44642857142857145, 0.125, - 0.48214285714285715, 0.125, - 0.48214285714285715, 0.125, - 0.5178571428571429, 0.125, - 0.5178571428571429, 0.125, - 0.5535714285714286, 0.125, - 0.5535714285714286, 0.125, - 0.5892857142857143, 0.125, - 0.5892857142857143, 0.125, - 0.625, 0.125, - 0.625, 0.125, - 0.6607142857142857, 0.125, - 0.6607142857142857, 0.125, - 0.6964285714285714, 0.125, - 0.6964285714285714, 0.125, - 0.7321428571428571, 0.125, - 0.7321428571428571, 0.125, - 0.7678571428571429, 0.125, - 0.7678571428571429, 0.125, - 0.8035714285714286, 0.125, - 0.8035714285714286, 0.125, - 0.8392857142857143, 0.125, - 0.8392857142857143, 0.125, - 0.875, 0.125, - 0.875, 0.125, - 0.9107142857142857, 0.125, - 0.9107142857142857, 0.125, - 0.9464285714285714, 0.125, - 0.9464285714285714, 0.125, - 0.9821428571428571, 0.125, - 0.9821428571428571, 0.125, - 0.017857142857142856, 0.16071428571428573, - 0.017857142857142856, 0.16071428571428573, - 
0.05357142857142857, 0.16071428571428573, - 0.05357142857142857, 0.16071428571428573, - 0.08928571428571429, 0.16071428571428573, - 0.08928571428571429, 0.16071428571428573, - 0.125, 0.16071428571428573, - 0.125, 0.16071428571428573, - 0.16071428571428573, 0.16071428571428573, - 0.16071428571428573, 0.16071428571428573, - 0.19642857142857142, 0.16071428571428573, - 0.19642857142857142, 0.16071428571428573, - 0.23214285714285715, 0.16071428571428573, - 0.23214285714285715, 0.16071428571428573, - 0.26785714285714285, 0.16071428571428573, - 0.26785714285714285, 0.16071428571428573, - 0.30357142857142855, 0.16071428571428573, - 0.30357142857142855, 0.16071428571428573, - 0.3392857142857143, 0.16071428571428573, - 0.3392857142857143, 0.16071428571428573, - 0.375, 0.16071428571428573, - 0.375, 0.16071428571428573, - 0.4107142857142857, 0.16071428571428573, - 0.4107142857142857, 0.16071428571428573, - 0.44642857142857145, 0.16071428571428573, - 0.44642857142857145, 0.16071428571428573, - 0.48214285714285715, 0.16071428571428573, - 0.48214285714285715, 0.16071428571428573, - 0.5178571428571429, 0.16071428571428573, - 0.5178571428571429, 0.16071428571428573, - 0.5535714285714286, 0.16071428571428573, - 0.5535714285714286, 0.16071428571428573, - 0.5892857142857143, 0.16071428571428573, - 0.5892857142857143, 0.16071428571428573, - 0.625, 0.16071428571428573, - 0.625, 0.16071428571428573, - 0.6607142857142857, 0.16071428571428573, - 0.6607142857142857, 0.16071428571428573, - 0.6964285714285714, 0.16071428571428573, - 0.6964285714285714, 0.16071428571428573, - 0.7321428571428571, 0.16071428571428573, - 0.7321428571428571, 0.16071428571428573, - 0.7678571428571429, 0.16071428571428573, - 0.7678571428571429, 0.16071428571428573, - 0.8035714285714286, 0.16071428571428573, - 0.8035714285714286, 0.16071428571428573, - 0.8392857142857143, 0.16071428571428573, - 0.8392857142857143, 0.16071428571428573, - 0.875, 0.16071428571428573, - 0.875, 0.16071428571428573, - 0.9107142857142857, 
0.16071428571428573, - 0.9107142857142857, 0.16071428571428573, - 0.9464285714285714, 0.16071428571428573, - 0.9464285714285714, 0.16071428571428573, - 0.9821428571428571, 0.16071428571428573, - 0.9821428571428571, 0.16071428571428573, - 0.017857142857142856, 0.19642857142857142, - 0.017857142857142856, 0.19642857142857142, - 0.05357142857142857, 0.19642857142857142, - 0.05357142857142857, 0.19642857142857142, - 0.08928571428571429, 0.19642857142857142, - 0.08928571428571429, 0.19642857142857142, - 0.125, 0.19642857142857142, - 0.125, 0.19642857142857142, - 0.16071428571428573, 0.19642857142857142, - 0.16071428571428573, 0.19642857142857142, - 0.19642857142857142, 0.19642857142857142, - 0.19642857142857142, 0.19642857142857142, - 0.23214285714285715, 0.19642857142857142, - 0.23214285714285715, 0.19642857142857142, - 0.26785714285714285, 0.19642857142857142, - 0.26785714285714285, 0.19642857142857142, - 0.30357142857142855, 0.19642857142857142, - 0.30357142857142855, 0.19642857142857142, - 0.3392857142857143, 0.19642857142857142, - 0.3392857142857143, 0.19642857142857142, - 0.375, 0.19642857142857142, - 0.375, 0.19642857142857142, - 0.4107142857142857, 0.19642857142857142, - 0.4107142857142857, 0.19642857142857142, - 0.44642857142857145, 0.19642857142857142, - 0.44642857142857145, 0.19642857142857142, - 0.48214285714285715, 0.19642857142857142, - 0.48214285714285715, 0.19642857142857142, - 0.5178571428571429, 0.19642857142857142, - 0.5178571428571429, 0.19642857142857142, - 0.5535714285714286, 0.19642857142857142, - 0.5535714285714286, 0.19642857142857142, - 0.5892857142857143, 0.19642857142857142, - 0.5892857142857143, 0.19642857142857142, - 0.625, 0.19642857142857142, - 0.625, 0.19642857142857142, - 0.6607142857142857, 0.19642857142857142, - 0.6607142857142857, 0.19642857142857142, - 0.6964285714285714, 0.19642857142857142, - 0.6964285714285714, 0.19642857142857142, - 0.7321428571428571, 0.19642857142857142, - 0.7321428571428571, 0.19642857142857142, - 
0.7678571428571429, 0.19642857142857142, - 0.7678571428571429, 0.19642857142857142, - 0.8035714285714286, 0.19642857142857142, - 0.8035714285714286, 0.19642857142857142, - 0.8392857142857143, 0.19642857142857142, - 0.8392857142857143, 0.19642857142857142, - 0.875, 0.19642857142857142, - 0.875, 0.19642857142857142, - 0.9107142857142857, 0.19642857142857142, - 0.9107142857142857, 0.19642857142857142, - 0.9464285714285714, 0.19642857142857142, - 0.9464285714285714, 0.19642857142857142, - 0.9821428571428571, 0.19642857142857142, - 0.9821428571428571, 0.19642857142857142, - 0.017857142857142856, 0.23214285714285715, - 0.017857142857142856, 0.23214285714285715, - 0.05357142857142857, 0.23214285714285715, - 0.05357142857142857, 0.23214285714285715, - 0.08928571428571429, 0.23214285714285715, - 0.08928571428571429, 0.23214285714285715, - 0.125, 0.23214285714285715, - 0.125, 0.23214285714285715, - 0.16071428571428573, 0.23214285714285715, - 0.16071428571428573, 0.23214285714285715, - 0.19642857142857142, 0.23214285714285715, - 0.19642857142857142, 0.23214285714285715, - 0.23214285714285715, 0.23214285714285715, - 0.23214285714285715, 0.23214285714285715, - 0.26785714285714285, 0.23214285714285715, - 0.26785714285714285, 0.23214285714285715, - 0.30357142857142855, 0.23214285714285715, - 0.30357142857142855, 0.23214285714285715, - 0.3392857142857143, 0.23214285714285715, - 0.3392857142857143, 0.23214285714285715, - 0.375, 0.23214285714285715, - 0.375, 0.23214285714285715, - 0.4107142857142857, 0.23214285714285715, - 0.4107142857142857, 0.23214285714285715, - 0.44642857142857145, 0.23214285714285715, - 0.44642857142857145, 0.23214285714285715, - 0.48214285714285715, 0.23214285714285715, - 0.48214285714285715, 0.23214285714285715, - 0.5178571428571429, 0.23214285714285715, - 0.5178571428571429, 0.23214285714285715, - 0.5535714285714286, 0.23214285714285715, - 0.5535714285714286, 0.23214285714285715, - 0.5892857142857143, 0.23214285714285715, - 0.5892857142857143, 
0.23214285714285715, - 0.625, 0.23214285714285715, - 0.625, 0.23214285714285715, - 0.6607142857142857, 0.23214285714285715, - 0.6607142857142857, 0.23214285714285715, - 0.6964285714285714, 0.23214285714285715, - 0.6964285714285714, 0.23214285714285715, - 0.7321428571428571, 0.23214285714285715, - 0.7321428571428571, 0.23214285714285715, - 0.7678571428571429, 0.23214285714285715, - 0.7678571428571429, 0.23214285714285715, - 0.8035714285714286, 0.23214285714285715, - 0.8035714285714286, 0.23214285714285715, - 0.8392857142857143, 0.23214285714285715, - 0.8392857142857143, 0.23214285714285715, - 0.875, 0.23214285714285715, - 0.875, 0.23214285714285715, - 0.9107142857142857, 0.23214285714285715, - 0.9107142857142857, 0.23214285714285715, - 0.9464285714285714, 0.23214285714285715, - 0.9464285714285714, 0.23214285714285715, - 0.9821428571428571, 0.23214285714285715, - 0.9821428571428571, 0.23214285714285715, - 0.017857142857142856, 0.26785714285714285, - 0.017857142857142856, 0.26785714285714285, - 0.05357142857142857, 0.26785714285714285, - 0.05357142857142857, 0.26785714285714285, - 0.08928571428571429, 0.26785714285714285, - 0.08928571428571429, 0.26785714285714285, - 0.125, 0.26785714285714285, - 0.125, 0.26785714285714285, - 0.16071428571428573, 0.26785714285714285, - 0.16071428571428573, 0.26785714285714285, - 0.19642857142857142, 0.26785714285714285, - 0.19642857142857142, 0.26785714285714285, - 0.23214285714285715, 0.26785714285714285, - 0.23214285714285715, 0.26785714285714285, - 0.26785714285714285, 0.26785714285714285, - 0.26785714285714285, 0.26785714285714285, - 0.30357142857142855, 0.26785714285714285, - 0.30357142857142855, 0.26785714285714285, - 0.3392857142857143, 0.26785714285714285, - 0.3392857142857143, 0.26785714285714285, - 0.375, 0.26785714285714285, - 0.375, 0.26785714285714285, - 0.4107142857142857, 0.26785714285714285, - 0.4107142857142857, 0.26785714285714285, - 0.44642857142857145, 0.26785714285714285, - 0.44642857142857145, 
0.26785714285714285, - 0.48214285714285715, 0.26785714285714285, - 0.48214285714285715, 0.26785714285714285, - 0.5178571428571429, 0.26785714285714285, - 0.5178571428571429, 0.26785714285714285, - 0.5535714285714286, 0.26785714285714285, - 0.5535714285714286, 0.26785714285714285, - 0.5892857142857143, 0.26785714285714285, - 0.5892857142857143, 0.26785714285714285, - 0.625, 0.26785714285714285, - 0.625, 0.26785714285714285, - 0.6607142857142857, 0.26785714285714285, - 0.6607142857142857, 0.26785714285714285, - 0.6964285714285714, 0.26785714285714285, - 0.6964285714285714, 0.26785714285714285, - 0.7321428571428571, 0.26785714285714285, - 0.7321428571428571, 0.26785714285714285, - 0.7678571428571429, 0.26785714285714285, - 0.7678571428571429, 0.26785714285714285, - 0.8035714285714286, 0.26785714285714285, - 0.8035714285714286, 0.26785714285714285, - 0.8392857142857143, 0.26785714285714285, - 0.8392857142857143, 0.26785714285714285, - 0.875, 0.26785714285714285, - 0.875, 0.26785714285714285, - 0.9107142857142857, 0.26785714285714285, - 0.9107142857142857, 0.26785714285714285, - 0.9464285714285714, 0.26785714285714285, - 0.9464285714285714, 0.26785714285714285, - 0.9821428571428571, 0.26785714285714285, - 0.9821428571428571, 0.26785714285714285, - 0.017857142857142856, 0.30357142857142855, - 0.017857142857142856, 0.30357142857142855, - 0.05357142857142857, 0.30357142857142855, - 0.05357142857142857, 0.30357142857142855, - 0.08928571428571429, 0.30357142857142855, - 0.08928571428571429, 0.30357142857142855, - 0.125, 0.30357142857142855, - 0.125, 0.30357142857142855, - 0.16071428571428573, 0.30357142857142855, - 0.16071428571428573, 0.30357142857142855, - 0.19642857142857142, 0.30357142857142855, - 0.19642857142857142, 0.30357142857142855, - 0.23214285714285715, 0.30357142857142855, - 0.23214285714285715, 0.30357142857142855, - 0.26785714285714285, 0.30357142857142855, - 0.26785714285714285, 0.30357142857142855, - 0.30357142857142855, 0.30357142857142855, - 
0.30357142857142855, 0.30357142857142855, - 0.3392857142857143, 0.30357142857142855, - 0.3392857142857143, 0.30357142857142855, - 0.375, 0.30357142857142855, - 0.375, 0.30357142857142855, - 0.4107142857142857, 0.30357142857142855, - 0.4107142857142857, 0.30357142857142855, - 0.44642857142857145, 0.30357142857142855, - 0.44642857142857145, 0.30357142857142855, - 0.48214285714285715, 0.30357142857142855, - 0.48214285714285715, 0.30357142857142855, - 0.5178571428571429, 0.30357142857142855, - 0.5178571428571429, 0.30357142857142855, - 0.5535714285714286, 0.30357142857142855, - 0.5535714285714286, 0.30357142857142855, - 0.5892857142857143, 0.30357142857142855, - 0.5892857142857143, 0.30357142857142855, - 0.625, 0.30357142857142855, - 0.625, 0.30357142857142855, - 0.6607142857142857, 0.30357142857142855, - 0.6607142857142857, 0.30357142857142855, - 0.6964285714285714, 0.30357142857142855, - 0.6964285714285714, 0.30357142857142855, - 0.7321428571428571, 0.30357142857142855, - 0.7321428571428571, 0.30357142857142855, - 0.7678571428571429, 0.30357142857142855, - 0.7678571428571429, 0.30357142857142855, - 0.8035714285714286, 0.30357142857142855, - 0.8035714285714286, 0.30357142857142855, - 0.8392857142857143, 0.30357142857142855, - 0.8392857142857143, 0.30357142857142855, - 0.875, 0.30357142857142855, - 0.875, 0.30357142857142855, - 0.9107142857142857, 0.30357142857142855, - 0.9107142857142857, 0.30357142857142855, - 0.9464285714285714, 0.30357142857142855, - 0.9464285714285714, 0.30357142857142855, - 0.9821428571428571, 0.30357142857142855, - 0.9821428571428571, 0.30357142857142855, - 0.017857142857142856, 0.3392857142857143, - 0.017857142857142856, 0.3392857142857143, - 0.05357142857142857, 0.3392857142857143, - 0.05357142857142857, 0.3392857142857143, - 0.08928571428571429, 0.3392857142857143, - 0.08928571428571429, 0.3392857142857143, - 0.125, 0.3392857142857143, - 0.125, 0.3392857142857143, - 0.16071428571428573, 0.3392857142857143, - 0.16071428571428573, 
0.3392857142857143, - 0.19642857142857142, 0.3392857142857143, - 0.19642857142857142, 0.3392857142857143, - 0.23214285714285715, 0.3392857142857143, - 0.23214285714285715, 0.3392857142857143, - 0.26785714285714285, 0.3392857142857143, - 0.26785714285714285, 0.3392857142857143, - 0.30357142857142855, 0.3392857142857143, - 0.30357142857142855, 0.3392857142857143, - 0.3392857142857143, 0.3392857142857143, - 0.3392857142857143, 0.3392857142857143, - 0.375, 0.3392857142857143, - 0.375, 0.3392857142857143, - 0.4107142857142857, 0.3392857142857143, - 0.4107142857142857, 0.3392857142857143, - 0.44642857142857145, 0.3392857142857143, - 0.44642857142857145, 0.3392857142857143, - 0.48214285714285715, 0.3392857142857143, - 0.48214285714285715, 0.3392857142857143, - 0.5178571428571429, 0.3392857142857143, - 0.5178571428571429, 0.3392857142857143, - 0.5535714285714286, 0.3392857142857143, - 0.5535714285714286, 0.3392857142857143, - 0.5892857142857143, 0.3392857142857143, - 0.5892857142857143, 0.3392857142857143, - 0.625, 0.3392857142857143, - 0.625, 0.3392857142857143, - 0.6607142857142857, 0.3392857142857143, - 0.6607142857142857, 0.3392857142857143, - 0.6964285714285714, 0.3392857142857143, - 0.6964285714285714, 0.3392857142857143, - 0.7321428571428571, 0.3392857142857143, - 0.7321428571428571, 0.3392857142857143, - 0.7678571428571429, 0.3392857142857143, - 0.7678571428571429, 0.3392857142857143, - 0.8035714285714286, 0.3392857142857143, - 0.8035714285714286, 0.3392857142857143, - 0.8392857142857143, 0.3392857142857143, - 0.8392857142857143, 0.3392857142857143, - 0.875, 0.3392857142857143, - 0.875, 0.3392857142857143, - 0.9107142857142857, 0.3392857142857143, - 0.9107142857142857, 0.3392857142857143, - 0.9464285714285714, 0.3392857142857143, - 0.9464285714285714, 0.3392857142857143, - 0.9821428571428571, 0.3392857142857143, - 0.9821428571428571, 0.3392857142857143, - 0.017857142857142856, 0.375, - 0.017857142857142856, 0.375, - 0.05357142857142857, 0.375, - 
0.05357142857142857, 0.375, - 0.08928571428571429, 0.375, - 0.08928571428571429, 0.375, - 0.125, 0.375, - 0.125, 0.375, - 0.16071428571428573, 0.375, - 0.16071428571428573, 0.375, - 0.19642857142857142, 0.375, - 0.19642857142857142, 0.375, - 0.23214285714285715, 0.375, - 0.23214285714285715, 0.375, - 0.26785714285714285, 0.375, - 0.26785714285714285, 0.375, - 0.30357142857142855, 0.375, - 0.30357142857142855, 0.375, - 0.3392857142857143, 0.375, - 0.3392857142857143, 0.375, - 0.375, 0.375, - 0.375, 0.375, - 0.4107142857142857, 0.375, - 0.4107142857142857, 0.375, - 0.44642857142857145, 0.375, - 0.44642857142857145, 0.375, - 0.48214285714285715, 0.375, - 0.48214285714285715, 0.375, - 0.5178571428571429, 0.375, - 0.5178571428571429, 0.375, - 0.5535714285714286, 0.375, - 0.5535714285714286, 0.375, - 0.5892857142857143, 0.375, - 0.5892857142857143, 0.375, - 0.625, 0.375, - 0.625, 0.375, - 0.6607142857142857, 0.375, - 0.6607142857142857, 0.375, - 0.6964285714285714, 0.375, - 0.6964285714285714, 0.375, - 0.7321428571428571, 0.375, - 0.7321428571428571, 0.375, - 0.7678571428571429, 0.375, - 0.7678571428571429, 0.375, - 0.8035714285714286, 0.375, - 0.8035714285714286, 0.375, - 0.8392857142857143, 0.375, - 0.8392857142857143, 0.375, - 0.875, 0.375, - 0.875, 0.375, - 0.9107142857142857, 0.375, - 0.9107142857142857, 0.375, - 0.9464285714285714, 0.375, - 0.9464285714285714, 0.375, - 0.9821428571428571, 0.375, - 0.9821428571428571, 0.375, - 0.017857142857142856, 0.4107142857142857, - 0.017857142857142856, 0.4107142857142857, - 0.05357142857142857, 0.4107142857142857, - 0.05357142857142857, 0.4107142857142857, - 0.08928571428571429, 0.4107142857142857, - 0.08928571428571429, 0.4107142857142857, - 0.125, 0.4107142857142857, - 0.125, 0.4107142857142857, - 0.16071428571428573, 0.4107142857142857, - 0.16071428571428573, 0.4107142857142857, - 0.19642857142857142, 0.4107142857142857, - 0.19642857142857142, 0.4107142857142857, - 0.23214285714285715, 0.4107142857142857, - 
0.23214285714285715, 0.4107142857142857, - 0.26785714285714285, 0.4107142857142857, - 0.26785714285714285, 0.4107142857142857, - 0.30357142857142855, 0.4107142857142857, - 0.30357142857142855, 0.4107142857142857, - 0.3392857142857143, 0.4107142857142857, - 0.3392857142857143, 0.4107142857142857, - 0.375, 0.4107142857142857, - 0.375, 0.4107142857142857, - 0.4107142857142857, 0.4107142857142857, - 0.4107142857142857, 0.4107142857142857, - 0.44642857142857145, 0.4107142857142857, - 0.44642857142857145, 0.4107142857142857, - 0.48214285714285715, 0.4107142857142857, - 0.48214285714285715, 0.4107142857142857, - 0.5178571428571429, 0.4107142857142857, - 0.5178571428571429, 0.4107142857142857, - 0.5535714285714286, 0.4107142857142857, - 0.5535714285714286, 0.4107142857142857, - 0.5892857142857143, 0.4107142857142857, - 0.5892857142857143, 0.4107142857142857, - 0.625, 0.4107142857142857, - 0.625, 0.4107142857142857, - 0.6607142857142857, 0.4107142857142857, - 0.6607142857142857, 0.4107142857142857, - 0.6964285714285714, 0.4107142857142857, - 0.6964285714285714, 0.4107142857142857, - 0.7321428571428571, 0.4107142857142857, - 0.7321428571428571, 0.4107142857142857, - 0.7678571428571429, 0.4107142857142857, - 0.7678571428571429, 0.4107142857142857, - 0.8035714285714286, 0.4107142857142857, - 0.8035714285714286, 0.4107142857142857, - 0.8392857142857143, 0.4107142857142857, - 0.8392857142857143, 0.4107142857142857, - 0.875, 0.4107142857142857, - 0.875, 0.4107142857142857, - 0.9107142857142857, 0.4107142857142857, - 0.9107142857142857, 0.4107142857142857, - 0.9464285714285714, 0.4107142857142857, - 0.9464285714285714, 0.4107142857142857, - 0.9821428571428571, 0.4107142857142857, - 0.9821428571428571, 0.4107142857142857, - 0.017857142857142856, 0.44642857142857145, - 0.017857142857142856, 0.44642857142857145, - 0.05357142857142857, 0.44642857142857145, - 0.05357142857142857, 0.44642857142857145, - 0.08928571428571429, 0.44642857142857145, - 0.08928571428571429, 
0.44642857142857145, - 0.125, 0.44642857142857145, - 0.125, 0.44642857142857145, - 0.16071428571428573, 0.44642857142857145, - 0.16071428571428573, 0.44642857142857145, - 0.19642857142857142, 0.44642857142857145, - 0.19642857142857142, 0.44642857142857145, - 0.23214285714285715, 0.44642857142857145, - 0.23214285714285715, 0.44642857142857145, - 0.26785714285714285, 0.44642857142857145, - 0.26785714285714285, 0.44642857142857145, - 0.30357142857142855, 0.44642857142857145, - 0.30357142857142855, 0.44642857142857145, - 0.3392857142857143, 0.44642857142857145, - 0.3392857142857143, 0.44642857142857145, - 0.375, 0.44642857142857145, - 0.375, 0.44642857142857145, - 0.4107142857142857, 0.44642857142857145, - 0.4107142857142857, 0.44642857142857145, - 0.44642857142857145, 0.44642857142857145, - 0.44642857142857145, 0.44642857142857145, - 0.48214285714285715, 0.44642857142857145, - 0.48214285714285715, 0.44642857142857145, - 0.5178571428571429, 0.44642857142857145, - 0.5178571428571429, 0.44642857142857145, - 0.5535714285714286, 0.44642857142857145, - 0.5535714285714286, 0.44642857142857145, - 0.5892857142857143, 0.44642857142857145, - 0.5892857142857143, 0.44642857142857145, - 0.625, 0.44642857142857145, - 0.625, 0.44642857142857145, - 0.6607142857142857, 0.44642857142857145, - 0.6607142857142857, 0.44642857142857145, - 0.6964285714285714, 0.44642857142857145, - 0.6964285714285714, 0.44642857142857145, - 0.7321428571428571, 0.44642857142857145, - 0.7321428571428571, 0.44642857142857145, - 0.7678571428571429, 0.44642857142857145, - 0.7678571428571429, 0.44642857142857145, - 0.8035714285714286, 0.44642857142857145, - 0.8035714285714286, 0.44642857142857145, - 0.8392857142857143, 0.44642857142857145, - 0.8392857142857143, 0.44642857142857145, - 0.875, 0.44642857142857145, - 0.875, 0.44642857142857145, - 0.9107142857142857, 0.44642857142857145, - 0.9107142857142857, 0.44642857142857145, - 0.9464285714285714, 0.44642857142857145, - 0.9464285714285714, 0.44642857142857145, - 
0.9821428571428571, 0.44642857142857145, - 0.9821428571428571, 0.44642857142857145, - 0.017857142857142856, 0.48214285714285715, - 0.017857142857142856, 0.48214285714285715, - 0.05357142857142857, 0.48214285714285715, - 0.05357142857142857, 0.48214285714285715, - 0.08928571428571429, 0.48214285714285715, - 0.08928571428571429, 0.48214285714285715, - 0.125, 0.48214285714285715, - 0.125, 0.48214285714285715, - 0.16071428571428573, 0.48214285714285715, - 0.16071428571428573, 0.48214285714285715, - 0.19642857142857142, 0.48214285714285715, - 0.19642857142857142, 0.48214285714285715, - 0.23214285714285715, 0.48214285714285715, - 0.23214285714285715, 0.48214285714285715, - 0.26785714285714285, 0.48214285714285715, - 0.26785714285714285, 0.48214285714285715, - 0.30357142857142855, 0.48214285714285715, - 0.30357142857142855, 0.48214285714285715, - 0.3392857142857143, 0.48214285714285715, - 0.3392857142857143, 0.48214285714285715, - 0.375, 0.48214285714285715, - 0.375, 0.48214285714285715, - 0.4107142857142857, 0.48214285714285715, - 0.4107142857142857, 0.48214285714285715, - 0.44642857142857145, 0.48214285714285715, - 0.44642857142857145, 0.48214285714285715, - 0.48214285714285715, 0.48214285714285715, - 0.48214285714285715, 0.48214285714285715, - 0.5178571428571429, 0.48214285714285715, - 0.5178571428571429, 0.48214285714285715, - 0.5535714285714286, 0.48214285714285715, - 0.5535714285714286, 0.48214285714285715, - 0.5892857142857143, 0.48214285714285715, - 0.5892857142857143, 0.48214285714285715, - 0.625, 0.48214285714285715, - 0.625, 0.48214285714285715, - 0.6607142857142857, 0.48214285714285715, - 0.6607142857142857, 0.48214285714285715, - 0.6964285714285714, 0.48214285714285715, - 0.6964285714285714, 0.48214285714285715, - 0.7321428571428571, 0.48214285714285715, - 0.7321428571428571, 0.48214285714285715, - 0.7678571428571429, 0.48214285714285715, - 0.7678571428571429, 0.48214285714285715, - 0.8035714285714286, 0.48214285714285715, - 0.8035714285714286, 
0.48214285714285715, - 0.8392857142857143, 0.48214285714285715, - 0.8392857142857143, 0.48214285714285715, - 0.875, 0.48214285714285715, - 0.875, 0.48214285714285715, - 0.9107142857142857, 0.48214285714285715, - 0.9107142857142857, 0.48214285714285715, - 0.9464285714285714, 0.48214285714285715, - 0.9464285714285714, 0.48214285714285715, - 0.9821428571428571, 0.48214285714285715, - 0.9821428571428571, 0.48214285714285715, - 0.017857142857142856, 0.5178571428571429, - 0.017857142857142856, 0.5178571428571429, - 0.05357142857142857, 0.5178571428571429, - 0.05357142857142857, 0.5178571428571429, - 0.08928571428571429, 0.5178571428571429, - 0.08928571428571429, 0.5178571428571429, - 0.125, 0.5178571428571429, - 0.125, 0.5178571428571429, - 0.16071428571428573, 0.5178571428571429, - 0.16071428571428573, 0.5178571428571429, - 0.19642857142857142, 0.5178571428571429, - 0.19642857142857142, 0.5178571428571429, - 0.23214285714285715, 0.5178571428571429, - 0.23214285714285715, 0.5178571428571429, - 0.26785714285714285, 0.5178571428571429, - 0.26785714285714285, 0.5178571428571429, - 0.30357142857142855, 0.5178571428571429, - 0.30357142857142855, 0.5178571428571429, - 0.3392857142857143, 0.5178571428571429, - 0.3392857142857143, 0.5178571428571429, - 0.375, 0.5178571428571429, - 0.375, 0.5178571428571429, - 0.4107142857142857, 0.5178571428571429, - 0.4107142857142857, 0.5178571428571429, - 0.44642857142857145, 0.5178571428571429, - 0.44642857142857145, 0.5178571428571429, - 0.48214285714285715, 0.5178571428571429, - 0.48214285714285715, 0.5178571428571429, - 0.5178571428571429, 0.5178571428571429, - 0.5178571428571429, 0.5178571428571429, - 0.5535714285714286, 0.5178571428571429, - 0.5535714285714286, 0.5178571428571429, - 0.5892857142857143, 0.5178571428571429, - 0.5892857142857143, 0.5178571428571429, - 0.625, 0.5178571428571429, - 0.625, 0.5178571428571429, - 0.6607142857142857, 0.5178571428571429, - 0.6607142857142857, 0.5178571428571429, - 0.6964285714285714, 
0.5178571428571429, - 0.6964285714285714, 0.5178571428571429, - 0.7321428571428571, 0.5178571428571429, - 0.7321428571428571, 0.5178571428571429, - 0.7678571428571429, 0.5178571428571429, - 0.7678571428571429, 0.5178571428571429, - 0.8035714285714286, 0.5178571428571429, - 0.8035714285714286, 0.5178571428571429, - 0.8392857142857143, 0.5178571428571429, - 0.8392857142857143, 0.5178571428571429, - 0.875, 0.5178571428571429, - 0.875, 0.5178571428571429, - 0.9107142857142857, 0.5178571428571429, - 0.9107142857142857, 0.5178571428571429, - 0.9464285714285714, 0.5178571428571429, - 0.9464285714285714, 0.5178571428571429, - 0.9821428571428571, 0.5178571428571429, - 0.9821428571428571, 0.5178571428571429, - 0.017857142857142856, 0.5535714285714286, - 0.017857142857142856, 0.5535714285714286, - 0.05357142857142857, 0.5535714285714286, - 0.05357142857142857, 0.5535714285714286, - 0.08928571428571429, 0.5535714285714286, - 0.08928571428571429, 0.5535714285714286, - 0.125, 0.5535714285714286, - 0.125, 0.5535714285714286, - 0.16071428571428573, 0.5535714285714286, - 0.16071428571428573, 0.5535714285714286, - 0.19642857142857142, 0.5535714285714286, - 0.19642857142857142, 0.5535714285714286, - 0.23214285714285715, 0.5535714285714286, - 0.23214285714285715, 0.5535714285714286, - 0.26785714285714285, 0.5535714285714286, - 0.26785714285714285, 0.5535714285714286, - 0.30357142857142855, 0.5535714285714286, - 0.30357142857142855, 0.5535714285714286, - 0.3392857142857143, 0.5535714285714286, - 0.3392857142857143, 0.5535714285714286, - 0.375, 0.5535714285714286, - 0.375, 0.5535714285714286, - 0.4107142857142857, 0.5535714285714286, - 0.4107142857142857, 0.5535714285714286, - 0.44642857142857145, 0.5535714285714286, - 0.44642857142857145, 0.5535714285714286, - 0.48214285714285715, 0.5535714285714286, - 0.48214285714285715, 0.5535714285714286, - 0.5178571428571429, 0.5535714285714286, - 0.5178571428571429, 0.5535714285714286, - 0.5535714285714286, 0.5535714285714286, - 
0.5535714285714286, 0.5535714285714286, - 0.5892857142857143, 0.5535714285714286, - 0.5892857142857143, 0.5535714285714286, - 0.625, 0.5535714285714286, - 0.625, 0.5535714285714286, - 0.6607142857142857, 0.5535714285714286, - 0.6607142857142857, 0.5535714285714286, - 0.6964285714285714, 0.5535714285714286, - 0.6964285714285714, 0.5535714285714286, - 0.7321428571428571, 0.5535714285714286, - 0.7321428571428571, 0.5535714285714286, - 0.7678571428571429, 0.5535714285714286, - 0.7678571428571429, 0.5535714285714286, - 0.8035714285714286, 0.5535714285714286, - 0.8035714285714286, 0.5535714285714286, - 0.8392857142857143, 0.5535714285714286, - 0.8392857142857143, 0.5535714285714286, - 0.875, 0.5535714285714286, - 0.875, 0.5535714285714286, - 0.9107142857142857, 0.5535714285714286, - 0.9107142857142857, 0.5535714285714286, - 0.9464285714285714, 0.5535714285714286, - 0.9464285714285714, 0.5535714285714286, - 0.9821428571428571, 0.5535714285714286, - 0.9821428571428571, 0.5535714285714286, - 0.017857142857142856, 0.5892857142857143, - 0.017857142857142856, 0.5892857142857143, - 0.05357142857142857, 0.5892857142857143, - 0.05357142857142857, 0.5892857142857143, - 0.08928571428571429, 0.5892857142857143, - 0.08928571428571429, 0.5892857142857143, - 0.125, 0.5892857142857143, - 0.125, 0.5892857142857143, - 0.16071428571428573, 0.5892857142857143, - 0.16071428571428573, 0.5892857142857143, - 0.19642857142857142, 0.5892857142857143, - 0.19642857142857142, 0.5892857142857143, - 0.23214285714285715, 0.5892857142857143, - 0.23214285714285715, 0.5892857142857143, - 0.26785714285714285, 0.5892857142857143, - 0.26785714285714285, 0.5892857142857143, - 0.30357142857142855, 0.5892857142857143, - 0.30357142857142855, 0.5892857142857143, - 0.3392857142857143, 0.5892857142857143, - 0.3392857142857143, 0.5892857142857143, - 0.375, 0.5892857142857143, - 0.375, 0.5892857142857143, - 0.4107142857142857, 0.5892857142857143, - 0.4107142857142857, 0.5892857142857143, - 0.44642857142857145, 
0.5892857142857143, - 0.44642857142857145, 0.5892857142857143, - 0.48214285714285715, 0.5892857142857143, - 0.48214285714285715, 0.5892857142857143, - 0.5178571428571429, 0.5892857142857143, - 0.5178571428571429, 0.5892857142857143, - 0.5535714285714286, 0.5892857142857143, - 0.5535714285714286, 0.5892857142857143, - 0.5892857142857143, 0.5892857142857143, - 0.5892857142857143, 0.5892857142857143, - 0.625, 0.5892857142857143, - 0.625, 0.5892857142857143, - 0.6607142857142857, 0.5892857142857143, - 0.6607142857142857, 0.5892857142857143, - 0.6964285714285714, 0.5892857142857143, - 0.6964285714285714, 0.5892857142857143, - 0.7321428571428571, 0.5892857142857143, - 0.7321428571428571, 0.5892857142857143, - 0.7678571428571429, 0.5892857142857143, - 0.7678571428571429, 0.5892857142857143, - 0.8035714285714286, 0.5892857142857143, - 0.8035714285714286, 0.5892857142857143, - 0.8392857142857143, 0.5892857142857143, - 0.8392857142857143, 0.5892857142857143, - 0.875, 0.5892857142857143, - 0.875, 0.5892857142857143, - 0.9107142857142857, 0.5892857142857143, - 0.9107142857142857, 0.5892857142857143, - 0.9464285714285714, 0.5892857142857143, - 0.9464285714285714, 0.5892857142857143, - 0.9821428571428571, 0.5892857142857143, - 0.9821428571428571, 0.5892857142857143, - 0.017857142857142856, 0.625, - 0.017857142857142856, 0.625, - 0.05357142857142857, 0.625, - 0.05357142857142857, 0.625, - 0.08928571428571429, 0.625, - 0.08928571428571429, 0.625, - 0.125, 0.625, - 0.125, 0.625, - 0.16071428571428573, 0.625, - 0.16071428571428573, 0.625, - 0.19642857142857142, 0.625, - 0.19642857142857142, 0.625, - 0.23214285714285715, 0.625, - 0.23214285714285715, 0.625, - 0.26785714285714285, 0.625, - 0.26785714285714285, 0.625, - 0.30357142857142855, 0.625, - 0.30357142857142855, 0.625, - 0.3392857142857143, 0.625, - 0.3392857142857143, 0.625, - 0.375, 0.625, - 0.375, 0.625, - 0.4107142857142857, 0.625, - 0.4107142857142857, 0.625, - 0.44642857142857145, 0.625, - 0.44642857142857145, 0.625, - 
0.48214285714285715, 0.625, - 0.48214285714285715, 0.625, - 0.5178571428571429, 0.625, - 0.5178571428571429, 0.625, - 0.5535714285714286, 0.625, - 0.5535714285714286, 0.625, - 0.5892857142857143, 0.625, - 0.5892857142857143, 0.625, - 0.625, 0.625, - 0.625, 0.625, - 0.6607142857142857, 0.625, - 0.6607142857142857, 0.625, - 0.6964285714285714, 0.625, - 0.6964285714285714, 0.625, - 0.7321428571428571, 0.625, - 0.7321428571428571, 0.625, - 0.7678571428571429, 0.625, - 0.7678571428571429, 0.625, - 0.8035714285714286, 0.625, - 0.8035714285714286, 0.625, - 0.8392857142857143, 0.625, - 0.8392857142857143, 0.625, - 0.875, 0.625, - 0.875, 0.625, - 0.9107142857142857, 0.625, - 0.9107142857142857, 0.625, - 0.9464285714285714, 0.625, - 0.9464285714285714, 0.625, - 0.9821428571428571, 0.625, - 0.9821428571428571, 0.625, - 0.017857142857142856, 0.6607142857142857, - 0.017857142857142856, 0.6607142857142857, - 0.05357142857142857, 0.6607142857142857, - 0.05357142857142857, 0.6607142857142857, - 0.08928571428571429, 0.6607142857142857, - 0.08928571428571429, 0.6607142857142857, - 0.125, 0.6607142857142857, - 0.125, 0.6607142857142857, - 0.16071428571428573, 0.6607142857142857, - 0.16071428571428573, 0.6607142857142857, - 0.19642857142857142, 0.6607142857142857, - 0.19642857142857142, 0.6607142857142857, - 0.23214285714285715, 0.6607142857142857, - 0.23214285714285715, 0.6607142857142857, - 0.26785714285714285, 0.6607142857142857, - 0.26785714285714285, 0.6607142857142857, - 0.30357142857142855, 0.6607142857142857, - 0.30357142857142855, 0.6607142857142857, - 0.3392857142857143, 0.6607142857142857, - 0.3392857142857143, 0.6607142857142857, - 0.375, 0.6607142857142857, - 0.375, 0.6607142857142857, - 0.4107142857142857, 0.6607142857142857, - 0.4107142857142857, 0.6607142857142857, - 0.44642857142857145, 0.6607142857142857, - 0.44642857142857145, 0.6607142857142857, - 0.48214285714285715, 0.6607142857142857, - 0.48214285714285715, 0.6607142857142857, - 0.5178571428571429, 
0.6607142857142857, - 0.5178571428571429, 0.6607142857142857, - 0.5535714285714286, 0.6607142857142857, - 0.5535714285714286, 0.6607142857142857, - 0.5892857142857143, 0.6607142857142857, - 0.5892857142857143, 0.6607142857142857, - 0.625, 0.6607142857142857, - 0.625, 0.6607142857142857, - 0.6607142857142857, 0.6607142857142857, - 0.6607142857142857, 0.6607142857142857, - 0.6964285714285714, 0.6607142857142857, - 0.6964285714285714, 0.6607142857142857, - 0.7321428571428571, 0.6607142857142857, - 0.7321428571428571, 0.6607142857142857, - 0.7678571428571429, 0.6607142857142857, - 0.7678571428571429, 0.6607142857142857, - 0.8035714285714286, 0.6607142857142857, - 0.8035714285714286, 0.6607142857142857, - 0.8392857142857143, 0.6607142857142857, - 0.8392857142857143, 0.6607142857142857, - 0.875, 0.6607142857142857, - 0.875, 0.6607142857142857, - 0.9107142857142857, 0.6607142857142857, - 0.9107142857142857, 0.6607142857142857, - 0.9464285714285714, 0.6607142857142857, - 0.9464285714285714, 0.6607142857142857, - 0.9821428571428571, 0.6607142857142857, - 0.9821428571428571, 0.6607142857142857, - 0.017857142857142856, 0.6964285714285714, - 0.017857142857142856, 0.6964285714285714, - 0.05357142857142857, 0.6964285714285714, - 0.05357142857142857, 0.6964285714285714, - 0.08928571428571429, 0.6964285714285714, - 0.08928571428571429, 0.6964285714285714, - 0.125, 0.6964285714285714, - 0.125, 0.6964285714285714, - 0.16071428571428573, 0.6964285714285714, - 0.16071428571428573, 0.6964285714285714, - 0.19642857142857142, 0.6964285714285714, - 0.19642857142857142, 0.6964285714285714, - 0.23214285714285715, 0.6964285714285714, - 0.23214285714285715, 0.6964285714285714, - 0.26785714285714285, 0.6964285714285714, - 0.26785714285714285, 0.6964285714285714, - 0.30357142857142855, 0.6964285714285714, - 0.30357142857142855, 0.6964285714285714, - 0.3392857142857143, 0.6964285714285714, - 0.3392857142857143, 0.6964285714285714, - 0.375, 0.6964285714285714, - 0.375, 0.6964285714285714, - 
0.4107142857142857, 0.6964285714285714, - 0.4107142857142857, 0.6964285714285714, - 0.44642857142857145, 0.6964285714285714, - 0.44642857142857145, 0.6964285714285714, - 0.48214285714285715, 0.6964285714285714, - 0.48214285714285715, 0.6964285714285714, - 0.5178571428571429, 0.6964285714285714, - 0.5178571428571429, 0.6964285714285714, - 0.5535714285714286, 0.6964285714285714, - 0.5535714285714286, 0.6964285714285714, - 0.5892857142857143, 0.6964285714285714, - 0.5892857142857143, 0.6964285714285714, - 0.625, 0.6964285714285714, - 0.625, 0.6964285714285714, - 0.6607142857142857, 0.6964285714285714, - 0.6607142857142857, 0.6964285714285714, - 0.6964285714285714, 0.6964285714285714, - 0.6964285714285714, 0.6964285714285714, - 0.7321428571428571, 0.6964285714285714, - 0.7321428571428571, 0.6964285714285714, - 0.7678571428571429, 0.6964285714285714, - 0.7678571428571429, 0.6964285714285714, - 0.8035714285714286, 0.6964285714285714, - 0.8035714285714286, 0.6964285714285714, - 0.8392857142857143, 0.6964285714285714, - 0.8392857142857143, 0.6964285714285714, - 0.875, 0.6964285714285714, - 0.875, 0.6964285714285714, - 0.9107142857142857, 0.6964285714285714, - 0.9107142857142857, 0.6964285714285714, - 0.9464285714285714, 0.6964285714285714, - 0.9464285714285714, 0.6964285714285714, - 0.9821428571428571, 0.6964285714285714, - 0.9821428571428571, 0.6964285714285714, - 0.017857142857142856, 0.7321428571428571, - 0.017857142857142856, 0.7321428571428571, - 0.05357142857142857, 0.7321428571428571, - 0.05357142857142857, 0.7321428571428571, - 0.08928571428571429, 0.7321428571428571, - 0.08928571428571429, 0.7321428571428571, - 0.125, 0.7321428571428571, - 0.125, 0.7321428571428571, - 0.16071428571428573, 0.7321428571428571, - 0.16071428571428573, 0.7321428571428571, - 0.19642857142857142, 0.7321428571428571, - 0.19642857142857142, 0.7321428571428571, - 0.23214285714285715, 0.7321428571428571, - 0.23214285714285715, 0.7321428571428571, - 0.26785714285714285, 0.7321428571428571, - 
0.26785714285714285, 0.7321428571428571, - 0.30357142857142855, 0.7321428571428571, - 0.30357142857142855, 0.7321428571428571, - 0.3392857142857143, 0.7321428571428571, - 0.3392857142857143, 0.7321428571428571, - 0.375, 0.7321428571428571, - 0.375, 0.7321428571428571, - 0.4107142857142857, 0.7321428571428571, - 0.4107142857142857, 0.7321428571428571, - 0.44642857142857145, 0.7321428571428571, - 0.44642857142857145, 0.7321428571428571, - 0.48214285714285715, 0.7321428571428571, - 0.48214285714285715, 0.7321428571428571, - 0.5178571428571429, 0.7321428571428571, - 0.5178571428571429, 0.7321428571428571, - 0.5535714285714286, 0.7321428571428571, - 0.5535714285714286, 0.7321428571428571, - 0.5892857142857143, 0.7321428571428571, - 0.5892857142857143, 0.7321428571428571, - 0.625, 0.7321428571428571, - 0.625, 0.7321428571428571, - 0.6607142857142857, 0.7321428571428571, - 0.6607142857142857, 0.7321428571428571, - 0.6964285714285714, 0.7321428571428571, - 0.6964285714285714, 0.7321428571428571, - 0.7321428571428571, 0.7321428571428571, - 0.7321428571428571, 0.7321428571428571, - 0.7678571428571429, 0.7321428571428571, - 0.7678571428571429, 0.7321428571428571, - 0.8035714285714286, 0.7321428571428571, - 0.8035714285714286, 0.7321428571428571, - 0.8392857142857143, 0.7321428571428571, - 0.8392857142857143, 0.7321428571428571, - 0.875, 0.7321428571428571, - 0.875, 0.7321428571428571, - 0.9107142857142857, 0.7321428571428571, - 0.9107142857142857, 0.7321428571428571, - 0.9464285714285714, 0.7321428571428571, - 0.9464285714285714, 0.7321428571428571, - 0.9821428571428571, 0.7321428571428571, - 0.9821428571428571, 0.7321428571428571, - 0.017857142857142856, 0.7678571428571429, - 0.017857142857142856, 0.7678571428571429, - 0.05357142857142857, 0.7678571428571429, - 0.05357142857142857, 0.7678571428571429, - 0.08928571428571429, 0.7678571428571429, - 0.08928571428571429, 0.7678571428571429, - 0.125, 0.7678571428571429, - 0.125, 0.7678571428571429, - 0.16071428571428573, 
0.7678571428571429, - 0.16071428571428573, 0.7678571428571429, - 0.19642857142857142, 0.7678571428571429, - 0.19642857142857142, 0.7678571428571429, - 0.23214285714285715, 0.7678571428571429, - 0.23214285714285715, 0.7678571428571429, - 0.26785714285714285, 0.7678571428571429, - 0.26785714285714285, 0.7678571428571429, - 0.30357142857142855, 0.7678571428571429, - 0.30357142857142855, 0.7678571428571429, - 0.3392857142857143, 0.7678571428571429, - 0.3392857142857143, 0.7678571428571429, - 0.375, 0.7678571428571429, - 0.375, 0.7678571428571429, - 0.4107142857142857, 0.7678571428571429, - 0.4107142857142857, 0.7678571428571429, - 0.44642857142857145, 0.7678571428571429, - 0.44642857142857145, 0.7678571428571429, - 0.48214285714285715, 0.7678571428571429, - 0.48214285714285715, 0.7678571428571429, - 0.5178571428571429, 0.7678571428571429, - 0.5178571428571429, 0.7678571428571429, - 0.5535714285714286, 0.7678571428571429, - 0.5535714285714286, 0.7678571428571429, - 0.5892857142857143, 0.7678571428571429, - 0.5892857142857143, 0.7678571428571429, - 0.625, 0.7678571428571429, - 0.625, 0.7678571428571429, - 0.6607142857142857, 0.7678571428571429, - 0.6607142857142857, 0.7678571428571429, - 0.6964285714285714, 0.7678571428571429, - 0.6964285714285714, 0.7678571428571429, - 0.7321428571428571, 0.7678571428571429, - 0.7321428571428571, 0.7678571428571429, - 0.7678571428571429, 0.7678571428571429, - 0.7678571428571429, 0.7678571428571429, - 0.8035714285714286, 0.7678571428571429, - 0.8035714285714286, 0.7678571428571429, - 0.8392857142857143, 0.7678571428571429, - 0.8392857142857143, 0.7678571428571429, - 0.875, 0.7678571428571429, - 0.875, 0.7678571428571429, - 0.9107142857142857, 0.7678571428571429, - 0.9107142857142857, 0.7678571428571429, - 0.9464285714285714, 0.7678571428571429, - 0.9464285714285714, 0.7678571428571429, - 0.9821428571428571, 0.7678571428571429, - 0.9821428571428571, 0.7678571428571429, - 0.017857142857142856, 0.8035714285714286, - 0.017857142857142856, 
0.8035714285714286, - 0.05357142857142857, 0.8035714285714286, - 0.05357142857142857, 0.8035714285714286, - 0.08928571428571429, 0.8035714285714286, - 0.08928571428571429, 0.8035714285714286, - 0.125, 0.8035714285714286, - 0.125, 0.8035714285714286, - 0.16071428571428573, 0.8035714285714286, - 0.16071428571428573, 0.8035714285714286, - 0.19642857142857142, 0.8035714285714286, - 0.19642857142857142, 0.8035714285714286, - 0.23214285714285715, 0.8035714285714286, - 0.23214285714285715, 0.8035714285714286, - 0.26785714285714285, 0.8035714285714286, - 0.26785714285714285, 0.8035714285714286, - 0.30357142857142855, 0.8035714285714286, - 0.30357142857142855, 0.8035714285714286, - 0.3392857142857143, 0.8035714285714286, - 0.3392857142857143, 0.8035714285714286, - 0.375, 0.8035714285714286, - 0.375, 0.8035714285714286, - 0.4107142857142857, 0.8035714285714286, - 0.4107142857142857, 0.8035714285714286, - 0.44642857142857145, 0.8035714285714286, - 0.44642857142857145, 0.8035714285714286, - 0.48214285714285715, 0.8035714285714286, - 0.48214285714285715, 0.8035714285714286, - 0.5178571428571429, 0.8035714285714286, - 0.5178571428571429, 0.8035714285714286, - 0.5535714285714286, 0.8035714285714286, - 0.5535714285714286, 0.8035714285714286, - 0.5892857142857143, 0.8035714285714286, - 0.5892857142857143, 0.8035714285714286, - 0.625, 0.8035714285714286, - 0.625, 0.8035714285714286, - 0.6607142857142857, 0.8035714285714286, - 0.6607142857142857, 0.8035714285714286, - 0.6964285714285714, 0.8035714285714286, - 0.6964285714285714, 0.8035714285714286, - 0.7321428571428571, 0.8035714285714286, - 0.7321428571428571, 0.8035714285714286, - 0.7678571428571429, 0.8035714285714286, - 0.7678571428571429, 0.8035714285714286, - 0.8035714285714286, 0.8035714285714286, - 0.8035714285714286, 0.8035714285714286, - 0.8392857142857143, 0.8035714285714286, - 0.8392857142857143, 0.8035714285714286, - 0.875, 0.8035714285714286, - 0.875, 0.8035714285714286, - 0.9107142857142857, 0.8035714285714286, - 
0.9107142857142857, 0.8035714285714286, - 0.9464285714285714, 0.8035714285714286, - 0.9464285714285714, 0.8035714285714286, - 0.9821428571428571, 0.8035714285714286, - 0.9821428571428571, 0.8035714285714286, - 0.017857142857142856, 0.8392857142857143, - 0.017857142857142856, 0.8392857142857143, - 0.05357142857142857, 0.8392857142857143, - 0.05357142857142857, 0.8392857142857143, - 0.08928571428571429, 0.8392857142857143, - 0.08928571428571429, 0.8392857142857143, - 0.125, 0.8392857142857143, - 0.125, 0.8392857142857143, - 0.16071428571428573, 0.8392857142857143, - 0.16071428571428573, 0.8392857142857143, - 0.19642857142857142, 0.8392857142857143, - 0.19642857142857142, 0.8392857142857143, - 0.23214285714285715, 0.8392857142857143, - 0.23214285714285715, 0.8392857142857143, - 0.26785714285714285, 0.8392857142857143, - 0.26785714285714285, 0.8392857142857143, - 0.30357142857142855, 0.8392857142857143, - 0.30357142857142855, 0.8392857142857143, - 0.3392857142857143, 0.8392857142857143, - 0.3392857142857143, 0.8392857142857143, - 0.375, 0.8392857142857143, - 0.375, 0.8392857142857143, - 0.4107142857142857, 0.8392857142857143, - 0.4107142857142857, 0.8392857142857143, - 0.44642857142857145, 0.8392857142857143, - 0.44642857142857145, 0.8392857142857143, - 0.48214285714285715, 0.8392857142857143, - 0.48214285714285715, 0.8392857142857143, - 0.5178571428571429, 0.8392857142857143, - 0.5178571428571429, 0.8392857142857143, - 0.5535714285714286, 0.8392857142857143, - 0.5535714285714286, 0.8392857142857143, - 0.5892857142857143, 0.8392857142857143, - 0.5892857142857143, 0.8392857142857143, - 0.625, 0.8392857142857143, - 0.625, 0.8392857142857143, - 0.6607142857142857, 0.8392857142857143, - 0.6607142857142857, 0.8392857142857143, - 0.6964285714285714, 0.8392857142857143, - 0.6964285714285714, 0.8392857142857143, - 0.7321428571428571, 0.8392857142857143, - 0.7321428571428571, 0.8392857142857143, - 0.7678571428571429, 0.8392857142857143, - 0.7678571428571429, 0.8392857142857143, 
- 0.8035714285714286, 0.8392857142857143, - 0.8035714285714286, 0.8392857142857143, - 0.8392857142857143, 0.8392857142857143, - 0.8392857142857143, 0.8392857142857143, - 0.875, 0.8392857142857143, - 0.875, 0.8392857142857143, - 0.9107142857142857, 0.8392857142857143, - 0.9107142857142857, 0.8392857142857143, - 0.9464285714285714, 0.8392857142857143, - 0.9464285714285714, 0.8392857142857143, - 0.9821428571428571, 0.8392857142857143, - 0.9821428571428571, 0.8392857142857143, - 0.017857142857142856, 0.875, - 0.017857142857142856, 0.875, - 0.05357142857142857, 0.875, - 0.05357142857142857, 0.875, - 0.08928571428571429, 0.875, - 0.08928571428571429, 0.875, - 0.125, 0.875, - 0.125, 0.875, - 0.16071428571428573, 0.875, - 0.16071428571428573, 0.875, - 0.19642857142857142, 0.875, - 0.19642857142857142, 0.875, - 0.23214285714285715, 0.875, - 0.23214285714285715, 0.875, - 0.26785714285714285, 0.875, - 0.26785714285714285, 0.875, - 0.30357142857142855, 0.875, - 0.30357142857142855, 0.875, - 0.3392857142857143, 0.875, - 0.3392857142857143, 0.875, - 0.375, 0.875, - 0.375, 0.875, - 0.4107142857142857, 0.875, - 0.4107142857142857, 0.875, - 0.44642857142857145, 0.875, - 0.44642857142857145, 0.875, - 0.48214285714285715, 0.875, - 0.48214285714285715, 0.875, - 0.5178571428571429, 0.875, - 0.5178571428571429, 0.875, - 0.5535714285714286, 0.875, - 0.5535714285714286, 0.875, - 0.5892857142857143, 0.875, - 0.5892857142857143, 0.875, - 0.625, 0.875, - 0.625, 0.875, - 0.6607142857142857, 0.875, - 0.6607142857142857, 0.875, - 0.6964285714285714, 0.875, - 0.6964285714285714, 0.875, - 0.7321428571428571, 0.875, - 0.7321428571428571, 0.875, - 0.7678571428571429, 0.875, - 0.7678571428571429, 0.875, - 0.8035714285714286, 0.875, - 0.8035714285714286, 0.875, - 0.8392857142857143, 0.875, - 0.8392857142857143, 0.875, - 0.875, 0.875, - 0.875, 0.875, - 0.9107142857142857, 0.875, - 0.9107142857142857, 0.875, - 0.9464285714285714, 0.875, - 0.9464285714285714, 0.875, - 0.9821428571428571, 0.875, - 
0.9821428571428571, 0.875, - 0.017857142857142856, 0.9107142857142857, - 0.017857142857142856, 0.9107142857142857, - 0.05357142857142857, 0.9107142857142857, - 0.05357142857142857, 0.9107142857142857, - 0.08928571428571429, 0.9107142857142857, - 0.08928571428571429, 0.9107142857142857, - 0.125, 0.9107142857142857, - 0.125, 0.9107142857142857, - 0.16071428571428573, 0.9107142857142857, - 0.16071428571428573, 0.9107142857142857, - 0.19642857142857142, 0.9107142857142857, - 0.19642857142857142, 0.9107142857142857, - 0.23214285714285715, 0.9107142857142857, - 0.23214285714285715, 0.9107142857142857, - 0.26785714285714285, 0.9107142857142857, - 0.26785714285714285, 0.9107142857142857, - 0.30357142857142855, 0.9107142857142857, - 0.30357142857142855, 0.9107142857142857, - 0.3392857142857143, 0.9107142857142857, - 0.3392857142857143, 0.9107142857142857, - 0.375, 0.9107142857142857, - 0.375, 0.9107142857142857, - 0.4107142857142857, 0.9107142857142857, - 0.4107142857142857, 0.9107142857142857, - 0.44642857142857145, 0.9107142857142857, - 0.44642857142857145, 0.9107142857142857, - 0.48214285714285715, 0.9107142857142857, - 0.48214285714285715, 0.9107142857142857, - 0.5178571428571429, 0.9107142857142857, - 0.5178571428571429, 0.9107142857142857, - 0.5535714285714286, 0.9107142857142857, - 0.5535714285714286, 0.9107142857142857, - 0.5892857142857143, 0.9107142857142857, - 0.5892857142857143, 0.9107142857142857, - 0.625, 0.9107142857142857, - 0.625, 0.9107142857142857, - 0.6607142857142857, 0.9107142857142857, - 0.6607142857142857, 0.9107142857142857, - 0.6964285714285714, 0.9107142857142857, - 0.6964285714285714, 0.9107142857142857, - 0.7321428571428571, 0.9107142857142857, - 0.7321428571428571, 0.9107142857142857, - 0.7678571428571429, 0.9107142857142857, - 0.7678571428571429, 0.9107142857142857, - 0.8035714285714286, 0.9107142857142857, - 0.8035714285714286, 0.9107142857142857, - 0.8392857142857143, 0.9107142857142857, - 0.8392857142857143, 0.9107142857142857, - 0.875, 
0.9107142857142857, - 0.875, 0.9107142857142857, - 0.9107142857142857, 0.9107142857142857, - 0.9107142857142857, 0.9107142857142857, - 0.9464285714285714, 0.9107142857142857, - 0.9464285714285714, 0.9107142857142857, - 0.9821428571428571, 0.9107142857142857, - 0.9821428571428571, 0.9107142857142857, - 0.017857142857142856, 0.9464285714285714, - 0.017857142857142856, 0.9464285714285714, - 0.05357142857142857, 0.9464285714285714, - 0.05357142857142857, 0.9464285714285714, - 0.08928571428571429, 0.9464285714285714, - 0.08928571428571429, 0.9464285714285714, - 0.125, 0.9464285714285714, - 0.125, 0.9464285714285714, - 0.16071428571428573, 0.9464285714285714, - 0.16071428571428573, 0.9464285714285714, - 0.19642857142857142, 0.9464285714285714, - 0.19642857142857142, 0.9464285714285714, - 0.23214285714285715, 0.9464285714285714, - 0.23214285714285715, 0.9464285714285714, - 0.26785714285714285, 0.9464285714285714, - 0.26785714285714285, 0.9464285714285714, - 0.30357142857142855, 0.9464285714285714, - 0.30357142857142855, 0.9464285714285714, - 0.3392857142857143, 0.9464285714285714, - 0.3392857142857143, 0.9464285714285714, - 0.375, 0.9464285714285714, - 0.375, 0.9464285714285714, - 0.4107142857142857, 0.9464285714285714, - 0.4107142857142857, 0.9464285714285714, - 0.44642857142857145, 0.9464285714285714, - 0.44642857142857145, 0.9464285714285714, - 0.48214285714285715, 0.9464285714285714, - 0.48214285714285715, 0.9464285714285714, - 0.5178571428571429, 0.9464285714285714, - 0.5178571428571429, 0.9464285714285714, - 0.5535714285714286, 0.9464285714285714, - 0.5535714285714286, 0.9464285714285714, - 0.5892857142857143, 0.9464285714285714, - 0.5892857142857143, 0.9464285714285714, - 0.625, 0.9464285714285714, - 0.625, 0.9464285714285714, - 0.6607142857142857, 0.9464285714285714, - 0.6607142857142857, 0.9464285714285714, - 0.6964285714285714, 0.9464285714285714, - 0.6964285714285714, 0.9464285714285714, - 0.7321428571428571, 0.9464285714285714, - 0.7321428571428571, 
0.9464285714285714, - 0.7678571428571429, 0.9464285714285714, - 0.7678571428571429, 0.9464285714285714, - 0.8035714285714286, 0.9464285714285714, - 0.8035714285714286, 0.9464285714285714, - 0.8392857142857143, 0.9464285714285714, - 0.8392857142857143, 0.9464285714285714, - 0.875, 0.9464285714285714, - 0.875, 0.9464285714285714, - 0.9107142857142857, 0.9464285714285714, - 0.9107142857142857, 0.9464285714285714, - 0.9464285714285714, 0.9464285714285714, - 0.9464285714285714, 0.9464285714285714, - 0.9821428571428571, 0.9464285714285714, - 0.9821428571428571, 0.9464285714285714, - 0.017857142857142856, 0.9821428571428571, - 0.017857142857142856, 0.9821428571428571, - 0.05357142857142857, 0.9821428571428571, - 0.05357142857142857, 0.9821428571428571, - 0.08928571428571429, 0.9821428571428571, - 0.08928571428571429, 0.9821428571428571, - 0.125, 0.9821428571428571, - 0.125, 0.9821428571428571, - 0.16071428571428573, 0.9821428571428571, - 0.16071428571428573, 0.9821428571428571, - 0.19642857142857142, 0.9821428571428571, - 0.19642857142857142, 0.9821428571428571, - 0.23214285714285715, 0.9821428571428571, - 0.23214285714285715, 0.9821428571428571, - 0.26785714285714285, 0.9821428571428571, - 0.26785714285714285, 0.9821428571428571, - 0.30357142857142855, 0.9821428571428571, - 0.30357142857142855, 0.9821428571428571, - 0.3392857142857143, 0.9821428571428571, - 0.3392857142857143, 0.9821428571428571, - 0.375, 0.9821428571428571, - 0.375, 0.9821428571428571, - 0.4107142857142857, 0.9821428571428571, - 0.4107142857142857, 0.9821428571428571, - 0.44642857142857145, 0.9821428571428571, - 0.44642857142857145, 0.9821428571428571, - 0.48214285714285715, 0.9821428571428571, - 0.48214285714285715, 0.9821428571428571, - 0.5178571428571429, 0.9821428571428571, - 0.5178571428571429, 0.9821428571428571, - 0.5535714285714286, 0.9821428571428571, - 0.5535714285714286, 0.9821428571428571, - 0.5892857142857143, 0.9821428571428571, - 0.5892857142857143, 0.9821428571428571, - 0.625, 
0.9821428571428571, - 0.625, 0.9821428571428571, - 0.6607142857142857, 0.9821428571428571, - 0.6607142857142857, 0.9821428571428571, - 0.6964285714285714, 0.9821428571428571, - 0.6964285714285714, 0.9821428571428571, - 0.7321428571428571, 0.9821428571428571, - 0.7321428571428571, 0.9821428571428571, - 0.7678571428571429, 0.9821428571428571, - 0.7678571428571429, 0.9821428571428571, - 0.8035714285714286, 0.9821428571428571, - 0.8035714285714286, 0.9821428571428571, - 0.8392857142857143, 0.9821428571428571, - 0.8392857142857143, 0.9821428571428571, - 0.875, 0.9821428571428571, - 0.875, 0.9821428571428571, - 0.9107142857142857, 0.9821428571428571, - 0.9107142857142857, 0.9821428571428571, - 0.9464285714285714, 0.9821428571428571, - 0.9464285714285714, 0.9821428571428571, - 0.9821428571428571, 0.9821428571428571, - 0.9821428571428571, 0.9821428571428571, - 0.03571428571428571, 0.03571428571428571, - 0.03571428571428571, 0.03571428571428571, - 0.10714285714285714, 0.03571428571428571, - 0.10714285714285714, 0.03571428571428571, - 0.17857142857142858, 0.03571428571428571, - 0.17857142857142858, 0.03571428571428571, - 0.25, 0.03571428571428571, - 0.25, 0.03571428571428571, - 0.32142857142857145, 0.03571428571428571, - 0.32142857142857145, 0.03571428571428571, - 0.39285714285714285, 0.03571428571428571, - 0.39285714285714285, 0.03571428571428571, - 0.4642857142857143, 0.03571428571428571, - 0.4642857142857143, 0.03571428571428571, - 0.5357142857142857, 0.03571428571428571, - 0.5357142857142857, 0.03571428571428571, - 0.6071428571428571, 0.03571428571428571, - 0.6071428571428571, 0.03571428571428571, - 0.6785714285714286, 0.03571428571428571, - 0.6785714285714286, 0.03571428571428571, - 0.75, 0.03571428571428571, - 0.75, 0.03571428571428571, - 0.8214285714285714, 0.03571428571428571, - 0.8214285714285714, 0.03571428571428571, - 0.8928571428571429, 0.03571428571428571, - 0.8928571428571429, 0.03571428571428571, - 0.9642857142857143, 0.03571428571428571, - 0.9642857142857143, 
0.03571428571428571, - 0.03571428571428571, 0.10714285714285714, - 0.03571428571428571, 0.10714285714285714, - 0.10714285714285714, 0.10714285714285714, - 0.10714285714285714, 0.10714285714285714, - 0.17857142857142858, 0.10714285714285714, - 0.17857142857142858, 0.10714285714285714, - 0.25, 0.10714285714285714, - 0.25, 0.10714285714285714, - 0.32142857142857145, 0.10714285714285714, - 0.32142857142857145, 0.10714285714285714, - 0.39285714285714285, 0.10714285714285714, - 0.39285714285714285, 0.10714285714285714, - 0.4642857142857143, 0.10714285714285714, - 0.4642857142857143, 0.10714285714285714, - 0.5357142857142857, 0.10714285714285714, - 0.5357142857142857, 0.10714285714285714, - 0.6071428571428571, 0.10714285714285714, - 0.6071428571428571, 0.10714285714285714, - 0.6785714285714286, 0.10714285714285714, - 0.6785714285714286, 0.10714285714285714, - 0.75, 0.10714285714285714, - 0.75, 0.10714285714285714, - 0.8214285714285714, 0.10714285714285714, - 0.8214285714285714, 0.10714285714285714, - 0.8928571428571429, 0.10714285714285714, - 0.8928571428571429, 0.10714285714285714, - 0.9642857142857143, 0.10714285714285714, - 0.9642857142857143, 0.10714285714285714, - 0.03571428571428571, 0.17857142857142858, - 0.03571428571428571, 0.17857142857142858, - 0.10714285714285714, 0.17857142857142858, - 0.10714285714285714, 0.17857142857142858, - 0.17857142857142858, 0.17857142857142858, - 0.17857142857142858, 0.17857142857142858, - 0.25, 0.17857142857142858, - 0.25, 0.17857142857142858, - 0.32142857142857145, 0.17857142857142858, - 0.32142857142857145, 0.17857142857142858, - 0.39285714285714285, 0.17857142857142858, - 0.39285714285714285, 0.17857142857142858, - 0.4642857142857143, 0.17857142857142858, - 0.4642857142857143, 0.17857142857142858, - 0.5357142857142857, 0.17857142857142858, - 0.5357142857142857, 0.17857142857142858, - 0.6071428571428571, 0.17857142857142858, - 0.6071428571428571, 0.17857142857142858, - 0.6785714285714286, 0.17857142857142858, - 0.6785714285714286, 
0.17857142857142858, - 0.75, 0.17857142857142858, - 0.75, 0.17857142857142858, - 0.8214285714285714, 0.17857142857142858, - 0.8214285714285714, 0.17857142857142858, - 0.8928571428571429, 0.17857142857142858, - 0.8928571428571429, 0.17857142857142858, - 0.9642857142857143, 0.17857142857142858, - 0.9642857142857143, 0.17857142857142858, - 0.03571428571428571, 0.25, - 0.03571428571428571, 0.25, - 0.10714285714285714, 0.25, - 0.10714285714285714, 0.25, - 0.17857142857142858, 0.25, - 0.17857142857142858, 0.25, - 0.25, 0.25, - 0.25, 0.25, - 0.32142857142857145, 0.25, - 0.32142857142857145, 0.25, - 0.39285714285714285, 0.25, - 0.39285714285714285, 0.25, - 0.4642857142857143, 0.25, - 0.4642857142857143, 0.25, - 0.5357142857142857, 0.25, - 0.5357142857142857, 0.25, - 0.6071428571428571, 0.25, - 0.6071428571428571, 0.25, - 0.6785714285714286, 0.25, - 0.6785714285714286, 0.25, - 0.75, 0.25, - 0.75, 0.25, - 0.8214285714285714, 0.25, - 0.8214285714285714, 0.25, - 0.8928571428571429, 0.25, - 0.8928571428571429, 0.25, - 0.9642857142857143, 0.25, - 0.9642857142857143, 0.25, - 0.03571428571428571, 0.32142857142857145, - 0.03571428571428571, 0.32142857142857145, - 0.10714285714285714, 0.32142857142857145, - 0.10714285714285714, 0.32142857142857145, - 0.17857142857142858, 0.32142857142857145, - 0.17857142857142858, 0.32142857142857145, - 0.25, 0.32142857142857145, - 0.25, 0.32142857142857145, - 0.32142857142857145, 0.32142857142857145, - 0.32142857142857145, 0.32142857142857145, - 0.39285714285714285, 0.32142857142857145, - 0.39285714285714285, 0.32142857142857145, - 0.4642857142857143, 0.32142857142857145, - 0.4642857142857143, 0.32142857142857145, - 0.5357142857142857, 0.32142857142857145, - 0.5357142857142857, 0.32142857142857145, - 0.6071428571428571, 0.32142857142857145, - 0.6071428571428571, 0.32142857142857145, - 0.6785714285714286, 0.32142857142857145, - 0.6785714285714286, 0.32142857142857145, - 0.75, 0.32142857142857145, - 0.75, 0.32142857142857145, - 0.8214285714285714, 
0.32142857142857145, - 0.8214285714285714, 0.32142857142857145, - 0.8928571428571429, 0.32142857142857145, - 0.8928571428571429, 0.32142857142857145, - 0.9642857142857143, 0.32142857142857145, - 0.9642857142857143, 0.32142857142857145, - 0.03571428571428571, 0.39285714285714285, - 0.03571428571428571, 0.39285714285714285, - 0.10714285714285714, 0.39285714285714285, - 0.10714285714285714, 0.39285714285714285, - 0.17857142857142858, 0.39285714285714285, - 0.17857142857142858, 0.39285714285714285, - 0.25, 0.39285714285714285, - 0.25, 0.39285714285714285, - 0.32142857142857145, 0.39285714285714285, - 0.32142857142857145, 0.39285714285714285, - 0.39285714285714285, 0.39285714285714285, - 0.39285714285714285, 0.39285714285714285, - 0.4642857142857143, 0.39285714285714285, - 0.4642857142857143, 0.39285714285714285, - 0.5357142857142857, 0.39285714285714285, - 0.5357142857142857, 0.39285714285714285, - 0.6071428571428571, 0.39285714285714285, - 0.6071428571428571, 0.39285714285714285, - 0.6785714285714286, 0.39285714285714285, - 0.6785714285714286, 0.39285714285714285, - 0.75, 0.39285714285714285, - 0.75, 0.39285714285714285, - 0.8214285714285714, 0.39285714285714285, - 0.8214285714285714, 0.39285714285714285, - 0.8928571428571429, 0.39285714285714285, - 0.8928571428571429, 0.39285714285714285, - 0.9642857142857143, 0.39285714285714285, - 0.9642857142857143, 0.39285714285714285, - 0.03571428571428571, 0.4642857142857143, - 0.03571428571428571, 0.4642857142857143, - 0.10714285714285714, 0.4642857142857143, - 0.10714285714285714, 0.4642857142857143, - 0.17857142857142858, 0.4642857142857143, - 0.17857142857142858, 0.4642857142857143, - 0.25, 0.4642857142857143, - 0.25, 0.4642857142857143, - 0.32142857142857145, 0.4642857142857143, - 0.32142857142857145, 0.4642857142857143, - 0.39285714285714285, 0.4642857142857143, - 0.39285714285714285, 0.4642857142857143, - 0.4642857142857143, 0.4642857142857143, - 0.4642857142857143, 0.4642857142857143, - 0.5357142857142857, 
0.4642857142857143, - 0.5357142857142857, 0.4642857142857143, - 0.6071428571428571, 0.4642857142857143, - 0.6071428571428571, 0.4642857142857143, - 0.6785714285714286, 0.4642857142857143, - 0.6785714285714286, 0.4642857142857143, - 0.75, 0.4642857142857143, - 0.75, 0.4642857142857143, - 0.8214285714285714, 0.4642857142857143, - 0.8214285714285714, 0.4642857142857143, - 0.8928571428571429, 0.4642857142857143, - 0.8928571428571429, 0.4642857142857143, - 0.9642857142857143, 0.4642857142857143, - 0.9642857142857143, 0.4642857142857143, - 0.03571428571428571, 0.5357142857142857, - 0.03571428571428571, 0.5357142857142857, - 0.10714285714285714, 0.5357142857142857, - 0.10714285714285714, 0.5357142857142857, - 0.17857142857142858, 0.5357142857142857, - 0.17857142857142858, 0.5357142857142857, - 0.25, 0.5357142857142857, - 0.25, 0.5357142857142857, - 0.32142857142857145, 0.5357142857142857, - 0.32142857142857145, 0.5357142857142857, - 0.39285714285714285, 0.5357142857142857, - 0.39285714285714285, 0.5357142857142857, - 0.4642857142857143, 0.5357142857142857, - 0.4642857142857143, 0.5357142857142857, - 0.5357142857142857, 0.5357142857142857, - 0.5357142857142857, 0.5357142857142857, - 0.6071428571428571, 0.5357142857142857, - 0.6071428571428571, 0.5357142857142857, - 0.6785714285714286, 0.5357142857142857, - 0.6785714285714286, 0.5357142857142857, - 0.75, 0.5357142857142857, - 0.75, 0.5357142857142857, - 0.8214285714285714, 0.5357142857142857, - 0.8214285714285714, 0.5357142857142857, - 0.8928571428571429, 0.5357142857142857, - 0.8928571428571429, 0.5357142857142857, - 0.9642857142857143, 0.5357142857142857, - 0.9642857142857143, 0.5357142857142857, - 0.03571428571428571, 0.6071428571428571, - 0.03571428571428571, 0.6071428571428571, - 0.10714285714285714, 0.6071428571428571, - 0.10714285714285714, 0.6071428571428571, - 0.17857142857142858, 0.6071428571428571, - 0.17857142857142858, 0.6071428571428571, - 0.25, 0.6071428571428571, - 0.25, 0.6071428571428571, - 
0.32142857142857145, 0.6071428571428571, - 0.32142857142857145, 0.6071428571428571, - 0.39285714285714285, 0.6071428571428571, - 0.39285714285714285, 0.6071428571428571, - 0.4642857142857143, 0.6071428571428571, - 0.4642857142857143, 0.6071428571428571, - 0.5357142857142857, 0.6071428571428571, - 0.5357142857142857, 0.6071428571428571, - 0.6071428571428571, 0.6071428571428571, - 0.6071428571428571, 0.6071428571428571, - 0.6785714285714286, 0.6071428571428571, - 0.6785714285714286, 0.6071428571428571, - 0.75, 0.6071428571428571, - 0.75, 0.6071428571428571, - 0.8214285714285714, 0.6071428571428571, - 0.8214285714285714, 0.6071428571428571, - 0.8928571428571429, 0.6071428571428571, - 0.8928571428571429, 0.6071428571428571, - 0.9642857142857143, 0.6071428571428571, - 0.9642857142857143, 0.6071428571428571, - 0.03571428571428571, 0.6785714285714286, - 0.03571428571428571, 0.6785714285714286, - 0.10714285714285714, 0.6785714285714286, - 0.10714285714285714, 0.6785714285714286, - 0.17857142857142858, 0.6785714285714286, - 0.17857142857142858, 0.6785714285714286, - 0.25, 0.6785714285714286, - 0.25, 0.6785714285714286, - 0.32142857142857145, 0.6785714285714286, - 0.32142857142857145, 0.6785714285714286, - 0.39285714285714285, 0.6785714285714286, - 0.39285714285714285, 0.6785714285714286, - 0.4642857142857143, 0.6785714285714286, - 0.4642857142857143, 0.6785714285714286, - 0.5357142857142857, 0.6785714285714286, - 0.5357142857142857, 0.6785714285714286, - 0.6071428571428571, 0.6785714285714286, - 0.6071428571428571, 0.6785714285714286, - 0.6785714285714286, 0.6785714285714286, - 0.6785714285714286, 0.6785714285714286, - 0.75, 0.6785714285714286, - 0.75, 0.6785714285714286, - 0.8214285714285714, 0.6785714285714286, - 0.8214285714285714, 0.6785714285714286, - 0.8928571428571429, 0.6785714285714286, - 0.8928571428571429, 0.6785714285714286, - 0.9642857142857143, 0.6785714285714286, - 0.9642857142857143, 0.6785714285714286, - 0.03571428571428571, 0.75, - 0.03571428571428571, 
0.75, - 0.10714285714285714, 0.75, - 0.10714285714285714, 0.75, - 0.17857142857142858, 0.75, - 0.17857142857142858, 0.75, - 0.25, 0.75, - 0.25, 0.75, - 0.32142857142857145, 0.75, - 0.32142857142857145, 0.75, - 0.39285714285714285, 0.75, - 0.39285714285714285, 0.75, - 0.4642857142857143, 0.75, - 0.4642857142857143, 0.75, - 0.5357142857142857, 0.75, - 0.5357142857142857, 0.75, - 0.6071428571428571, 0.75, - 0.6071428571428571, 0.75, - 0.6785714285714286, 0.75, - 0.6785714285714286, 0.75, - 0.75, 0.75, - 0.75, 0.75, - 0.8214285714285714, 0.75, - 0.8214285714285714, 0.75, - 0.8928571428571429, 0.75, - 0.8928571428571429, 0.75, - 0.9642857142857143, 0.75, - 0.9642857142857143, 0.75, - 0.03571428571428571, 0.8214285714285714, - 0.03571428571428571, 0.8214285714285714, - 0.10714285714285714, 0.8214285714285714, - 0.10714285714285714, 0.8214285714285714, - 0.17857142857142858, 0.8214285714285714, - 0.17857142857142858, 0.8214285714285714, - 0.25, 0.8214285714285714, - 0.25, 0.8214285714285714, - 0.32142857142857145, 0.8214285714285714, - 0.32142857142857145, 0.8214285714285714, - 0.39285714285714285, 0.8214285714285714, - 0.39285714285714285, 0.8214285714285714, - 0.4642857142857143, 0.8214285714285714, - 0.4642857142857143, 0.8214285714285714, - 0.5357142857142857, 0.8214285714285714, - 0.5357142857142857, 0.8214285714285714, - 0.6071428571428571, 0.8214285714285714, - 0.6071428571428571, 0.8214285714285714, - 0.6785714285714286, 0.8214285714285714, - 0.6785714285714286, 0.8214285714285714, - 0.75, 0.8214285714285714, - 0.75, 0.8214285714285714, - 0.8214285714285714, 0.8214285714285714, - 0.8214285714285714, 0.8214285714285714, - 0.8928571428571429, 0.8214285714285714, - 0.8928571428571429, 0.8214285714285714, - 0.9642857142857143, 0.8214285714285714, - 0.9642857142857143, 0.8214285714285714, - 0.03571428571428571, 0.8928571428571429, - 0.03571428571428571, 0.8928571428571429, - 0.10714285714285714, 0.8928571428571429, - 0.10714285714285714, 0.8928571428571429, - 
0.17857142857142858, 0.8928571428571429, - 0.17857142857142858, 0.8928571428571429, - 0.25, 0.8928571428571429, - 0.25, 0.8928571428571429, - 0.32142857142857145, 0.8928571428571429, - 0.32142857142857145, 0.8928571428571429, - 0.39285714285714285, 0.8928571428571429, - 0.39285714285714285, 0.8928571428571429, - 0.4642857142857143, 0.8928571428571429, - 0.4642857142857143, 0.8928571428571429, - 0.5357142857142857, 0.8928571428571429, - 0.5357142857142857, 0.8928571428571429, - 0.6071428571428571, 0.8928571428571429, - 0.6071428571428571, 0.8928571428571429, - 0.6785714285714286, 0.8928571428571429, - 0.6785714285714286, 0.8928571428571429, - 0.75, 0.8928571428571429, - 0.75, 0.8928571428571429, - 0.8214285714285714, 0.8928571428571429, - 0.8214285714285714, 0.8928571428571429, - 0.8928571428571429, 0.8928571428571429, - 0.8928571428571429, 0.8928571428571429, - 0.9642857142857143, 0.8928571428571429, - 0.9642857142857143, 0.8928571428571429, - 0.03571428571428571, 0.9642857142857143, - 0.03571428571428571, 0.9642857142857143, - 0.10714285714285714, 0.9642857142857143, - 0.10714285714285714, 0.9642857142857143, - 0.17857142857142858, 0.9642857142857143, - 0.17857142857142858, 0.9642857142857143, - 0.25, 0.9642857142857143, - 0.25, 0.9642857142857143, - 0.32142857142857145, 0.9642857142857143, - 0.32142857142857145, 0.9642857142857143, - 0.39285714285714285, 0.9642857142857143, - 0.39285714285714285, 0.9642857142857143, - 0.4642857142857143, 0.9642857142857143, - 0.4642857142857143, 0.9642857142857143, - 0.5357142857142857, 0.9642857142857143, - 0.5357142857142857, 0.9642857142857143, - 0.6071428571428571, 0.9642857142857143, - 0.6071428571428571, 0.9642857142857143, - 0.6785714285714286, 0.9642857142857143, - 0.6785714285714286, 0.9642857142857143, - 0.75, 0.9642857142857143, - 0.75, 0.9642857142857143, - 0.8214285714285714, 0.9642857142857143, - 0.8214285714285714, 0.9642857142857143, - 0.8928571428571429, 0.9642857142857143, - 0.8928571428571429, 
0.9642857142857143, - 0.9642857142857143, 0.9642857142857143, - 0.9642857142857143, 0.9642857142857143, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.07142857142857142, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.21428571428571427, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.35714285714285715, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.5, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.6428571428571429, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.7857142857142857, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.9285714285714286, 0.07142857142857142, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 
0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.07142857142857142, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.21428571428571427, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.35714285714285715, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.5, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.6428571428571429, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.7857142857142857, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.9285714285714286, 0.21428571428571427, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.07142857142857142, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 
0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.21428571428571427, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.35714285714285715, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.5, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.6428571428571429, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.7857142857142857, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.9285714285714286, 0.35714285714285715, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.07142857142857142, 0.5, - 0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 0.21428571428571427, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.35714285714285715, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.5, 0.5, - 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, 
- 0.6428571428571429, 0.5, - 0.6428571428571429, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.7857142857142857, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.9285714285714286, 0.5, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.07142857142857142, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.21428571428571427, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.35714285714285715, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.5, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.6428571428571429, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.7857142857142857, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 
0.9285714285714286, 0.6428571428571429, - 0.9285714285714286, 0.6428571428571429, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.07142857142857142, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.21428571428571427, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.35714285714285715, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.5, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.6428571428571429, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.7857142857142857, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.9285714285714286, 0.7857142857142857, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 0.07142857142857142, 0.9285714285714286, - 
0.07142857142857142, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.21428571428571427, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.35714285714285715, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.5, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.6428571428571429, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.7857142857142857, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286, - 0.9285714285714286, 0.9285714285714286); - return anchor; -} diff --git a/models/pose_estimation_mediapipe/demo.py b/models/pose_estimation_mediapipe/demo.py deleted file mode 100644 index 4b70a1e0..00000000 --- a/models/pose_estimation_mediapipe/demo.py +++ /dev/null @@ -1,253 +0,0 @@ -import sys -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please 
install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from mp_pose import MPPose - -sys.path.append('../person_detection_mediapipe') -from mp_persondet import MPPersonDet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='Pose Estimation from MediaPipe') -parser.add_argument('--input', '-i', type=str, - help='Path to the input image. Omit for using default camera.') -parser.add_argument('--model', '-m', type=str, default='./pose_estimation_mediapipe_2023mar.onnx', - help='Path to the model.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--conf_threshold', type=float, default=0.8, - help='Filter out hands of confidence < conf_threshold.') -parser.add_argument('--save', '-s', action='store_true', - help='Specify to save results. This flag is invalid when using camera.') -parser.add_argument('--vis', '-v', action='store_true', - help='Specify to open a window for result visualization. 
This flag is invalid when using camera.') -args = parser.parse_args() - -def visualize(image, poses): - display_screen = image.copy() - display_3d = np.zeros((400, 400, 3), np.uint8) - cv.line(display_3d, (200, 0), (200, 400), (255, 255, 255), 2) - cv.line(display_3d, (0, 200), (400, 200), (255, 255, 255), 2) - cv.putText(display_3d, 'Main View', (0, 12), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - cv.putText(display_3d, 'Top View', (200, 12), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - cv.putText(display_3d, 'Left View', (0, 212), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - cv.putText(display_3d, 'Right View', (200, 212), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - is_draw = False # ensure only one person is drawn - - def _draw_lines(image, landmarks, keep_landmarks, is_draw_point=True, thickness=2): - - def _draw_by_presence(idx1, idx2): - if keep_landmarks[idx1] and keep_landmarks[idx2]: - cv.line(image, landmarks[idx1], landmarks[idx2], (255, 255, 255), thickness) - - _draw_by_presence(0, 1) - _draw_by_presence(1, 2) - _draw_by_presence(2, 3) - _draw_by_presence(3, 7) - _draw_by_presence(0, 4) - _draw_by_presence(4, 5) - _draw_by_presence(5, 6) - _draw_by_presence(6, 8) - - _draw_by_presence(9, 10) - - _draw_by_presence(12, 14) - _draw_by_presence(14, 16) - _draw_by_presence(16, 22) - _draw_by_presence(16, 18) - _draw_by_presence(16, 20) - _draw_by_presence(18, 20) - - _draw_by_presence(11, 13) - _draw_by_presence(13, 15) - _draw_by_presence(15, 21) - _draw_by_presence(15, 19) - _draw_by_presence(15, 17) - _draw_by_presence(17, 19) - - _draw_by_presence(11, 12) - _draw_by_presence(11, 23) - _draw_by_presence(23, 24) - _draw_by_presence(24, 12) - - _draw_by_presence(24, 26) - _draw_by_presence(26, 28) - _draw_by_presence(28, 30) - _draw_by_presence(28, 32) - _draw_by_presence(30, 32) - - _draw_by_presence(23, 25) - _draw_by_presence(25, 27) - _draw_by_presence(27, 31) - _draw_by_presence(27, 29) - _draw_by_presence(29, 31) - - if is_draw_point: - for i, p in 
enumerate(landmarks): - if keep_landmarks[i]: - cv.circle(image, p, thickness, (0, 0, 255), -1) - - for idx, pose in enumerate(poses): - bbox, landmarks_screen, landmarks_word, mask, heatmap, conf = pose - - edges = cv.Canny(mask, 100, 200) - kernel = np.ones((2, 2), np.uint8) # expansion edge to 2 pixels - edges = cv.dilate(edges, kernel, iterations=1) - edges_bgr = cv.cvtColor(edges, cv.COLOR_GRAY2BGR) - edges_bgr[edges == 255] = [0, 255, 0] - display_screen = cv.add(edges_bgr, display_screen) - - - # draw box - bbox = bbox.astype(np.int32) - cv.rectangle(display_screen, bbox[0], bbox[1], (0, 255, 0), 2) - cv.putText(display_screen, '{:.4f}'.format(conf), (bbox[0][0], bbox[0][1] + 12), cv.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) - # Draw line between each key points - landmarks_screen = landmarks_screen[:-6, :] - landmarks_word = landmarks_word[:-6, :] - - keep_landmarks = landmarks_screen[:, 4] > 0.8 # only show visible keypoints which presence bigger than 0.8 - - landmarks_screen = landmarks_screen - landmarks_word = landmarks_word - - landmarks_xy = landmarks_screen[:, 0: 2].astype(np.int32) - _draw_lines(display_screen, landmarks_xy, keep_landmarks, is_draw_point=False) - - # z value is relative to HIP, but we use constant to instead - for i, p in enumerate(landmarks_screen[:, 0: 3].astype(np.int32)): - if keep_landmarks[i]: - cv.circle(display_screen, np.array([p[0], p[1]]), 2, (0, 0, 255), -1) - - if is_draw is False: - is_draw = True - # Main view - landmarks_xy = landmarks_word[:, [0, 1]] - landmarks_xy = (landmarks_xy * 100 + 100).astype(np.int32) - _draw_lines(display_3d, landmarks_xy, keep_landmarks, thickness=2) - - # Top view - landmarks_xz = landmarks_word[:, [0, 2]] - landmarks_xz[:, 1] = -landmarks_xz[:, 1] - landmarks_xz = (landmarks_xz * 100 + np.array([300, 100])).astype(np.int32) - _draw_lines(display_3d, landmarks_xz,keep_landmarks, thickness=2) - - # Left view - landmarks_yz = landmarks_word[:, [2, 1]] - landmarks_yz[:, 0] = -landmarks_yz[:, 
0] - landmarks_yz = (landmarks_yz * 100 + np.array([100, 300])).astype(np.int32) - _draw_lines(display_3d, landmarks_yz, keep_landmarks, thickness=2) - - # Right view - landmarks_zy = landmarks_word[:, [2, 1]] - landmarks_zy = (landmarks_zy * 100 + np.array([300, 300])).astype(np.int32) - _draw_lines(display_3d, landmarks_zy, keep_landmarks, thickness=2) - - return display_screen, display_3d - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # person detector - person_detector = MPPersonDet(modelPath='../person_detection_mediapipe/person_detection_mediapipe_2023mar.onnx', - nmsThreshold=0.3, - scoreThreshold=0.5, - topK=5000, # usually only one person has good performance - backendId=backend_id, - targetId=target_id) - # pose estimator - pose_estimator = MPPose(modelPath=args.model, - confThreshold=args.conf_threshold, - backendId=backend_id, - targetId=target_id) - - # If input is an image - if args.input is not None: - image = cv.imread(args.input) - - # person detector inference - persons = person_detector.infer(image) - poses = [] - - # Estimate the pose of each person - for person in persons: - # pose estimator inference - pose = pose_estimator.infer(image, person) - if pose is not None: - poses.append(pose) - # Draw results on the input image - image, view_3d = visualize(image, poses) - - if len(persons) == 0: - print('No person detected!') - else: - print('Person detected!') - - # Save results - if args.save: - cv.imwrite('result.jpg', image) - print('Results saved to result.jpg\n') - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - cv.imshow('3D Pose Demo', view_3d) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: 
- print('No frames grabbed!') - break - - # person detector inference - persons = person_detector.infer(frame) - poses = [] - - tm.start() - # Estimate the pose of each person - for person in persons: - # pose detector inference - pose = pose_estimator.infer(frame, person) - if pose is not None: - poses.append(pose) - tm.stop() - # Draw results on the input image - frame, view_3d = visualize(frame, poses) - - if len(persons) == 0: - print('No person detected!') - else: - print('Person detected!') - cv.putText(frame, 'FPS: {:.2f}'.format(tm.getFPS()), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255)) - - cv.imshow('MediaPipe Pose Detection Demo', frame) - cv.imshow('3D Pose Demo', view_3d) - tm.reset() diff --git a/models/pose_estimation_mediapipe/example_outputs/mpposeest_demo.webp b/models/pose_estimation_mediapipe/example_outputs/mpposeest_demo.webp deleted file mode 100644 index 2e43b190..00000000 --- a/models/pose_estimation_mediapipe/example_outputs/mpposeest_demo.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f95c6e80fa90dd22b06a88b95d8dac512e52192d8367ea6b5f576bd667df3d4c -size 1564162 diff --git a/models/pose_estimation_mediapipe/example_outputs/pose_landmarks.png b/models/pose_estimation_mediapipe/example_outputs/pose_landmarks.png deleted file mode 100644 index 50441c72..00000000 --- a/models/pose_estimation_mediapipe/example_outputs/pose_landmarks.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c93063a83adff4db00c02aec8cf04d7444ae9169956c8ec67ee2351adbcd8c0f -size 123013 diff --git a/models/pose_estimation_mediapipe/mp_pose.py b/models/pose_estimation_mediapipe/mp_pose.py deleted file mode 100644 index 86348b6e..00000000 --- a/models/pose_estimation_mediapipe/mp_pose.py +++ /dev/null @@ -1,179 +0,0 @@ -import numpy as np -import cv2 as cv - -class MPPose: - def __init__(self, modelPath, confThreshold=0.5, backendId=0, targetId=0): - self.model_path = modelPath - 
self.conf_threshold = confThreshold - self.backend_id = backendId - self.target_id = targetId - - self.input_size = np.array([256, 256]) # wh - # RoI will be larger so the performance will be better, but preprocess will be slower. Default to 1. - self.PERSON_BOX_PRE_ENLARGE_FACTOR = 1 - self.PERSON_BOX_ENLARGE_FACTOR = 1.25 - - self.model = cv.dnn.readNet(self.model_path) - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self.model.setPreferableBackend(self.backend_id) - self.model.setPreferableTarget(self.target_id) - - def _preprocess(self, image, person): - ''' - Rotate input for inference. - Parameters: - image - input image of BGR channel order - face_bbox - human face bounding box found in image of format [[x1, y1], [x2, y2]] (top-left and bottom-right points) - person_landmarks - 4 landmarks (2 full body points, 2 upper body points) of shape [4, 2] - Returns: - rotated_person - rotated person image for inference - rotate_person_bbox - person box of interest range - angle - rotate angle for person - rotation_matrix - matrix for rotation and de-rotation - pad_bias - pad pixels of interest range - ''' - # crop and pad image to interest range - pad_bias = np.array([0, 0], dtype=np.int32) # left, top - person_keypoints = person[4: 12].reshape(-1, 2) - mid_hip_point = person_keypoints[0] - full_body_point = person_keypoints[1] - # get RoI - full_dist = np.linalg.norm(mid_hip_point - full_body_point) - full_bbox = np.array([mid_hip_point - full_dist, mid_hip_point + full_dist], np.int32) - # enlarge to make sure full body can be cover - center_bbox = np.sum(full_bbox, axis=0) / 2 - wh_bbox = full_bbox[1] - full_bbox[0] - new_half_size = wh_bbox * self.PERSON_BOX_PRE_ENLARGE_FACTOR / 2 - full_bbox = np.array([ - center_bbox - 
new_half_size, - center_bbox + new_half_size], np.int32) - - person_bbox = full_bbox.copy() - # refine person bbox - person_bbox[:, 0] = np.clip(person_bbox[:, 0], 0, image.shape[1]) - person_bbox[:, 1] = np.clip(person_bbox[:, 1], 0, image.shape[0]) - # crop to the size of interest - image = image[person_bbox[0][1]:person_bbox[1][1], person_bbox[0][0]:person_bbox[1][0], :] - # pad to square - left, top = person_bbox[0] - full_bbox[0] - right, bottom = full_bbox[1] - person_bbox[1] - image = cv.copyMakeBorder(image, top, bottom, left, right, cv.BORDER_CONSTANT, None, (0, 0, 0)) - pad_bias += person_bbox[0] - [left, top] - # compute rotation - mid_hip_point -= pad_bias - full_body_point -= pad_bias - radians = np.pi / 2 - np.arctan2(-(full_body_point[1] - mid_hip_point[1]), full_body_point[0] - mid_hip_point[0]) - radians = radians - 2 * np.pi * np.floor((radians + np.pi) / (2 * np.pi)) - angle = np.rad2deg(radians) - # get rotation matrix - rotation_matrix = cv.getRotationMatrix2D(mid_hip_point, angle, 1.0) - # get rotated image - rotated_image = cv.warpAffine(image, rotation_matrix, (image.shape[1], image.shape[0])) - # get landmark bounding box - blob = cv.resize(rotated_image, dsize=self.input_size, interpolation=cv.INTER_AREA).astype(np.float32) - rotated_person_bbox = np.array([[0, 0], [image.shape[1], image.shape[0]]], dtype=np.int32) - blob = cv.cvtColor(blob, cv.COLOR_BGR2RGB) - blob = blob / 255. 
# [0, 1] - return blob[np.newaxis, :, :, :], rotated_person_bbox, angle, rotation_matrix, pad_bias - - def infer(self, image, person): - h, w, _ = image.shape - # Preprocess - input_blob, rotated_person_bbox, angle, rotation_matrix, pad_bias = self._preprocess(image, person) - - # Forward - self.model.setInput(input_blob) - output_blob = self.model.forward(self.model.getUnconnectedOutLayersNames()) - - # Postprocess - results = self._postprocess(output_blob, rotated_person_bbox, angle, rotation_matrix, pad_bias, np.array([w, h])) - return results # [bbox_coords, landmarks_coords, conf] - - def _postprocess(self, blob, rotated_person_bbox, angle, rotation_matrix, pad_bias, img_size): - landmarks, conf, mask, heatmap, landmarks_word = blob - - conf = conf[0][0] - if conf < self.conf_threshold: - return None - - landmarks = landmarks[0].reshape(-1, 5) # shape: (1, 195) -> (39, 5) - landmarks_word = landmarks_word[0].reshape(-1, 3) # shape: (1, 117) -> (39, 3) - - # recover sigmoid score - landmarks[:, 3:] = 1 / (1 + np.exp(-landmarks[:, 3:])) - # TODO: refine landmarks with heatmap. 
reference: https://github.com/tensorflow/tfjs-models/blob/master/pose-detection/src/blazepose_tfjs/detector.ts#L577-L582 - heatmap = heatmap[0] - - # transform coords back to the input coords - wh_rotated_person_bbox = rotated_person_bbox[1] - rotated_person_bbox[0] - scale_factor = wh_rotated_person_bbox / self.input_size - landmarks[:, :2] = (landmarks[:, :2] - self.input_size / 2) * scale_factor - landmarks[:, 2] = landmarks[:, 2] * max(scale_factor) # depth scaling - coords_rotation_matrix = cv.getRotationMatrix2D((0, 0), angle, 1.0) - rotated_landmarks = np.dot(landmarks[:, :2], coords_rotation_matrix[:, :2]) - rotated_landmarks = np.c_[rotated_landmarks, landmarks[:, 2:]] - rotated_landmarks_world = np.dot(landmarks_word[:, :2], coords_rotation_matrix[:, :2]) - rotated_landmarks_world = np.c_[rotated_landmarks_world, landmarks_word[:, 2]] - # invert rotation - rotation_component = np.array([ - [rotation_matrix[0][0], rotation_matrix[1][0]], - [rotation_matrix[0][1], rotation_matrix[1][1]]]) - translation_component = np.array([ - rotation_matrix[0][2], rotation_matrix[1][2]]) - inverted_translation = np.array([ - -np.dot(rotation_component[0], translation_component), - -np.dot(rotation_component[1], translation_component)]) - inverse_rotation_matrix = np.c_[rotation_component, inverted_translation] - # get box center - center = np.append(np.sum(rotated_person_bbox, axis=0) / 2, 1) - original_center = np.array([ - np.dot(center, inverse_rotation_matrix[0]), - np.dot(center, inverse_rotation_matrix[1])]) - landmarks[:, :2] = rotated_landmarks[:, :2] + original_center + pad_bias - - # get bounding box from rotated_landmarks - bbox = np.array([ - np.amin(landmarks[:, :2], axis=0), - np.amax(landmarks[:, :2], axis=0)]) # [top-left, bottom-right] - center_bbox = np.sum(bbox, axis=0) / 2 - wh_bbox = bbox[1] - bbox[0] - new_half_size = wh_bbox * self.PERSON_BOX_ENLARGE_FACTOR / 2 - bbox = np.array([ - center_bbox - new_half_size, - center_bbox + new_half_size]) - - # 
invert rotation for mask - mask = mask[0].reshape(256, 256) # shape: (1, 256, 256, 1) -> (256, 256) - invert_rotation_matrix = cv.getRotationMatrix2D((mask.shape[1]/2, mask.shape[0]/2), -angle, 1.0) - invert_rotation_mask = cv.warpAffine(mask, invert_rotation_matrix, (mask.shape[1], mask.shape[0])) - # enlarge mask - invert_rotation_mask = cv.resize(invert_rotation_mask, wh_rotated_person_bbox) - # crop and pad mask - min_w, min_h = -np.minimum(pad_bias, 0) - left, top = np.maximum(pad_bias, 0) - pad_over = img_size - [invert_rotation_mask.shape[1], invert_rotation_mask.shape[0]] - pad_bias - max_w, max_h = np.minimum(pad_over, 0) + [invert_rotation_mask.shape[1], invert_rotation_mask.shape[0]] - right, bottom = np.maximum(pad_over, 0) - invert_rotation_mask = invert_rotation_mask[min_h:max_h, min_w:max_w] - invert_rotation_mask = cv.copyMakeBorder(invert_rotation_mask, top, bottom, left, right, cv.BORDER_CONSTANT, None, 0) - # binarize mask - invert_rotation_mask = np.where(invert_rotation_mask > 0, 255, 0).astype(np.uint8) - - # 2*2 person bbox: [[x1, y1], [x2, y2]] - # 39*5 screen landmarks: 33 keypoints and 6 auxiliary points with [x, y, z, visibility, presence], z value is relative to HIP - # Visibility is probability that a keypoint is located within the frame and not occluded by another bigger body part or another object - # Presence is probability that a keypoint is located within the frame - # 39*3 world landmarks: 33 keypoints and 6 auxiliary points with [x, y, z] 3D metric x, y, z coordinate - # img_height*img_width mask: gray mask, where 255 indicates the full body of a person and 0 means background - # 64*64*39 heatmap: currently only used for refining landmarks, requires sigmod processing before use - # conf: confidence of prediction - return [bbox, landmarks, rotated_landmarks_world, invert_rotation_mask, heatmap, conf] diff --git a/models/pose_estimation_mediapipe/pose_estimation_mediapipe_2023mar.onnx 
b/models/pose_estimation_mediapipe/pose_estimation_mediapipe_2023mar.onnx deleted file mode 100644 index 2544a0a4..00000000 --- a/models/pose_estimation_mediapipe/pose_estimation_mediapipe_2023mar.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9d89c599319a18fb7d2e28451a883476164543182bafca5f09eb2cf767ed2f3f -size 5557238 diff --git a/models/pose_estimation_mediapipe/pose_estimation_mediapipe_2023mar_int8bq.onnx b/models/pose_estimation_mediapipe/pose_estimation_mediapipe_2023mar_int8bq.onnx deleted file mode 100644 index 51df007a..00000000 --- a/models/pose_estimation_mediapipe/pose_estimation_mediapipe_2023mar_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1848ea80f657f4620e0de160594f598ed8107c9e7785d0be74f65cf15aa6deb1 -size 1694896 diff --git a/models/qrcode_wechatqrcode/CMakeLists.txt b/models/qrcode_wechatqrcode/CMakeLists.txt deleted file mode 100644 index 823d7e17..00000000 --- a/models/qrcode_wechatqrcode/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.24.0) -project(opencv_zoo_qrcode_wechatqrcode) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") - -# Find OpenCV -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) - -add_executable(demo demo.cpp) -target_link_libraries(demo ${OpenCV_LIBS}) diff --git a/models/qrcode_wechatqrcode/LICENSE b/models/qrcode_wechatqrcode/LICENSE deleted file mode 100644 index 7a4a3ea2..00000000 --- a/models/qrcode_wechatqrcode/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/models/qrcode_wechatqrcode/README.md b/models/qrcode_wechatqrcode/README.md deleted file mode 100644 index 786490b3..00000000 --- a/models/qrcode_wechatqrcode/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# WeChatQRCode - -WeChatQRCode for detecting and parsing QR Code, contributed by [WeChat Computer Vision Team (WeChatCV)](https://github.com/WeChatCV). Visit [opencv/opencv_contrib/modules/wechat_qrcode](https://github.com/opencv/opencv_contrib/tree/master/modules/wechat_qrcode) for more details. 
- -Notes: - -- Model source: [opencv/opencv_3rdparty:wechat_qrcode_20210119](https://github.com/opencv/opencv_3rdparty/tree/wechat_qrcode_20210119) -- The APIs `cv::wechat_qrcode::WeChatQRCode` (C++) & `cv.wechat_qrcode_WeChatQRCode` (Python) are both designed to run on default backend (OpenCV) and target (CPU) only. Therefore, benchmark results of this model are only available on CPU devices, until the APIs are updated with setting backends and targets. - -## Demo - -### Python - -Run the following command to try the demo: - -```shell -# detect on camera input -python demo.py -# detect on an image -python demo.py --input /path/to/image -v - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -Install latest OpenCV (with opencv_contrib) and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build - -# detect on camera input -./build/demo -# detect on an image -./build/demo -i=/path/to/image -v -# get help messages -./build/demo -h -``` - -### Example outputs - -![webcam demo](./example_outputs/wechat_qrcode_demo.gif) - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference: - -- https://github.com/opencv/opencv_contrib/tree/master/modules/wechat_qrcode -- https://github.com/opencv/opencv_3rdparty/tree/wechat_qrcode_20210119 diff --git a/models/qrcode_wechatqrcode/demo.cpp b/models/qrcode_wechatqrcode/demo.cpp deleted file mode 100644 index 5f915a83..00000000 --- a/models/qrcode_wechatqrcode/demo.cpp +++ /dev/null @@ -1,192 +0,0 @@ -#include -#include -#include -#include -#include - -class WeChatQRCode { - public: - WeChatQRCode(const std::string& detect_prototxt, - const std::string& detect_model, - const std::string& sr_prototxt, const std::string& sr_model, - int backend_target_index) - : backend_target_index_(backend_target_index) { - - const std::vector> backend_target_pairs = { - {cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_CPU}, - {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA}, - {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA_FP16}, - {cv::dnn::DNN_BACKEND_TIMVX, cv::dnn::DNN_TARGET_NPU}, - {cv::dnn::DNN_BACKEND_CANN, cv::dnn::DNN_TARGET_NPU}}; - - if (backend_target_index_ < 0 || - backend_target_index_ >= backend_target_pairs.size()) { - throw std::invalid_argument("Invalid backend-target index"); - } - - // initialize detector - detector_ = cv::makePtr( - detect_prototxt, detect_model, sr_prototxt, sr_model); - } - - std::pair, std::vector> detect( - const cv::Mat& image) { - std::vector results; - std::vector points; - results = detector_->detectAndDecode(image, points); - return {results, points}; - } - - cv::Mat visualize(const cv::Mat& image, - const std::vector& results, - const std::vector& points, - cv::Scalar points_color = cv::Scalar(0, 255, 0), - cv::Scalar text_color = cv::Scalar(0, 255, 0), - double fps = -1) const { - cv::Mat output = image.clone(); - - if (fps >= 0) { - cv::putText(output, "FPS: " + std::to_string(fps), cv::Point(0, 15), - cv::FONT_HERSHEY_SIMPLEX, 0.5, text_color); - } - - double fontScale = 0.5; - int fontSize = 1; - - for (size_t i = 0; i < results.size(); 
++i) { - const auto& p = points[i]; - - for (int r = 0; r < p.rows; ++r) { - cv::Point point(p.at(r, 0), p.at(r, 1)); - cv::circle(output, point, 10, points_color, -1); - } - - int qrcode_center_x = (p.at(0, 0) + p.at(2, 0)) / 2; - int qrcode_center_y = (p.at(0, 1) + p.at(2, 1)) / 2; - - int baseline = 0; - cv::Size text_size = - cv::getTextSize(results[i], cv::FONT_HERSHEY_DUPLEX, fontScale, - fontSize, &baseline); - - cv::Point text_pos(qrcode_center_x - text_size.width / 2, - qrcode_center_y + text_size.height / 2); - - cv::putText(output, results[i], text_pos, cv::FONT_HERSHEY_DUPLEX, - fontScale, text_color, fontSize); - } - - return output; - } - - private: - int backend_target_index_; - cv::Ptr detector_; -}; - -int main(int argc, char** argv) { - - cv::CommandLineParser parser( - argc, argv, - "{help h | | Show this help message.}" - "{input i | | Set path to the input image. Omit for using default camera.}" - "{detect_prototxt_path | detect_2021nov.prototxt | Set path to detect.prototxt.}" - "{detect_model_path | detect_2021nov.caffemodel | Set path to detect.caffemodel.}" - "{sr_prototxt_path | sr_2021nov.prototxt | Set path to sr.prototxt.}" - "{sr_model_path | sr_2021nov.caffemodel | Set path to sr.caffemodel.}" - "{backend_target bt | 0 | Choose one of the backend-target pairs to run this demo.}" - "{save s | false | Specify to save file with results.}" - "{vis v | false | Specify to open a new window to show results.}"); - - if (parser.has("help")) { - parser.printMessage(); - return 0; - } - - // get paths - std::string detect_prototxt = parser.get("detect_prototxt_path"); - std::string detect_model = parser.get("detect_model_path"); - std::string sr_prototxt = parser.get("sr_prototxt_path"); - std::string sr_model = parser.get("sr_model_path"); - int backend_target_index = parser.get("backend_target"); - - // input check - std::string input_path = parser.get("input"); - bool save_result = parser.get("save"); - bool visualize_result = 
parser.get("vis"); - - try { - WeChatQRCode qrDetector(detect_prototxt, detect_model, sr_prototxt, - sr_model, backend_target_index); - - if (!input_path.empty()) { - // process image - cv::Mat image = cv::imread(input_path); - if (image.empty()) { - std::cerr << "Could not read the image" << std::endl; - return -1; - } - - std::pair, std::vector> detectionResult = qrDetector.detect(image); - auto& results = detectionResult.first; - auto& points = detectionResult.second; - - for (const auto& result : results) { - std::cout << result << std::endl; - } - - cv::Mat result_image = qrDetector.visualize(image, results, points); - - if (save_result) { - cv::imwrite("result.jpg", result_image); - std::cout << "Results saved to result.jpg" << std::endl; - } - - if (visualize_result) { - cv::imshow(input_path, result_image); - cv::waitKey(0); - } - } else { - // process camera - cv::VideoCapture cap(0); - if (!cap.isOpened()) { - std::cerr << "Error opening camera" << std::endl; - return -1; - } - - cv::Mat frame; - cv::TickMeter tm; - - while (true) { - cap >> frame; - if (frame.empty()) { - std::cout << "No frames grabbed" << std::endl; - break; - } - - std::pair, std::vector> detectionResult = qrDetector.detect(frame); - auto& results = detectionResult.first; - auto& points = detectionResult.second; - - tm.start(); - double fps = tm.getFPS(); - tm.stop(); - - cv::Mat result_frame = qrDetector.visualize( - frame, results, points, cv::Scalar(0, 255, 0), - cv::Scalar(0, 255, 0), fps); - cv::imshow("WeChatQRCode Demo", result_frame); - - tm.reset(); - - if (cv::waitKey(1) >= 0) break; - } - } - - } catch (const std::exception& ex) { - std::cerr << "Error: " << ex.what() << std::endl; - return -1; - } - - return 0; -} diff --git a/models/qrcode_wechatqrcode/demo.py b/models/qrcode_wechatqrcode/demo.py deleted file mode 100644 index 2cd08b56..00000000 --- a/models/qrcode_wechatqrcode/demo.py +++ /dev/null @@ -1,136 +0,0 @@ -# This file is part of OpenCV Zoo project. 
-# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. - -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from wechatqrcode import WeChatQRCode - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser( - description="WeChat QR code detector for detecting and parsing QR code (https://github.com/opencv/opencv_contrib/tree/master/modules/wechat_qrcode)") -parser.add_argument('--input', '-i', type=str, - help='Usage: Set path to the input image. 
Omit for using default camera.') -parser.add_argument('--detect_prototxt_path', type=str, default='detect_2021nov.prototxt', - help='Usage: Set path to detect.prototxt.') -parser.add_argument('--detect_model_path', type=str, default='detect_2021nov.caffemodel', - help='Usage: Set path to detect.caffemodel.') -parser.add_argument('--sr_prototxt_path', type=str, default='sr_2021nov.prototxt', - help='Usage: Set path to sr.prototxt.') -parser.add_argument('--sr_model_path', type=str, default='sr_2021nov.caffemodel', - help='Usage: Set path to sr.caffemodel.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. 
Invalid in case of camera input.') -args = parser.parse_args() - -def visualize(image, res, points, points_color=(0, 255, 0), text_color=(0, 255, 0), fps=None): - output = image.copy() - h, w, _ = output.shape - - if fps is not None: - cv.putText(output, 'FPS: {:.2f}'.format(fps), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, text_color) - - fontScale = 0.5 - fontSize = 1 - for r, p in zip(res, points): - p = p.astype(np.int32) - for _p in p: - cv.circle(output, _p, 10, points_color, -1) - - qrcode_center_x = int((p[0][0] + p[2][0]) / 2) - qrcode_center_y = int((p[0][1] + p[2][1]) / 2) - - text_size, baseline = cv.getTextSize(r, cv.FONT_HERSHEY_DUPLEX, fontScale, fontSize) - text_x = qrcode_center_x - int(text_size[0] / 2) - text_y = qrcode_center_y - int(text_size[1] / 2) - cv.putText(output, '{}'.format(r), (text_x, text_y), cv.FONT_HERSHEY_DUPLEX, fontScale, text_color, fontSize) - - return output - - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # Instantiate WeChatQRCode - model = WeChatQRCode(args.detect_prototxt_path, - args.detect_model_path, - args.sr_prototxt_path, - args.sr_model_path, - backendId=backend_id, - targetId=target_id) - - # If input is an image: - if args.input is not None: - image = cv.imread(args.input) - res, points = model.infer(image) - - # Print results: - print(res) - print(points) - - # Draw results on the input image - image = visualize(image, res, points) - - # Save results if save is true - if args.save: - print('Results saved to result.jpg\n') - cv.imwrite('result.jpg', image) - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, image) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, frame = cap.read() - if not hasFrame: - print('No frames 
grabbed!') - break - - # Inference - tm.start() - res, points = model.infer(frame) - tm.stop() - fps = tm.getFPS() - - # Draw results on the input image - frame = visualize(frame, res, points, fps=fps) - - # Visualize results in a new window - cv.imshow('WeChatQRCode Demo', frame) - - tm.reset() diff --git a/models/qrcode_wechatqrcode/detect_2021nov.caffemodel b/models/qrcode_wechatqrcode/detect_2021nov.caffemodel deleted file mode 100644 index 458c760b..00000000 --- a/models/qrcode_wechatqrcode/detect_2021nov.caffemodel +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cc49b8c9babaf45f3037610fe499df38c8819ebda29e90ca9f2e33270f6ef809 -size 965430 diff --git a/models/qrcode_wechatqrcode/detect_2021nov.prototxt b/models/qrcode_wechatqrcode/detect_2021nov.prototxt deleted file mode 100644 index bd2417c9..00000000 --- a/models/qrcode_wechatqrcode/detect_2021nov.prototxt +++ /dev/null @@ -1,2716 +0,0 @@ -layer { - name: "data" - type: "Input" - top: "data" - input_param { - shape { - dim: 1 - dim: 1 - dim: 384 - dim: 384 - } - } -} -layer { - name: "data/bn" - type: "BatchNorm" - bottom: "data" - top: "data" - param { - lr_mult: 0.0 - decay_mult: 0.0 - } - param { - lr_mult: 0.0 - decay_mult: 0.0 - } - param { - lr_mult: 0.0 - decay_mult: 0.0 - } -} -layer { - name: "data/bn/scale" - type: "Scale" - bottom: "data" - top: "data" - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - scale_param { - filler { - type: "constant" - value: 1.0 - } - bias_term: true - bias_filler { - type: "constant" - value: 0.0 - } - } -} -layer { - name: "stage1" - type: "Convolution" - bottom: "data" - top: "stage1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 24 - bias_term: true - pad: 1 - kernel_size: 3 - group: 1 - stride: 2 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage1/bn" - type: 
"BatchNorm" - bottom: "stage1" - top: "stage1" - param { - lr_mult: 0.0 - decay_mult: 0.0 - } - param { - lr_mult: 0.0 - decay_mult: 0.0 - } - param { - lr_mult: 0.0 - decay_mult: 0.0 - } -} -layer { - name: "stage1/bn/scale" - type: "Scale" - bottom: "stage1" - top: "stage1" - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - scale_param { - filler { - type: "constant" - value: 1.0 - } - bias_term: true - bias_filler { - type: "constant" - value: 0.0 - } - } -} -layer { - name: "stage1/relu" - type: "ReLU" - bottom: "stage1" - top: "stage1" -} -layer { - name: "stage2" - type: "Pooling" - bottom: "stage1" - top: "stage2" - pooling_param { - pool: MAX - kernel_size: 3 - stride: 2 - pad: 0 - } -} -layer { - name: "stage3_1/conv1" - type: "Convolution" - bottom: "stage2" - top: "stage3_1/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 16 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_1/conv1/relu" - type: "ReLU" - bottom: "stage3_1/conv1" - top: "stage3_1/conv1" -} -layer { - name: "stage3_1/conv2" - type: "Convolution" - bottom: "stage3_1/conv1" - top: "stage3_1/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 16 - pad: 1 - kernel_size: 3 - group: 16 - stride: 2 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_1/conv3" - type: "Convolution" - bottom: "stage3_1/conv2" - top: "stage3_1/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 64 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_1/relu" - type: "ReLU" - bottom: "stage3_1/conv3" - top: "stage3_1/conv3" -} -layer { - name: "stage3_2/conv1" - type: "Convolution" - bottom: "stage3_1/conv3" - top: "stage3_2/conv1" - param { - lr_mult: 1.0 - decay_mult: 
1.0 - } - convolution_param { - num_output: 16 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_2/conv1/relu" - type: "ReLU" - bottom: "stage3_2/conv1" - top: "stage3_2/conv1" -} -layer { - name: "stage3_2/conv2" - type: "Convolution" - bottom: "stage3_2/conv1" - top: "stage3_2/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 16 - pad: 1 - kernel_size: 3 - group: 16 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_2/conv3" - type: "Convolution" - bottom: "stage3_2/conv2" - top: "stage3_2/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 64 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_2/sum" - type: "Eltwise" - bottom: "stage3_1/conv3" - bottom: "stage3_2/conv3" - top: "stage3_2/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage3_2/relu" - type: "ReLU" - bottom: "stage3_2/sum" - top: "stage3_2/sum" -} -layer { - name: "stage3_3/conv1" - type: "Convolution" - bottom: "stage3_2/sum" - top: "stage3_3/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 16 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_3/conv1/relu" - type: "ReLU" - bottom: "stage3_3/conv1" - top: "stage3_3/conv1" -} -layer { - name: "stage3_3/conv2" - type: "Convolution" - bottom: "stage3_3/conv1" - top: "stage3_3/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 16 - pad: 1 - kernel_size: 3 - group: 16 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_3/conv3" - type: "Convolution" - bottom: "stage3_3/conv2" - top: "stage3_3/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - 
convolution_param { - num_output: 64 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_3/sum" - type: "Eltwise" - bottom: "stage3_2/sum" - bottom: "stage3_3/conv3" - top: "stage3_3/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage3_3/relu" - type: "ReLU" - bottom: "stage3_3/sum" - top: "stage3_3/sum" -} -layer { - name: "stage3_4/conv1" - type: "Convolution" - bottom: "stage3_3/sum" - top: "stage3_4/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 16 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_4/conv1/relu" - type: "ReLU" - bottom: "stage3_4/conv1" - top: "stage3_4/conv1" -} -layer { - name: "stage3_4/conv2" - type: "Convolution" - bottom: "stage3_4/conv1" - top: "stage3_4/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 16 - pad: 1 - kernel_size: 3 - group: 16 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_4/conv3" - type: "Convolution" - bottom: "stage3_4/conv2" - top: "stage3_4/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 64 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage3_4/sum" - type: "Eltwise" - bottom: "stage3_3/sum" - bottom: "stage3_4/conv3" - top: "stage3_4/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage3_4/relu" - type: "ReLU" - bottom: "stage3_4/sum" - top: "stage3_4/sum" -} -layer { - name: "stage4_1/conv1" - type: "Convolution" - bottom: "stage3_4/sum" - top: "stage4_1/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: 
"stage4_1/conv1/relu" - type: "ReLU" - bottom: "stage4_1/conv1" - top: "stage4_1/conv1" -} -layer { - name: "stage4_1/conv2" - type: "Convolution" - bottom: "stage4_1/conv1" - top: "stage4_1/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 1 - kernel_size: 3 - group: 32 - stride: 2 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_1/conv3" - type: "Convolution" - bottom: "stage4_1/conv2" - top: "stage4_1/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_1/relu" - type: "ReLU" - bottom: "stage4_1/conv3" - top: "stage4_1/conv3" -} -layer { - name: "stage4_2/conv1" - type: "Convolution" - bottom: "stage4_1/conv3" - top: "stage4_2/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_2/conv1/relu" - type: "ReLU" - bottom: "stage4_2/conv1" - top: "stage4_2/conv1" -} -layer { - name: "stage4_2/conv2" - type: "Convolution" - bottom: "stage4_2/conv1" - top: "stage4_2/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 1 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_2/conv3" - type: "Convolution" - bottom: "stage4_2/conv2" - top: "stage4_2/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_2/sum" - type: "Eltwise" - bottom: "stage4_1/conv3" - bottom: "stage4_2/conv3" - top: "stage4_2/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: 
"stage4_2/relu" - type: "ReLU" - bottom: "stage4_2/sum" - top: "stage4_2/sum" -} -layer { - name: "stage4_3/conv1" - type: "Convolution" - bottom: "stage4_2/sum" - top: "stage4_3/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_3/conv1/relu" - type: "ReLU" - bottom: "stage4_3/conv1" - top: "stage4_3/conv1" -} -layer { - name: "stage4_3/conv2" - type: "Convolution" - bottom: "stage4_3/conv1" - top: "stage4_3/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 1 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_3/conv3" - type: "Convolution" - bottom: "stage4_3/conv2" - top: "stage4_3/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_3/sum" - type: "Eltwise" - bottom: "stage4_2/sum" - bottom: "stage4_3/conv3" - top: "stage4_3/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage4_3/relu" - type: "ReLU" - bottom: "stage4_3/sum" - top: "stage4_3/sum" -} -layer { - name: "stage4_4/conv1" - type: "Convolution" - bottom: "stage4_3/sum" - top: "stage4_4/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_4/conv1/relu" - type: "ReLU" - bottom: "stage4_4/conv1" - top: "stage4_4/conv1" -} -layer { - name: "stage4_4/conv2" - type: "Convolution" - bottom: "stage4_4/conv1" - top: "stage4_4/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 1 - kernel_size: 3 - group: 32 - stride: 
1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_4/conv3" - type: "Convolution" - bottom: "stage4_4/conv2" - top: "stage4_4/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_4/sum" - type: "Eltwise" - bottom: "stage4_3/sum" - bottom: "stage4_4/conv3" - top: "stage4_4/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage4_4/relu" - type: "ReLU" - bottom: "stage4_4/sum" - top: "stage4_4/sum" -} -layer { - name: "stage4_5/conv1" - type: "Convolution" - bottom: "stage4_4/sum" - top: "stage4_5/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_5/conv1/relu" - type: "ReLU" - bottom: "stage4_5/conv1" - top: "stage4_5/conv1" -} -layer { - name: "stage4_5/conv2" - type: "Convolution" - bottom: "stage4_5/conv1" - top: "stage4_5/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 1 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_5/conv3" - type: "Convolution" - bottom: "stage4_5/conv2" - top: "stage4_5/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_5/sum" - type: "Eltwise" - bottom: "stage4_4/sum" - bottom: "stage4_5/conv3" - top: "stage4_5/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage4_5/relu" - type: "ReLU" - bottom: "stage4_5/sum" - top: "stage4_5/sum" -} -layer { - name: "stage4_6/conv1" - type: "Convolution" - bottom: "stage4_5/sum" - top: 
"stage4_6/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_6/conv1/relu" - type: "ReLU" - bottom: "stage4_6/conv1" - top: "stage4_6/conv1" -} -layer { - name: "stage4_6/conv2" - type: "Convolution" - bottom: "stage4_6/conv1" - top: "stage4_6/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 1 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_6/conv3" - type: "Convolution" - bottom: "stage4_6/conv2" - top: "stage4_6/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_6/sum" - type: "Eltwise" - bottom: "stage4_5/sum" - bottom: "stage4_6/conv3" - top: "stage4_6/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage4_6/relu" - type: "ReLU" - bottom: "stage4_6/sum" - top: "stage4_6/sum" -} -layer { - name: "stage4_7/conv1" - type: "Convolution" - bottom: "stage4_6/sum" - top: "stage4_7/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_7/conv1/relu" - type: "ReLU" - bottom: "stage4_7/conv1" - top: "stage4_7/conv1" -} -layer { - name: "stage4_7/conv2" - type: "Convolution" - bottom: "stage4_7/conv1" - top: "stage4_7/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 1 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_7/conv3" - type: "Convolution" - bottom: "stage4_7/conv2" - top: "stage4_7/conv3" - 
param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_7/sum" - type: "Eltwise" - bottom: "stage4_6/sum" - bottom: "stage4_7/conv3" - top: "stage4_7/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage4_7/relu" - type: "ReLU" - bottom: "stage4_7/sum" - top: "stage4_7/sum" -} -layer { - name: "stage4_8/conv1" - type: "Convolution" - bottom: "stage4_7/sum" - top: "stage4_8/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_8/conv1/relu" - type: "ReLU" - bottom: "stage4_8/conv1" - top: "stage4_8/conv1" -} -layer { - name: "stage4_8/conv2" - type: "Convolution" - bottom: "stage4_8/conv1" - top: "stage4_8/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 1 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_8/conv3" - type: "Convolution" - bottom: "stage4_8/conv2" - top: "stage4_8/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage4_8/sum" - type: "Eltwise" - bottom: "stage4_7/sum" - bottom: "stage4_8/conv3" - top: "stage4_8/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage4_8/relu" - type: "ReLU" - bottom: "stage4_8/sum" - top: "stage4_8/sum" -} -layer { - name: "stage5_1/conv1" - type: "Convolution" - bottom: "stage4_8/sum" - top: "stage5_1/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: 
"msra" - } - dilation: 1 - } -} -layer { - name: "stage5_1/conv1/relu" - type: "ReLU" - bottom: "stage5_1/conv1" - top: "stage5_1/conv1" -} -layer { - name: "stage5_1/conv2" - type: "Convolution" - bottom: "stage5_1/conv1" - top: "stage5_1/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 2 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage5_1/conv3" - type: "Convolution" - bottom: "stage5_1/conv2" - top: "stage5_1/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage5_1/relu" - type: "ReLU" - bottom: "stage5_1/conv3" - top: "stage5_1/conv3" -} -layer { - name: "stage5_2/conv1" - type: "Convolution" - bottom: "stage5_1/conv3" - top: "stage5_2/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage5_2/conv1/relu" - type: "ReLU" - bottom: "stage5_2/conv1" - top: "stage5_2/conv1" -} -layer { - name: "stage5_2/conv2" - type: "Convolution" - bottom: "stage5_2/conv1" - top: "stage5_2/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage5_2/conv3" - type: "Convolution" - bottom: "stage5_2/conv2" - top: "stage5_2/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage5_2/sum" - type: "Eltwise" - bottom: "stage5_1/conv3" - bottom: "stage5_2/conv3" - top: "stage5_2/sum" - eltwise_param { - 
operation: SUM - } -} -layer { - name: "stage5_2/relu" - type: "ReLU" - bottom: "stage5_2/sum" - top: "stage5_2/sum" -} -layer { - name: "stage5_3/conv1" - type: "Convolution" - bottom: "stage5_2/sum" - top: "stage5_3/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage5_3/conv1/relu" - type: "ReLU" - bottom: "stage5_3/conv1" - top: "stage5_3/conv1" -} -layer { - name: "stage5_3/conv2" - type: "Convolution" - bottom: "stage5_3/conv1" - top: "stage5_3/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage5_3/conv3" - type: "Convolution" - bottom: "stage5_3/conv2" - top: "stage5_3/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage5_3/sum" - type: "Eltwise" - bottom: "stage5_2/sum" - bottom: "stage5_3/conv3" - top: "stage5_3/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage5_3/relu" - type: "ReLU" - bottom: "stage5_3/sum" - top: "stage5_3/sum" -} -layer { - name: "stage5_4/conv1" - type: "Convolution" - bottom: "stage5_3/sum" - top: "stage5_4/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage5_4/conv1/relu" - type: "ReLU" - bottom: "stage5_4/conv1" - top: "stage5_4/conv1" -} -layer { - name: "stage5_4/conv2" - type: "Convolution" - bottom: "stage5_4/conv1" - top: "stage5_4/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 
- kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage5_4/conv3" - type: "Convolution" - bottom: "stage5_4/conv2" - top: "stage5_4/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage5_4/sum" - type: "Eltwise" - bottom: "stage5_3/sum" - bottom: "stage5_4/conv3" - top: "stage5_4/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage5_4/relu" - type: "ReLU" - bottom: "stage5_4/sum" - top: "stage5_4/sum" -} -layer { - name: "stage6_1/conv4" - type: "Convolution" - bottom: "stage5_4/sum" - top: "stage6_1/conv4" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage6_1/conv1" - type: "Convolution" - bottom: "stage5_4/sum" - top: "stage6_1/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage6_1/conv1/relu" - type: "ReLU" - bottom: "stage6_1/conv1" - top: "stage6_1/conv1" -} -layer { - name: "stage6_1/conv2" - type: "Convolution" - bottom: "stage6_1/conv1" - top: "stage6_1/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage6_1/conv3" - type: "Convolution" - bottom: "stage6_1/conv2" - top: "stage6_1/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: 
"stage6_1/sum" - type: "Eltwise" - bottom: "stage6_1/conv4" - bottom: "stage6_1/conv3" - top: "stage6_1/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage6_1/relu" - type: "ReLU" - bottom: "stage6_1/sum" - top: "stage6_1/sum" -} -layer { - name: "stage6_2/conv1" - type: "Convolution" - bottom: "stage6_1/sum" - top: "stage6_2/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage6_2/conv1/relu" - type: "ReLU" - bottom: "stage6_2/conv1" - top: "stage6_2/conv1" -} -layer { - name: "stage6_2/conv2" - type: "Convolution" - bottom: "stage6_2/conv1" - top: "stage6_2/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage6_2/conv3" - type: "Convolution" - bottom: "stage6_2/conv2" - top: "stage6_2/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage6_2/sum" - type: "Eltwise" - bottom: "stage6_1/sum" - bottom: "stage6_2/conv3" - top: "stage6_2/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage6_2/relu" - type: "ReLU" - bottom: "stage6_2/sum" - top: "stage6_2/sum" -} -layer { - name: "stage7_1/conv4" - type: "Convolution" - bottom: "stage6_2/sum" - top: "stage7_1/conv4" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage7_1/conv1" - type: "Convolution" - bottom: "stage6_2/sum" - top: "stage7_1/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - 
num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage7_1/conv1/relu" - type: "ReLU" - bottom: "stage7_1/conv1" - top: "stage7_1/conv1" -} -layer { - name: "stage7_1/conv2" - type: "Convolution" - bottom: "stage7_1/conv1" - top: "stage7_1/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage7_1/conv3" - type: "Convolution" - bottom: "stage7_1/conv2" - top: "stage7_1/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage7_1/sum" - type: "Eltwise" - bottom: "stage7_1/conv4" - bottom: "stage7_1/conv3" - top: "stage7_1/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage7_1/relu" - type: "ReLU" - bottom: "stage7_1/sum" - top: "stage7_1/sum" -} -layer { - name: "stage7_2/conv1" - type: "Convolution" - bottom: "stage7_1/sum" - top: "stage7_2/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage7_2/conv1/relu" - type: "ReLU" - bottom: "stage7_2/conv1" - top: "stage7_2/conv1" -} -layer { - name: "stage7_2/conv2" - type: "Convolution" - bottom: "stage7_2/conv1" - top: "stage7_2/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage7_2/conv3" - type: "Convolution" - bottom: "stage7_2/conv2" - top: "stage7_2/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 
- pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage7_2/sum" - type: "Eltwise" - bottom: "stage7_1/sum" - bottom: "stage7_2/conv3" - top: "stage7_2/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage7_2/relu" - type: "ReLU" - bottom: "stage7_2/sum" - top: "stage7_2/sum" -} -layer { - name: "stage8_1/conv4" - type: "Convolution" - bottom: "stage7_2/sum" - top: "stage8_1/conv4" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage8_1/conv1" - type: "Convolution" - bottom: "stage7_2/sum" - top: "stage8_1/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage8_1/conv1/relu" - type: "ReLU" - bottom: "stage8_1/conv1" - top: "stage8_1/conv1" -} -layer { - name: "stage8_1/conv2" - type: "Convolution" - bottom: "stage8_1/conv1" - top: "stage8_1/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage8_1/conv3" - type: "Convolution" - bottom: "stage8_1/conv2" - top: "stage8_1/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage8_1/sum" - type: "Eltwise" - bottom: "stage8_1/conv4" - bottom: "stage8_1/conv3" - top: "stage8_1/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage8_1/relu" - type: "ReLU" - bottom: "stage8_1/sum" - top: "stage8_1/sum" -} -layer { - name: "stage8_2/conv1" - type: "Convolution" 
- bottom: "stage8_1/sum" - top: "stage8_2/conv1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage8_2/conv1/relu" - type: "ReLU" - bottom: "stage8_2/conv1" - top: "stage8_2/conv1" -} -layer { - name: "stage8_2/conv2" - type: "Convolution" - bottom: "stage8_2/conv1" - top: "stage8_2/conv2" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 32 - pad: 2 - kernel_size: 3 - group: 32 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 2 - } -} -layer { - name: "stage8_2/conv3" - type: "Convolution" - bottom: "stage8_2/conv2" - top: "stage8_2/conv3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - convolution_param { - num_output: 128 - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "stage8_2/sum" - type: "Eltwise" - bottom: "stage8_1/sum" - bottom: "stage8_2/conv3" - top: "stage8_2/sum" - eltwise_param { - operation: SUM - } -} -layer { - name: "stage8_2/relu" - type: "ReLU" - bottom: "stage8_2/sum" - top: "stage8_2/sum" -} -layer { - name: "cls1/conv" - type: "Convolution" - bottom: "stage4_8/sum" - top: "cls1/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 12 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "cls1/permute" - type: "Permute" - bottom: "cls1/conv" - top: "cls1/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "cls1/flatten" - type: "Flatten" - bottom: "cls1/permute" - top: "cls1/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "loc1/conv" - type: "Convolution" - bottom: "stage4_8/sum" - top: "loc1/conv" - param { - lr_mult: 1.0 - decay_mult: 
1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 24 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "loc1/permute" - type: "Permute" - bottom: "loc1/conv" - top: "loc1/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "loc1/flatten" - type: "Flatten" - bottom: "loc1/permute" - top: "loc1/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "stage4_8/sum/prior_box" - type: "PriorBox" - bottom: "stage4_8/sum" - bottom: "data" - top: "stage4_8/sum/prior_box" - prior_box_param { - min_size: 50.0 - max_size: 100.0 - aspect_ratio: 2.0 - aspect_ratio: 0.5 - aspect_ratio: 3.0 - aspect_ratio: 0.3333333432674408 - flip: false - clip: false - variance: 0.10000000149011612 - variance: 0.10000000149011612 - variance: 0.20000000298023224 - variance: 0.20000000298023224 - step: 16.0 - } -} -layer { - name: "cls2/conv" - type: "Convolution" - bottom: "stage5_4/sum" - top: "cls2/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 12 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "cls2/permute" - type: "Permute" - bottom: "cls2/conv" - top: "cls2/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "cls2/flatten" - type: "Flatten" - bottom: "cls2/permute" - top: "cls2/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "loc2/conv" - type: "Convolution" - bottom: "stage5_4/sum" - top: "loc2/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 24 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: 
"loc2/permute" - type: "Permute" - bottom: "loc2/conv" - top: "loc2/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "loc2/flatten" - type: "Flatten" - bottom: "loc2/permute" - top: "loc2/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "stage5_4/sum/prior_box" - type: "PriorBox" - bottom: "stage5_4/sum" - bottom: "data" - top: "stage5_4/sum/prior_box" - prior_box_param { - min_size: 100.0 - max_size: 150.0 - aspect_ratio: 2.0 - aspect_ratio: 0.5 - aspect_ratio: 3.0 - aspect_ratio: 0.3333333432674408 - flip: false - clip: false - variance: 0.10000000149011612 - variance: 0.10000000149011612 - variance: 0.20000000298023224 - variance: 0.20000000298023224 - step: 32.0 - } -} -layer { - name: "cls3/conv" - type: "Convolution" - bottom: "stage6_2/sum" - top: "cls3/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 12 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "cls3/permute" - type: "Permute" - bottom: "cls3/conv" - top: "cls3/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "cls3/flatten" - type: "Flatten" - bottom: "cls3/permute" - top: "cls3/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "loc3/conv" - type: "Convolution" - bottom: "stage6_2/sum" - top: "loc3/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 24 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "loc3/permute" - type: "Permute" - bottom: "loc3/conv" - top: "loc3/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "loc3/flatten" - type: "Flatten" - bottom: "loc3/permute" - top: 
"loc3/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "stage6_2/sum/prior_box" - type: "PriorBox" - bottom: "stage6_2/sum" - bottom: "data" - top: "stage6_2/sum/prior_box" - prior_box_param { - min_size: 150.0 - max_size: 200.0 - aspect_ratio: 2.0 - aspect_ratio: 0.5 - aspect_ratio: 3.0 - aspect_ratio: 0.3333333432674408 - flip: false - clip: false - variance: 0.10000000149011612 - variance: 0.10000000149011612 - variance: 0.20000000298023224 - variance: 0.20000000298023224 - step: 32.0 - } -} -layer { - name: "cls4/conv" - type: "Convolution" - bottom: "stage7_2/sum" - top: "cls4/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 12 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "cls4/permute" - type: "Permute" - bottom: "cls4/conv" - top: "cls4/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "cls4/flatten" - type: "Flatten" - bottom: "cls4/permute" - top: "cls4/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "loc4/conv" - type: "Convolution" - bottom: "stage7_2/sum" - top: "loc4/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 24 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "loc4/permute" - type: "Permute" - bottom: "loc4/conv" - top: "loc4/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "loc4/flatten" - type: "Flatten" - bottom: "loc4/permute" - top: "loc4/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "stage7_2/sum/prior_box" - type: "PriorBox" - bottom: "stage7_2/sum" - bottom: "data" - top: "stage7_2/sum/prior_box" - prior_box_param { - min_size: 200.0 - max_size: 300.0 
- aspect_ratio: 2.0 - aspect_ratio: 0.5 - aspect_ratio: 3.0 - aspect_ratio: 0.3333333432674408 - flip: false - clip: false - variance: 0.10000000149011612 - variance: 0.10000000149011612 - variance: 0.20000000298023224 - variance: 0.20000000298023224 - step: 32.0 - } -} -layer { - name: "cls5/conv" - type: "Convolution" - bottom: "stage8_2/sum" - top: "cls5/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 12 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "cls5/permute" - type: "Permute" - bottom: "cls5/conv" - top: "cls5/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "cls5/flatten" - type: "Flatten" - bottom: "cls5/permute" - top: "cls5/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "loc5/conv" - type: "Convolution" - bottom: "stage8_2/sum" - top: "loc5/conv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 24 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - dilation: 1 - } -} -layer { - name: "loc5/permute" - type: "Permute" - bottom: "loc5/conv" - top: "loc5/permute" - permute_param { - order: 0 - order: 2 - order: 3 - order: 1 - } -} -layer { - name: "loc5/flatten" - type: "Flatten" - bottom: "loc5/permute" - top: "loc5/flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "stage8_2/sum/prior_box" - type: "PriorBox" - bottom: "stage8_2/sum" - bottom: "data" - top: "stage8_2/sum/prior_box" - prior_box_param { - min_size: 300.0 - max_size: 400.0 - aspect_ratio: 2.0 - aspect_ratio: 0.5 - aspect_ratio: 3.0 - aspect_ratio: 0.3333333432674408 - flip: false - clip: false - variance: 0.10000000149011612 - variance: 0.10000000149011612 - variance: 0.20000000298023224 - variance: 
0.20000000298023224 - step: 32.0 - } -} -layer { - name: "mbox_conf" - type: "Concat" - bottom: "cls1/flatten" - bottom: "cls2/flatten" - bottom: "cls3/flatten" - bottom: "cls4/flatten" - bottom: "cls5/flatten" - top: "mbox_conf" - concat_param { - axis: 1 - } -} -layer { - name: "mbox_loc" - type: "Concat" - bottom: "loc1/flatten" - bottom: "loc2/flatten" - bottom: "loc3/flatten" - bottom: "loc4/flatten" - bottom: "loc5/flatten" - top: "mbox_loc" - concat_param { - axis: 1 - } -} -layer { - name: "mbox_priorbox" - type: "Concat" - bottom: "stage4_8/sum/prior_box" - bottom: "stage5_4/sum/prior_box" - bottom: "stage6_2/sum/prior_box" - bottom: "stage7_2/sum/prior_box" - bottom: "stage8_2/sum/prior_box" - top: "mbox_priorbox" - concat_param { - axis: 2 - } -} -layer { - name: "mbox_conf_reshape" - type: "Reshape" - bottom: "mbox_conf" - top: "mbox_conf_reshape" - reshape_param { - shape { - dim: 0 - dim: -1 - dim: 2 - } - } -} -layer { - name: "mbox_conf_softmax" - type: "Softmax" - bottom: "mbox_conf_reshape" - top: "mbox_conf_softmax" - softmax_param { - axis: 2 - } -} -layer { - name: "mbox_conf_flatten" - type: "Flatten" - bottom: "mbox_conf_softmax" - top: "mbox_conf_flatten" - flatten_param { - axis: 1 - } -} -layer { - name: "detection_output" - type: "DetectionOutput" - bottom: "mbox_loc" - bottom: "mbox_conf_flatten" - bottom: "mbox_priorbox" - top: "detection_output" - detection_output_param { - num_classes: 2 - share_location: true - background_label_id: 0 - nms_param { - nms_threshold: 0.44999998807907104 - top_k: 100 - } - code_type: CENTER_SIZE - keep_top_k: 100 - confidence_threshold: 0.20000000298023224 - } -} diff --git a/models/qrcode_wechatqrcode/example_outputs/wechat_qrcode_demo.gif b/models/qrcode_wechatqrcode/example_outputs/wechat_qrcode_demo.gif deleted file mode 100644 index 1980e19b..00000000 --- a/models/qrcode_wechatqrcode/example_outputs/wechat_qrcode_demo.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:ef1aa6f9b78320b3e3d6032648261dcfe250db332a58455787c88a87711a8b58 -size 1785414 diff --git a/models/qrcode_wechatqrcode/sr_2021nov.caffemodel b/models/qrcode_wechatqrcode/sr_2021nov.caffemodel deleted file mode 100644 index ec2e9565..00000000 --- a/models/qrcode_wechatqrcode/sr_2021nov.caffemodel +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e5d36889d8e6ef2f1c1f515f807cec03979320ac81792cd8fb927c31fd658ae3 -size 23929 diff --git a/models/qrcode_wechatqrcode/sr_2021nov.prototxt b/models/qrcode_wechatqrcode/sr_2021nov.prototxt deleted file mode 100644 index e85caa17..00000000 --- a/models/qrcode_wechatqrcode/sr_2021nov.prototxt +++ /dev/null @@ -1,403 +0,0 @@ -layer { - name: "data" - type: "Input" - top: "data" - input_param { - shape { - dim: 1 - dim: 1 - dim: 224 - dim: 224 - } - } -} -layer { - name: "conv0" - type: "Convolution" - bottom: "data" - top: "conv0" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 32 - bias_term: true - pad: 1 - kernel_size: 3 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "conv0/lrelu" - type: "ReLU" - bottom: "conv0" - top: "conv0" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "db1/reduce" - type: "Convolution" - bottom: "conv0" - top: "db1/reduce" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 8 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "db1/reduce/lrelu" - type: "ReLU" - bottom: "db1/reduce" - top: "db1/reduce" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "db1/3x3" - type: "Convolution" - bottom: "db1/reduce" - top: "db1/3x3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - 
num_output: 8 - bias_term: true - pad: 1 - kernel_size: 3 - group: 8 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "db1/3x3/lrelu" - type: "ReLU" - bottom: "db1/3x3" - top: "db1/3x3" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "db1/1x1" - type: "Convolution" - bottom: "db1/3x3" - top: "db1/1x1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 32 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "db1/1x1/lrelu" - type: "ReLU" - bottom: "db1/1x1" - top: "db1/1x1" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "db1/concat" - type: "Concat" - bottom: "conv0" - bottom: "db1/1x1" - top: "db1/concat" - concat_param { - axis: 1 - } -} -layer { - name: "db2/reduce" - type: "Convolution" - bottom: "db1/concat" - top: "db2/reduce" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 8 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "db2/reduce/lrelu" - type: "ReLU" - bottom: "db2/reduce" - top: "db2/reduce" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "db2/3x3" - type: "Convolution" - bottom: "db2/reduce" - top: "db2/3x3" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 8 - bias_term: true - pad: 1 - kernel_size: 3 - group: 8 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "db2/3x3/lrelu" - type: "ReLU" - bottom: "db2/3x3" - top: "db2/3x3" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "db2/1x1" - type: "Convolution" - bottom: "db2/3x3" - top: "db2/1x1" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - 
lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 32 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "db2/1x1/lrelu" - type: "ReLU" - bottom: "db2/1x1" - top: "db2/1x1" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "db2/concat" - type: "Concat" - bottom: "db1/concat" - bottom: "db2/1x1" - top: "db2/concat" - concat_param { - axis: 1 - } -} -layer { - name: "upsample/reduce" - type: "Convolution" - bottom: "db2/concat" - top: "upsample/reduce" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 32 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "upsample/reduce/lrelu" - type: "ReLU" - bottom: "upsample/reduce" - top: "upsample/reduce" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "upsample/deconv" - type: "Deconvolution" - bottom: "upsample/reduce" - top: "upsample/deconv" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 32 - bias_term: true - pad: 1 - kernel_size: 3 - group: 32 - stride: 2 - weight_filler { - type: "msra" - } - } -} -layer { - name: "upsample/lrelu" - type: "ReLU" - bottom: "upsample/deconv" - top: "upsample/deconv" - relu_param { - negative_slope: 0.05000000074505806 - } -} -layer { - name: "upsample/rec" - type: "Convolution" - bottom: "upsample/deconv" - top: "upsample/rec" - param { - lr_mult: 1.0 - decay_mult: 1.0 - } - param { - lr_mult: 1.0 - decay_mult: 0.0 - } - convolution_param { - num_output: 1 - bias_term: true - pad: 0 - kernel_size: 1 - group: 1 - stride: 1 - weight_filler { - type: "msra" - } - } -} -layer { - name: "nearest" - type: "Deconvolution" - bottom: "data" - top: "nearest" - param { - lr_mult: 0.0 - decay_mult: 0.0 - } - 
convolution_param { - num_output: 1 - bias_term: false - pad: 0 - kernel_size: 2 - group: 1 - stride: 2 - weight_filler { - type: "constant" - value: 1.0 - } - } -} -layer { - name: "Crop1" - type: "Crop" - bottom: "nearest" - bottom: "upsample/rec" - top: "Crop1" -} -layer { - name: "fc" - type: "Eltwise" - bottom: "Crop1" - bottom: "upsample/rec" - top: "fc" - eltwise_param { - operation: SUM - } -} diff --git a/models/qrcode_wechatqrcode/wechatqrcode.py b/models/qrcode_wechatqrcode/wechatqrcode.py deleted file mode 100644 index 95c98be4..00000000 --- a/models/qrcode_wechatqrcode/wechatqrcode.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. - -import numpy as np -import cv2 as cv # needs to have cv.wechat_qrcode_WeChatQRCode, which requires compile from source with opencv_contrib/modules/wechat_qrcode - -class WeChatQRCode: - def __init__(self, detect_prototxt_path, detect_model_path, sr_prototxt_path, sr_model_path, backendId=0, targetId=0): - self._model = cv.wechat_qrcode_WeChatQRCode( - detect_prototxt_path, - detect_model_path, - sr_prototxt_path, - sr_model_path - ) - if backendId != 0 and backendId != 3: - raise NotImplementedError("Backend {} is not supported by cv.wechat_qrcode_WeChatQRCode()".format(backendId)) - if targetId != 0: - raise NotImplementedError("Target {} is not supported by cv.wechat_qrcode_WeChatQRCode()") - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - if backendId != 0 and backendId != 3: - raise NotImplementedError("Backend {} is not supported by cv.wechat_qrcode_WeChatQRCode()".format(backendId)) - if targetId != 0: - raise NotImplementedError("Target {} is not 
supported by cv.wechat_qrcode_WeChatQRCode()") - - def infer(self, image): - return self._model.detectAndDecode(image) diff --git a/models/text_detection_ppocr/CMakeLists.txt b/models/text_detection_ppocr/CMakeLists.txt deleted file mode 100644 index 9f56acaa..00000000 --- a/models/text_detection_ppocr/CMakeLists.txt +++ /dev/null @@ -1,29 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(project_name "opencv_zoo_text_detection_ppocr") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. -# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/text_detection_ppocr/LICENSE b/models/text_detection_ppocr/LICENSE deleted file mode 100644 index 9696cafd..00000000 --- a/models/text_detection_ppocr/LICENSE +++ /dev/null @@ -1,203 +0,0 @@ -Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/models/text_detection_ppocr/README.md b/models/text_detection_ppocr/README.md deleted file mode 100644 index 49a40034..00000000 --- a/models/text_detection_ppocr/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# PP-OCRv3 Text Detection - -PP-OCRv3: More Attempts for the Improvement of Ultra Lightweight OCR System. - -**Note**: - -- The int8 quantization model may produce unstable results due to some loss of accuracy. -- Original Paddle Models source of English: [here](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar). -- Original Paddle Models source of Chinese: [here](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar). 
-- `IC15` in the filename means the model is trained on [IC15 dataset](https://rrc.cvc.uab.es/?ch=4&com=introduction), which can detect English text instances only. -- `TD500` in the filename means the model is trained on [TD500 dataset](http://www.iapr-tc11.org/mediawiki/index.php/MSRA_Text_Detection_500_Database_(MSRA-TD500)), which can detect both English & Chinese instances. -- Visit https://docs.opencv.org/master/d4/d43/tutorial_dnn_text_spotting.html for more information. -- `text_detection_xx_ppocrv3_2023may_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -## Demo - -### Python - -Run the following command to try the demo: - -```shell -# detect on camera input -python demo.py -# detect on an image -python demo.py --input /path/to/image -v - -# get help regarding various parameters -python demo.py --help -``` - -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# A typical and default installation path of OpenCV is /usr/local -cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation . -cmake --build build -# detect on camera input -./build/opencv_zoo_text_detection_ppocr -m=/path/to/model -# detect on an image -./build/opencv_zoo_text_detection_ppocr -m=/path/to/model -i=/path/to/image -v -# get help messages -./build/opencv_zoo_text_detection_ppocr -h -``` - -### Example outputs - -![mask](./example_outputs/mask.jpg) - -![gsoc](./example_outputs/gsoc.jpg) - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference - -- https://arxiv.org/abs/2206.03001 -- https://github.com/PaddlePaddle/PaddleOCR -- https://docs.opencv.org/master/d4/d43/tutorial_dnn_text_spotting.html diff --git a/models/text_detection_ppocr/demo.cpp b/models/text_detection_ppocr/demo.cpp deleted file mode 100644 index c1faa757..00000000 --- a/models/text_detection_ppocr/demo.cpp +++ /dev/null @@ -1,186 +0,0 @@ -#include - -#include -#include -#include - -using namespace std; -using namespace cv; -using namespace dnn; - -vector< pair > backendTargetPairs = { - std::make_pair(dnn::DNN_BACKEND_OPENCV, dnn::DNN_TARGET_CPU), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA_FP16), - std::make_pair(dnn::DNN_BACKEND_TIMVX, dnn::DNN_TARGET_NPU), - std::make_pair(dnn::DNN_BACKEND_CANN, dnn::DNN_TARGET_NPU)}; - - -std::string keys = -"{ help h | | Print help message. }" -"{ model m | text_detection_cn_ppocrv3_2023may.onnx | Usage: Set model type, defaults to text_detection_ch_ppocrv3_2023may.onnx }" -"{ input i | | Usage: Path to input image or video file. Skip this argument to capture frames from a camera.}" -"{ width | 736 | Usage: Resize input image to certain width, default = 736. It should be multiple by 32.}" -"{ height | 736 | Usage: Resize input image to certain height, default = 736. It should be multiple by 32.}" -"{ binary_threshold | 0.3 | Usage: Threshold of the binary map, default = 0.3.}" -"{ polygon_threshold | 0.5 | Usage: Threshold of polygons, default = 0.5.}" -"{ max_candidates | 200 | Usage: Set maximum number of polygon candidates, default = 200.}" -"{ unclip_ratio | 2.0 | Usage: The unclip ratio of the detected text region, which determines the output size, default = 2.0.}" -"{ save s | true | Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.}" -"{ viz v | true | Usage: Specify to open a new window to show results. 
Invalid in case of camera input.}" -"{ backend bt | 0 | Choose one of computation backends: " -"0: (default) OpenCV implementation + CPU, " -"1: CUDA + GPU (CUDA), " -"2: CUDA + GPU (CUDA FP16), " -"3: TIM-VX + NPU, " -"4: CANN + NPU}"; - - -class PPOCRDet { -public: - - PPOCRDet(string modPath, Size inSize = Size(736, 736), float binThresh = 0.3, - float polyThresh = 0.5, int maxCand = 200, double unRatio = 2.0, - dnn::Backend bId = DNN_BACKEND_DEFAULT, dnn::Target tId = DNN_TARGET_CPU) : modelPath(modPath), inputSize(inSize), binaryThreshold(binThresh), - polygonThreshold(polyThresh), maxCandidates(maxCand), unclipRatio(unRatio), - backendId(bId), targetId(tId) - { - this->model = TextDetectionModel_DB(readNet(modelPath)); - this->model.setPreferableBackend(backendId); - this->model.setPreferableTarget(targetId); - - this->model.setBinaryThreshold(binaryThreshold); - this->model.setPolygonThreshold(polygonThreshold); - this->model.setUnclipRatio(unclipRatio); - this->model.setMaxCandidates(maxCandidates); - - this->model.setInputParams(1.0 / 255.0, inputSize, Scalar(122.67891434, 116.66876762, 104.00698793)); - } - pair< vector>, vector > infer(Mat image) { - CV_Assert(image.rows == this->inputSize.height && "height of input image != net input size "); - CV_Assert(image.cols == this->inputSize.width && "width of input image != net input size "); - vector> pt; - vector confidence; - this->model.detect(image, pt, confidence); - return make_pair< vector> &, vector< float > &>(pt, confidence); - } - -private: - string modelPath; - TextDetectionModel_DB model; - Size inputSize; - float binaryThreshold; - float polygonThreshold; - int maxCandidates; - double unclipRatio; - dnn::Backend backendId; - dnn::Target targetId; - -}; - -Mat visualize(Mat image, pair< vector>, vector >&results, double fps=-1, Scalar boxColor=Scalar(0, 255, 0), Scalar textColor=Scalar(0, 0, 255), bool isClosed=true, int thickness=2) -{ - Mat output; - image.copyTo(output); - if (fps > 0) - 
putText(output, format("FPS: %.2f", fps), Point(0, 15), FONT_HERSHEY_SIMPLEX, 0.5, textColor); - polylines(output, results.first, isClosed, boxColor, thickness); - return output; -} - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("Use this program to run Real-time Scene Text Detection with Differentiable Binarization in opencv Zoo using OpenCV."); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - int backendTargetid = parser.get("backend"); - String modelName = parser.get("model"); - - if (modelName.empty()) - { - CV_Error(Error::StsError, "Model file " + modelName + " not found"); - } - - Size inpSize(parser.get("width"), parser.get("height")); - float binThresh = parser.get("binary_threshold"); - float polyThresh = parser.get("polygon_threshold"); - int maxCand = parser.get("max_candidates"); - double unRatio = parser.get("unclip_ratio"); - bool save = parser.get("save"); - bool viz = parser.get("viz"); - - PPOCRDet model(modelName, inpSize, binThresh, polyThresh, maxCand, unRatio, backendTargetPairs[backendTargetid].first, backendTargetPairs[backendTargetid].second); - - //! 
[Open a video file or an image file or a camera stream] - VideoCapture cap; - if (parser.has("input")) - cap.open(parser.get("input")); - else - cap.open(0); - if (!cap.isOpened()) - CV_Error(Error::StsError, "Cannot open video or file"); - Mat originalImage; - static const std::string kWinName = modelName; - while (waitKey(1) < 0) - { - cap >> originalImage; - if (originalImage.empty()) - { - if (parser.has("input")) - { - cout << "Frame is empty" << endl; - break; - } - else - continue; - } - int originalW = originalImage.cols; - int originalH = originalImage.rows; - double scaleHeight = originalH / double(inpSize.height); - double scaleWidth = originalW / double(inpSize.width); - Mat image; - resize(originalImage, image, inpSize); - - // inference - TickMeter tm; - tm.start(); - pair< vector>, vector > results = model.infer(image); - tm.stop(); - auto x = results.first; - // Scale the results bounding box - for (auto &pts : results.first) - { - for (int i = 0; i < 4; i++) - { - pts[i].x = int(pts[i].x * scaleWidth); - pts[i].y = int(pts[i].y * scaleHeight); - } - } - originalImage = visualize(originalImage, results, tm.getFPS()); - tm.reset(); - if (parser.has("input")) - { - if (save) - { - cout << "Result image saved to result.jpg\n"; - imwrite("result.jpg", originalImage); - } - if (viz) - { - imshow(kWinName, originalImage); - waitKey(0); - } - } - else - imshow(kWinName, originalImage); - } - return 0; -} - - diff --git a/models/text_detection_ppocr/demo.py b/models/text_detection_ppocr/demo.py deleted file mode 100644 index 18a5efaa..00000000 --- a/models/text_detection_ppocr/demo.py +++ /dev/null @@ -1,155 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from ppocr_det import PPOCRDet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser(description='PP-OCR Text Detection (https://arxiv.org/abs/2206.03001).') -parser.add_argument('--input', '-i', type=str, - help='Usage: Set path to the input image. Omit for using default camera.') -parser.add_argument('--model', '-m', type=str, default='./text_detection_en_ppocrv3_2023may.onnx', - help='Usage: Set model path, defaults to text_detection_en_ppocrv3_2023may.onnx.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--width', type=int, default=736, - help='Usage: Resize input image to certain width, default = 736. It should be multiple by 32.') -parser.add_argument('--height', type=int, default=736, - help='Usage: Resize input image to certain height, default = 736. 
It should be multiple by 32.') -parser.add_argument('--binary_threshold', type=float, default=0.3, - help='Usage: Threshold of the binary map, default = 0.3.') -parser.add_argument('--polygon_threshold', type=float, default=0.5, - help='Usage: Threshold of polygons, default = 0.5.') -parser.add_argument('--max_candidates', type=int, default=200, - help='Usage: Set maximum number of polygon candidates, default = 200.') -parser.add_argument('--unclip_ratio', type=np.float64, default=2.0, - help=' Usage: The unclip ratio of the detected text region, which determines the output size, default = 2.0.') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. Invalid in case of camera input.') -args = parser.parse_args() - -def visualize(image, results, box_color=(0, 255, 0), text_color=(0, 0, 255), isClosed=True, thickness=2, fps=None): - output = image.copy() - - if fps is not None: - cv.putText(output, 'FPS: {:.2f}'.format(fps), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, text_color) - - pts = np.array(results[0]) - output = cv.polylines(output, pts, isClosed, box_color, thickness) - - return output - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # Instantiate model - model = PPOCRDet(modelPath=args.model, - inputSize=[args.width, args.height], - binaryThreshold=args.binary_threshold, - polygonThreshold=args.polygon_threshold, - maxCandidates=args.max_candidates, - unclipRatio=args.unclip_ratio, - backendId=backend_id, - targetId=target_id) - - # If input is an image - if args.input is not None: - original_image = cv.imread(args.input) - original_w = original_image.shape[1] - original_h = original_image.shape[0] - scaleHeight = 
original_h / args.height - scaleWidth = original_w / args.width - image = cv.resize(original_image, [args.width, args.height]) - - # Inference - results = model.infer(image) - - # Scale the results bounding box - for i in range(len(results[0])): - for j in range(4): - box = results[0][i][j] - results[0][i][j][0] = box[0] * scaleWidth - results[0][i][j][1] = box[1] * scaleHeight - - # Print results - print('{} texts detected.'.format(len(results[0]))) - for idx, (bbox, score) in enumerate(zip(results[0], results[1])): - print('{}: {} {} {} {}, {:.2f}'.format(idx, bbox[0], bbox[1], bbox[2], bbox[3], score)) - - # Draw results on the input image - original_image = visualize(original_image, results) - - # Save results if save is true - if args.save: - print('Resutls saved to result.jpg\n') - cv.imwrite('result.jpg', original_image) - - # Visualize results in a new window - if args.vis: - cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, original_image) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, original_image = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - original_w = original_image.shape[1] - original_h = original_image.shape[0] - scaleHeight = original_h / args.height - scaleWidth = original_w / args.width - frame = cv.resize(original_image, [args.width, args.height]) - # Inference - tm.start() - results = model.infer(frame) # results is a tuple - tm.stop() - - # Scale the results bounding box - for i in range(len(results[0])): - for j in range(4): - box = results[0][i][j] - results[0][i][j][0] = box[0] * scaleWidth - results[0][i][j][1] = box[1] * scaleHeight - - # Draw results on the input image - original_image = visualize(original_image, results, fps=tm.getFPS()) - - # Visualize results in a new Window - cv.imshow('{} Demo'.format(model.name), original_image) - - tm.reset() diff --git 
a/models/text_detection_ppocr/example_outputs/gsoc.jpg b/models/text_detection_ppocr/example_outputs/gsoc.jpg deleted file mode 100644 index 58d29c89..00000000 --- a/models/text_detection_ppocr/example_outputs/gsoc.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c6c852b20c3b187d3eefc7e0d4e89a89ec96637dfc544f8169bcfe4981ce8143 -size 314342 diff --git a/models/text_detection_ppocr/example_outputs/mask.jpg b/models/text_detection_ppocr/example_outputs/mask.jpg deleted file mode 100644 index 5f36556c..00000000 --- a/models/text_detection_ppocr/example_outputs/mask.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1d5b84065442652e94a78fbcf11f210668862f205dad52e7fbf1642a5371898d -size 121326 diff --git a/models/text_detection_ppocr/ppocr_det.py b/models/text_detection_ppocr/ppocr_det.py deleted file mode 100644 index fac01a2e..00000000 --- a/models/text_detection_ppocr/ppocr_det.py +++ /dev/null @@ -1,59 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -import numpy as np -import cv2 as cv - -class PPOCRDet: - def __init__(self, modelPath, inputSize=[736, 736], binaryThreshold=0.3, polygonThreshold=0.5, maxCandidates=200, unclipRatio=2.0, backendId=0, targetId=0): - self._modelPath = modelPath - self._model = cv.dnn_TextDetectionModel_DB( - cv.dnn.readNet(self._modelPath) - ) - - self._inputSize = tuple(inputSize) # (w, h) - self._inputHeight = inputSize[0] - self._inputWidth = inputSize[1] - self._binaryThreshold = binaryThreshold - self._polygonThreshold = polygonThreshold - self._maxCandidates = maxCandidates - self._unclipRatio = unclipRatio - self._backendId = backendId - self._targetId = targetId - - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - self._model.setBinaryThreshold(self._binaryThreshold) - self._model.setPolygonThreshold(self._polygonThreshold) - self._model.setUnclipRatio(self._unclipRatio) - self._model.setMaxCandidates(self._maxCandidates) - - self._model.setInputSize(self._inputSize) - self._model.setInputMean((123.675, 116.28, 103.53)) - self._model.setInputScale(1.0/255.0/np.array([0.229, 0.224, 0.225])) - - @property - def name(self): - return self.__class__.__name__ - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def setInputSize(self, input_size): - self._inputSize = tuple(input_size) - self._model.setInputSize(self._inputSize) - self._model.setInputMean((123.675, 116.28, 103.53)) - self._model.setInputScale(1.0/255.0/np.array([0.229, 0.224, 0.225])) - - def infer(self, image): - assert image.shape[0] == self._inputSize[1], '{} (height of input image) != {} (preset height)'.format(image.shape[0], self._inputSize[1]) - assert image.shape[1] == self._inputSize[0], '{} (width of input image) != {} (preset width)'.format(image.shape[1], self._inputSize[0]) - - 
return self._model.detect(image) diff --git a/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may.onnx b/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may.onnx deleted file mode 100644 index e55d8596..00000000 --- a/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:03f550c6b406fda8bf54bd8327815f6c7e2edd98cea02348c93d879254366587 -size 2423490 diff --git a/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may_int8.onnx b/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may_int8.onnx deleted file mode 100644 index bf51a45d..00000000 --- a/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d094e1bd27ed294acfb7bb608ac87e27d12860b67eebcb45c387288ea9ec4b36 -size 705007 diff --git a/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may_int8bq.onnx b/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may_int8bq.onnx deleted file mode 100644 index a3f95270..00000000 --- a/models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7f4638708dde26fc77b3cd84aed01019d281268276933ca0e13b0ade5220875f -size 855375 diff --git a/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may.onnx b/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may.onnx deleted file mode 100644 index e55d8596..00000000 --- a/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:03f550c6b406fda8bf54bd8327815f6c7e2edd98cea02348c93d879254366587 -size 2423490 diff --git a/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may_int8.onnx b/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may_int8.onnx deleted file mode 100644 
index 6ced759d..00000000 --- a/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5700c6d43bfc022b4bf2905cd0bac1a3d7dc41f4f954e9c171314ae9b4f0e41a -size 705007 diff --git a/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may_int8bq.onnx b/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may_int8bq.onnx deleted file mode 100644 index a3f95270..00000000 --- a/models/text_detection_ppocr/text_detection_en_ppocrv3_2023may_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7f4638708dde26fc77b3cd84aed01019d281268276933ca0e13b0ade5220875f -size 855375 diff --git a/models/text_recognition_crnn/CMakeLists.txt b/models/text_recognition_crnn/CMakeLists.txt deleted file mode 100644 index 15a73813..00000000 --- a/models/text_recognition_crnn/CMakeLists.txt +++ /dev/null @@ -1,29 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -set(project_name "opencv_zoo_text_recognition_crnn") - -PROJECT (${project_name}) - -set(OPENCV_VERSION "4.10.0") -set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation") -find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH}) -# Find OpenCV, you may need to set OpenCV_DIR variable -# to the absolute path to the directory containing OpenCVConfig.cmake file -# via the command line or GUI - -file(GLOB SourceFile - "demo.cpp") -# If the package has been found, several variables will -# be set, you can find the full list with descriptions -# in the OpenCVConfig.cmake file. 
-# Print some message showing some of them -message(STATUS "OpenCV library status:") -message(STATUS " config: ${OpenCV_DIR}") -message(STATUS " version: ${OpenCV_VERSION}") -message(STATUS " libraries: ${OpenCV_LIBS}") -message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") - -# Declare the executable target built from your sources -add_executable(${project_name} ${SourceFile}) - -# Link your application with OpenCV libraries -target_link_libraries(${project_name} PRIVATE ${OpenCV_LIBS}) diff --git a/models/text_recognition_crnn/LICENSE b/models/text_recognition_crnn/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/models/text_recognition_crnn/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/models/text_recognition_crnn/README.md b/models/text_recognition_crnn/README.md deleted file mode 100644 index 5f0a3f5a..00000000 --- a/models/text_recognition_crnn/README.md +++ /dev/null @@ -1,103 +0,0 @@ -# CRNN - -[An End-to-End Trainable Neural Network for Image-based Sequence Recognition and Its Application to Scene Text Recognition](https://arxiv.org/abs/1507.05717) - -Results of accuracy evaluation with [tools/eval](../../tools/eval) at different text recognition datasets. 
- -| Model name | ICDAR03(%) | IIIT5k(%) | CUTE80(%) | -| ------------ | ---------- | --------- | --------- | -| CRNN_EN | 81.66 | 74.33 | 52.78 | -| CRNN_EN_FP16 | 82.01 | 74.93 | 52.34 | -| CRNN_EN_INT8 | 81.75 | 75.33 | 52.43 | -| CRNN_CH | 71.28 | 80.90 | 67.36 | -| CRNN_CH_FP16 | 78.63 | 80.93 | 67.01 | -| CRNN_CH_INT8 | 78.11 | 81.20 | 67.01 | - -\*: 'FP16' or 'INT8' stands for 'model quantized into FP16' or 'model quantized into int8' - -**Note**: - -- Model source: - - `text_recognition_CRNN_EN_2021sep.onnx`: https://docs.opencv.org/4.5.2/d9/d1e/tutorial_dnn_OCR.html (CRNN_VGG_BiLSTM_CTC.onnx) - - `text_recognition_CRNN_CH_2021sep.onnx`: https://docs.opencv.org/4.x/d4/d43/tutorial_dnn_text_spotting.html (crnn_cs.onnx) - - `text_recognition_CRNN_CN_2021nov.onnx`: https://docs.opencv.org/4.5.2/d4/d43/tutorial_dnn_text_spotting.html (crnn_cs_CN.onnx) -- `text_recognition_CRNN_EN_2021sep.onnx` can detect digits (0\~9) and letters (return lowercase letters a\~z) (see `CHARSET_EN_36` for details in `crnn.py`). -- `text_recognition_CRNN_CH_2021sep.onnx` can detect digits (0\~9), upper/lower-case letters (a\~z and A\~Z), and some special characters (see `CHARSET_CH_94` for details in `crnn.py`). -- `text_recognition_CRNN_CN_2021nov.onnx` can detect digits (0\~9), upper/lower-case letters (a\~z and A\~Z), some Chinese characters and some special characters (see `CHARSET_CN_3944` for details in `crnn.py`). -- For details on training this model series, please visit https://github.com/zihaomu/deep-text-recognition-benchmark. -- `text_recognition_CRNN_XX_2021xxx_int8bq.onnx` represents the block-quantized version in int8 precision and is generated using [block_quantize.py](../../tools/quantize/block_quantize.py) with `block_size=64`. - -## Demo - -***NOTE***: - -- This demo uses [text_detection_db](../text_detection_db) as text detector. 
- -### Python - -Run the demo detecting English: - -```shell -# detect on camera input -python demo.py -# detect on an image -python demo.py --input /path/to/image -v - -# get help regarding various parameters -python demo.py --help -``` - -Run the demo detecting Chinese: - -```shell -# detect on camera input -python demo.py --model text_recognition_CRNN_CN_2021nov.onnx -# detect on an image -python demo.py --input /path/to/image --model text_recognition_CRNN_CN_2021nov.onnx - -# get help regarding various parameters -python demo.py --help -``` -### C++ - -Install latest OpenCV and CMake >= 3.24.0 to get started with: - -```shell -# detect on camera input -./build/opencv_zoo_text_recognition_crnn -# detect on an image -./build/opencv_zoo_text_recognition_crnn --input /path/to/image -v - -# get help regarding various parameters -./build/opencv_zoo_text_recognition_crnn --help -``` - -Run the demo detecting Chinese: - -```shell -# detect on camera input -./build/opencv_zoo_text_recognition_crnn --model=text_recognition_CRNN_CN_2021nov.onnx --charset=charset_3944_CN.txt -# detect on an image -./build/opencv_zoo_text_recognition_crnn --input=/path/to/image --model=text_recognition_CRNN_CN_2021nov.onnx --charset=charset_3944_CN.txt - -# get help regarding various parameters -./build/opencv_zoo_text_recognition_crnn --help -``` - -### Examples - -![CRNNCTC](./example_outputs/CRNNCTC.gif) - -![demo](./example_outputs/demo.jpg) - -## License - -All files in this directory are licensed under [Apache 2.0 License](./LICENSE). 
- -## Reference - -- https://arxiv.org/abs/1507.05717 -- https://github.com/bgshih/crnn -- https://github.com/meijieru/crnn.pytorch -- https://github.com/zihaomu/deep-text-recognition-benchmark -- https://docs.opencv.org/4.5.2/d9/d1e/tutorial_dnn_OCR.html diff --git a/models/text_recognition_crnn/charset_32_94_3944.h b/models/text_recognition_crnn/charset_32_94_3944.h deleted file mode 100644 index 3e2d2419..00000000 --- a/models/text_recognition_crnn/charset_32_94_3944.h +++ /dev/null @@ -1,4092 +0,0 @@ -#include -#include - -std::vector loadCharset(std::string name){ - std::vector CHARSET_EN_36 = { - u"0", - u"1", - u"2", - u"3", - u"4", - u"5", - u"6", - u"7", - u"8", - u"9", - u"a", - u"b", - u"c", - u"d", - u"e", - u"f", - u"g", - u"h", - u"i", - u"j", - u"k", - u"l", - u"m", - u"n", - u"o", - u"p", - u"q", - u"r", - u"s", - u"t", - u"u", - u"v", - u"w", - u"x", - u"y", - u"z" }; - - std::vector CHARSET_CH_94 = { - u"0", - u"1", - u"2", - u"3", - u"4", - u"5", - u"6", - u"7", - u"8", - u"9", - u"a", - u"b", - u"c", - u"d", - u"e", - u"f", - u"g", - u"h", - u"i", - u"j", - u"k", - u"l", - u"m", - u"n", - u"o", - u"p", - u"q", - u"r", - u"s", - u"t", - u"u", - u"v", - u"w", - u"x", - u"y", - u"z", - u"A", - u"B", - u"C", - u"D", - u"E", - u"F", - u"G", - u"H", - u"I", - u"J", - u"K", - u"L", - u"M", - u"N", - u"O", - u"P", - u"Q", - u"R", - u"S", - u"T", - u"U", - u"V", - u"W", - u"X", - u"Y", - u"Z", - u"!", - u"\"", - u"#", - u"$", - u"%", - u"&", - u"'", - u"(", - u")", - u"*", - u"+", - u",", - u"-", - u".", - u"/", - u":", - u";", - u"<", - u"=", - u">", - u"?", - u"@", - u"[", - u"\\", - u"]", - u"^", - u"_", - u"`", - u"{", - u"|", - u"}" }; - - std::vector CHARSET_CN_3944 = { - u"H", - u"O", - u"K", - u"I", - u"T", - u"E", - u"A", - u"酱", - u"鸭", - u"传", - u"奇", - u"J", - u"N", - u"G", - u"Y", - u"C", - u"U", - u"Q", - u"蝦", - u"兵", - u"蟹", - u"煲", - u"这", - u"是", - u"可", - u"以", - u"先", - u"吃", - u"后", - u"涮", - u"的", - u"干", - u"锅", - u"菜", - u"加", - 
u"盟", - u"电", - u"话", - u":", - u"1", - u"7", - u"3", - u"9", - u"8", - u"郑", - u"州", - u"总", - u"店", - u"雪", - u"花", - u"勇", - u"闯", - u"天", - u"涯", - u"虾", - u",", - u"一", - u"送", - u"鱼", - u"锡", - u"纸", - u"蛤", - u"土", - u"豆", - u"粉", - u"砂", - u"米", - u"线", - u"牛", - u"筋", - u"面", - u"刀", - u"削", - u"水", - u"饺", - u"吧", - u"沙", - u"拉", - u"老", - u"饭", - u"盒", - u"教", - u"室", - u"主", - u"题", - u"餐", - u"厅", - u"仁", - u"馄", - u"饨", - u"重", - u"庆", - u"小", - u"便", - u"当", - u"全", - u"国", - u"连", - u"锁", - u"4", - u"0", - u"-", - u"6", - u"5", - u"2", - u"人", - u"快", - u"量", - u"贩", - u"蓬", - u"朗", - u"御", - u"茶", - u"川", - u"渝", - u"捞", - u"火", - u"古", - u"之", - u"匠", - u"今", - u"七", - u"西", - u"域", - u"羊", - u"城", - u"l", - u"i", - u"k", - u"n", - u"g", - u"c", - u"o", - u"f", - u"e", - u"w", - u"贵", - u"阳", - u"素", - u"有", - u"家", - u"会", - u"展", - u"口", - u"乐", - u"三", - u"惹", - u"烤", - u"肉", - u"h", - u"t", - u"子", - u"馆", - u"常", - u"盖", - u"浇", - u"兴", - u"业", - u"路", - u"书", - u"亦", - u"燒", - u"仙", - u"草", - u"L", - u":", - u"德", - u"啤", - u"工", - u"坊", - u"杏", - u"屋", - u"高", - u"桥", - u"号", - u"品", - u"麻", - u"辣", - u"烫", - u"检", - u"官", - u".", - u"千", - u"翼", - u"木", - u"兰", - u"画", - u"食", - u"上", - u"汤", - u"剁", - u"馅", - u"手", - u"煮", - u"时", - u"尚", - u"健", - u"康", - u"傲", - u"椒", - u"B", - u"啵", - u"条", - u"脾", - u"气", - u"!", - u"/", - u"月", - u"腾", - u"讯", - u"应", - u"用", - u"喵", - u"泡", - u"我", - u"鲜", - u"滚", - u"给", - u"你", - u"看", - u"客", - u"来", - u"香", - u"汉", - u"湘", - u"本", - u"地", - u"炒", - u"系", - u"列", - u"订", - u"仔", - u"肘", - u"蹄", - u"梅", - u"扣", - u"黄", - u"焖", - u"排", - u"骨", - u"炖", - u"鸡", - u"韓", - u"金", - u"利", - u"串", - u"舊", - u"街", - u"梨", - u"村", - u"座", - u"经", - u"济", - u"实", - u"惠", - u"绿", - u"色", - u"炭", - u"庐", - u"蛙", - u"忆", - u"蓉", - u"源", - u"真", - u"d", - u"D", - u"概", - u"念", - u"创", - u"意", - u"六", - u"熏", - u"各", - u"种", - u"精", - u"美", - u"y", - u"疯", - u"狂", - u"世", - u"界", - u"杯", - u"特", - u"价", - u"酒", - 
u"元", - u"瓶", - u"沸", - u"带", - u"F", - u"请", - u"二", - u"楼", - u"自", - u"动", - u"升", - u"降", - u"烏", - u"邦", - u"嗦", - u"味", - u"风", - u"货", - u"团", - u"外", - u"卖", - u"嘞", - u"个", - u"折", - u"辛", - u"束", - u"舌", - u"尖", - u"中", - u"包", - u"浆", - u"腐", - u"r", - u"P", - u"a", - u"u", - u"丸", - u"作", - u"福", - u"M", - u"漫", - u"蜜", - u"冰", - u"拌", - u"匆", - u"那", - u"年", - u"R", - u"S", - u"果", - u"光", - u"夹", - u"馍", - u"凉", - u"皮", - u"过", - u"祖", - u"南", - u"山", - u"風", - u"景", - u"堂", - u"烘", - u"培", - u"龍", - u"坎", - u"半", - u"婆", - u"建", - u"设", - u"富", - u"强", - u"丽", - u"菏", - u"泽", - u"省", - u"安", - u"港", - u"竹", - u"签", - u"撩", - u"只", - u"为", - u"好", - u"生", - u"活", - u"抓", - u"海", - u"最", - u"网", - u"红", - u"铁", - u"统", - u"®", - u"功", - u"夫", - u"鱿", - u"大", - u"闻", - u"就", - u"知", - u"遇", - u"见", - u"文", - u"合", - u"热", - u"森", - u"台", - u"湾", - u"卤", - u"然", - u"汁", - u"甄", - u"选", - u"材", - u"还", - u"原", - u"初", - u"衷", - u"*", - u"洪", - u"龙", - u"公", - u"酸", - u"巴", - u"乡", - u"焦", - u"烧", - u"淘", - u"成", - u"都", - u"眼", - u"镜", - u"优", - u"菓", - u"恋", - u"V", - u"化", - u"糖", - u"、", - u"粥", - u"田", - u"螺", - u"斓", - u"X", - u"爺", - u"W", - u"j", - u"院", - u"华", - u"Z", - u"蜊", - u"北", - u"京", - u"刷", - u"蝎", - u"腿", - u"梦", - u"幻", - u"奶", - u"式", - u"蛋", - u"鍋", - u"区", - u"·", - u"领", - u"航", - u"者", - u"四", - u"通", - u"往", - u"楚", - u"河", - u"停", - u"车", - u"场", - u"凌", - u"晨", - u"点", - u"杞", - u"缘", - u"王", - u"集", - u"唐", - u"菠", - u"萝", - u"泰", - u"板", - u"鳳", - u"凰", - u"樓", - u"名", - u"壹", - u"猪", - u"晴", - u"舍", - u"犟", - u"师", - u"傅", - u"飯", - u"致", - u"青", - u"春", - u"轰", - u"炸", - u"卡", - u"里", - u"身", - u"厨", - u"房", - u"x", - u"聚", - u"鑫", - u"阁", - u"岛", - u"纯", - u"聘", - u"专", - u"长", - u"庄", - u"鄉", - u"更", - u"珍", - u"固", - u"新", - u"岩", - u"v", - u"s", - u"m", - u"至", - u"尊", - u"比", - u"萨", - u"广", - u"披", - u"饮", - u"管", - u"理", - u"限", - u"司", - u"p", - u"幸", - u"东", - u"正", - u"挞", - u"少", - u"女", - u"克", - u"装", - u"童", - 
u"哒", - u"磨", - u"厂", - u"怼", - u"纤", - u"入", - u"户", - u"独", - u"溜", - u"共", - u"享", - u"滋", - u"江", - u"门", - u"九", - u"蒸", - u"胜", - u"盛", - u"&", - u"魔", - u"爪", - u"鹅", - u"皇", - u"(", - u")", - u"友", - u"甲", - u"魚", - u"首", - u"烹", - u"行", - u"员", - u"若", - u"资", - u"议", - u"联", - u"同", - u"急", - u"私", - u"燕", - u"儿", - u"巢", - u"鹏", - u"记", - u"腊", - u"营", - u"欢", - u"迎", - u"旗", - u"舰", - u"叫", - u"了", - u"做", - u"故", - u"铃", - u"煎", - u"饼", - u"哥", - u"力", - u"五", - u"谷", - u"野", - u"戈", - u"厠", - u"所", - u"超", - u"牌", - u"冒", - u"陳", - u"陈", - u"苕", - u"爽", - u"滑", - u"启", - u"秦", - u"择", - u"现", - u"进", - u"惊", - u"喜", - u"定", - u"于", - u"雅", - u"膳", - u"多", - u"推", - u"淇", - u"淋", - u"b", - u"思", - u"堡", - u"偶", - u"相", - u"伴", - u"呈", - u"湯", - u"绝", - u"浏", - u"\"", - u"刘", - u"态", - u"牧", - u"万", - u"达", - u"和", - u"番", - u"丼", - u"—", - u"机", - u"瘦", - u"绵", - u"柔", - u"厉", - u"蚝", - u"娘", - u"彩", - u"百", - u"事", - u"调", - u"韩", - u"爱", - u"喝", - u"玩", - u"放", - u"肆", - u"寿", - u"净", - u"配", - u"髓", - u"非", - u"道", - u"额", - u"吉", - u"招", - u"商", - u"杂", - u"粮", - u"筐", - u"运", - u"转", - u"服", - u"务", - u"缤", - u"灿", - u"腕", - u"楠", - u"彤", - u"学", - u"橋", - u"试", - u"浩", - u"减", - u"薪", - u"诚", - u"霸", - u"第", - u"间", - u"日", - u"极", - u"料", - u"開", - u"業", - u"霏", - u"星", - u"期", - u"分", - u"秒", - u"内", - u"咨", - u"询", - u"。", - u"樐", - u"头", - u"开", - u"氏", - u"渔", - u"约", - u"劳", - u"保", - u"礼", - u"宏", - u"武", - u"佘", - u"轻", - u"奢", - u"艺", - u"井", - u"隆", - u"鐵", - u"卷", - u"染", - u"焙", - u"钵", - u"马", - u"牟", - u"洋", - u"芋", - u"片", - u"流", - u"宽", - u"心", - u"位", - u"清", - u"潼", - u"关", - u"祥", - u"背", - u"凡", - u"哈", - u"尔", - u"滨", - u"珠", - u"派", - u"艾", - u"让", - u"变", - u"得", - u"样", - u"玖", - u"等", - u"综", - u"性", - u"涵", - u"粗", - u"冠", - u"記", - u"肠", - u"湖", - u"财", - u"贡", - u"桃", - u"杭", - u"平", - u"桂", - u"林", - u"煨", - u"档", - u"案", - u"造", - u"潮", - u"汕", - u"宗", - u"单", - u"县", - u"鲁", - u"舜", - u"脆", - u"酥", - u"糕", - u"仕", 
- u"十", - u"临", - u"簋", - u"宴", - u"字", - u"太", - u"灌", - u"薄", - u"尝", - u"址", - u"晗", - u"幢", - u"购", - u"梁", - u"醉", - u"皖", - u"庭", - u"白", - u"肥", - u"块", - u"石", - u"碗", - u"颜", - u"值", - u"張", - u"瘾", - u"跷", - u"脚", - u"而", - u"叁", - u"蜀", - u"橙", - u"市", - u"边", - u"早", - u"晚", - u"云", - u"吞", - u"目", - u"表", - u"赵", - u"烩", - u"擀", - u"蔬", - u"找", - u"回", - u"游", - u"刃", - u"余", - u"支", - u"洗", - u"吹", - u"休", - u"闲", - u"简", - u"撸", - u"根", - u"据", - u"鸽", - u"铜", - u"亲", - u"贝", - u"纪", - u"吕", - u"豚", - u"饅", - u"悦", - u"汇", - u"油", - u"无", - u"制", - u"在", - u"寻", - u"碳", - u"馋", - u"嘴", - u"架", - u"荣", - u"斋", - u"护", - u"角", - u"落", - u"铺", - u"臊", - u"丝", - u"围", - u"柳", - u"蛳", - u"蒲", - u"庙", - u"视", - u"荐", - u"缃", - u"想", - u"呀", - u"姜", - u"母", - u"起", - u"泉", - u"族", - u"群", - u"众", - u"其", - u"它", - u"血", - u"双", - u"补", - u"阴", - u"润", - u"不", - u"禽", - u"类", - u"款", - u"较", - u"候", - u"些", - u"畅", - u"脉", - u"痰", - u"疏", - u"肝", - u"帮", - u"助", - u"消", - u"增", - u"欲", - u"尤", - u"对", - u"胃", - u"畏", - u"寒", - u"很", - u"效", - u"秘", - u"黑", - u"嘿", - u"佳", - u"越", - u"脑", - u"桶", - u"项", - u"▪", - u"|", - u"榜", - u"许", - u"仿", - u"或", - u"酬", - u"宾", - u"指", - u"买", - u"赠", - u"笃", - u"鼎", - u"盆", - u"™", - u"咕", - u"咾", - u"肚", - u"识", - u"栖", - u"凤", - u"渡", - u"筒", - u"彬", - u"弟", - u"醋", - u"財", - u"師", - u"民", - u"博", - u"丁", - u"扒", - u"翅", - u"墨", - u"柠", - u"檬", - u"紫", - u"薯", - u"焗", - u"芝", - u"士", - u"胸", - u"图", - u"妮", - u"杀", - u"菌", - u"爹", - u"尽", - u"归", - u"宁", - u"粽", - u"瑞", - u"轩", - u"午", - u"陕", - u"出", - u"才", - u"盘", - u"植", - u"甜", - u"粒", - u"神", - u"舟", - u"玻", - u"璃", - u"医", - u"划", - u"药", - u"郡", - u"毛", - u"张", - u"姐", - u"留", - u"满", - u"下", - u"兄", - u"法", - u"鋪", - u"é", - u"[", - u"槑", - u"]", - u"言", - u"密", - u"帝", - u"場", - u"朴", - u"寨", - u"奉", - u"z", - u"什", - u"顺", - u"疆", - u"馕", - u"豫", - u"怀", - u"旧", - u"验", - u"昙", - u"搞", - u"圣", - u"格", - u"ǐ", - u"à", - u"隱", - u"燙", - u"状", - u"居", - u"饱", 
- u"底", - u"免", - u"费", - u"廣", - u"點", - u"專", - u"門", - u"语", - u"叉", - u"左", - u"岸", - u"发", - u"乌", - u"齐", - u"冷", - u"命", - u"●", - u"修", - u"闸", - u"飞", - u"空", - u"养", - u"笼", - u"興", - u"银", - u"套", - u"東", - u"吴", - u"麺", - u"館", - u"¥", - u"从", - u"前", - u"乙", - u"弘", - u"炝", - u"夏", - u"秋", - u"冬", - u"咖", - u"啡", - u"℃", - u"©", - u"莲", - u"塘", - u"哆", - u"梓", - u"依", - u"哎", - u"麦", - u"泗", - u"泾", - u"瓯", - u"胡", - u"∣", - u"歺", - u"八", - u"度", - u"深", - u"夜", - u"旋", - u"永", - u"远", - u"温", - u"又", - u"晶", - u"溏", - u"ä", - u"盔", - u"飘", - u"劲", - u"旺", - u"楸", - u"良", - u"譜", - u"餅", - u"苏", - u"莎", - u"足", - u"宵", - u"与", - u"楊", - u"國", - u"莱", - u"卜", - u"炊", - u"挑", - u"剔", - u"存", - u"错", - u"方", - u"程", - u"解", - u"能", - u"堆", - u"洲", - u"诗", - u"玛", - u"渴", - u"脖", - u"丛", - u"狼", - u"翁", - u"姓", - u"葫", - u"芦", - u"沾", - u"葵", - u"の", - u"咔", - u"粹", - u"弥", - u"乖", - u"悠", - u"茗", - u"别", - u"走", - u"柒", - u"榨", - u"咥", - u"虹", - u"沏", - u"桔", - u"叔", - u"贴", - u"办", - u"充", - u"崎", - u"鮮", - u"属", - u"彭", - u"浦", - u"町", - u"郎", - u"°", - u"悟", - u"惑", - u"科", - u"英", - u"育", - u"岁", - u"幼", - u"园", - u"慢", - u"摆", - u"_", - u"狐", - u"狸", - u"典", - u"暴", - u"帥", - u"尾", - u"琼", - u"見", - u"望", - u"烟", - u"坚", - u"鸳", - u"鸯", - u"直", - u"校", - u"饪", - u"承", - u"们", - u"么", - u"¥", - u"份", - u"宇", - u"炉", - u"峰", - u"乃", - u"趣", - u"代", - u"刨", - u"抖", - u"音", - u"占", - u"谜", - u"答", - u"熟", - u"控", - u"蕾", - u"节", - u"社", - u"您", - u"《", - u"羅", - u"茉", - u"瀞", - u"憨", - u"尼", - u"丰", - u"镇", - u"酿", - u"避", - u"抢", - u"突", - u"破", - u"杰", - u"姆", - u"波", - u"观", - u"澜", - u"庫", - u"舒", - u"谁", - u"短", - u"島", - u"爷", - u"码", - u"每", - u"欧", - u"注", - u"册", - u"标", - u"腸", - u"奈", - u"熊", - u"粵", - u"吳", - u"衢", - u"雄", - u"际", - u"葱", - u"柱", - u"压", - u"陪", - u"器", - u"厘", - u"柴", - u"席", - u"饿", - u"俏", - u"汽", - u"站", - u"霜", - u"荟", - u"禾", - u"咘", - u"臭", - u"夷", - u"肖", - u"微", - u"组", - u"刺", - u"拼", - u"打", - u"信", - u"步", - u"!", 
- u"说", - u"囍", - u"智", - u"藍", - u"鹿", - u"巷", - u"顾", - u"勃", - u"頭", - u"帕", - u"徐", - u"渣", - u"嗨", - u"鲍", - u"抽", - u"莊", - u"胗", - u"耳", - u"栈", - u"葑", - u"谊", - u"李", - u"够", - u"歪", - u"到", - u"杜", - u"绪", - u"始", - u"“", - u"”", - u"编", - u"感", - u"谢", - u"阿", - u"妹", - u"抄", - u"屿", - u"旁", - u"钟", - u"糰", - u"鷄", - u"觉", - u"队", - u"明", - u"没", - u"幺", - u"罗", - u"恭", - u"發", - u"溢", - u"圆", - u"筵", - u"鲩", - u"斤", - u"噜", - u"府", - u"雕", - u"牦", - u"津", - u"間", - u"粤", - u"义", - u"驾", - u"嫩", - u"眷", - u"苔", - u"怡", - u"逍", - u"遥", - u"即", - u"把", - u"季", - u"鹃", - u"妈", - u"烙", - u"淡", - u"嘟", - u"班", - u"散", - u"磐", - u"稣", - u"耍", - u"芽", - u"昌", - u"粿", - u"鼓", - u"姑", - u"央", - u"告", - u"翔", - u"迦", - u"缆", - u"怪", - u"俗", - u"菩", - u"宥", - u"酵", - u"男", - u"顿", - u"蚂", - u"蚁", - u"q", - u"緑", - u"瑩", - u"養", - u"滿", - u"接", - u"立", - u"勤", - u"封", - u"徽", - u"酷", - u"(", - u"慕", - u"曹", - u"吊", - u"咸", - u"矿", - u"黛", - u"刻", - u"呗", - u"布", - u"袋", - u"钝", - u"丘", - u"逗", - u"窗", - u"吾", - u"塔", - u"坡", - u"周", - u"雙", - u"朝", - u"末", - u"如", - u"杨", - u"淮", - u"摄", - u"影", - u"翻", - u"窝", - u"物", - u"椰", - u"荞", - u"搅", - u"陇", - u"收", - u"两", - u"倍", - u"狮", - u"伊", - u"後", - u"晖", - u"長", - u"箐", - u"豪", - u"耀", - u"漢", - u"釜", - u"宮", - u"次", - u"掌", - u"斯", - u"朋", - u"针", - u"菇", - u"蚬", - u"拍", - u"雒", - u"陽", - u"漿", - u"麵", - u"條", - u"部", - u"←", - u"柜", - u"驴", - u"证", - u"票", - u"账", - u"汗", - u"汆", - u"稍", - u"戏", - u"菋", - u"卫", - u"匹", - u"栋", - u"馨", - u"肯", - u"迪", - u"邢", - u"梯", - u"容", - u"嘉", - u"莞", - u"袁", - u"锦", - u"遮", - u"雨", - u"篷", - u"腰", - u"肺", - u"剡", - u"乾", - u",", - u"翰", - u"蔚", - u"刁", - u"藤", - u"帅", - u"傳", - u"维", - u"笔", - u"历", - u"史", - u"】", - u"适", - u"煌", - u"倾", - u"沧", - u"姬", - u"训", - u"邵", - u"诺", - u"敢", - u"质", - u"益", - u"佬", - u"兼", - u"职", - u"盅", - u"诊", - u"扬", - u"速", - u"宝", - u"褚", - u"糁", - u"钢", - u"松", - u"婚", - u"秀", - u"盐", - u"及", - u"個", - u"飲", - u"绍", - u"槿", - u"觅", - u"逼", 
- u"兽", - u"》", - u"吐", - u"右", - u"久", - u"闺", - u"祝", - u"贺", - u"啦", - u"瓦", - u"甏", - u"探", - u"辰", - u"碚", - u"芳", - u"灣", - u"泷", - u"饰", - u"隔", - u"帐", - u"飮", - u"搜", - u"時", - u"宫", - u"蘭", - u"再", - u"糊", - u"仓", - u"稻", - u"玉", - u"印", - u"象", - u"稀", - u"拴", - u"桩", - u"餃", - u"贾", - u"贱", - u"球", - u"萌", - u"撕", - u"脂", - u"肪", - u"层", - u"晋", - u"荷", - u"钱", - u"潍", - u"失", - u"孜", - u"提", - u"供", - u"具", - u"洛", - u"涂", - u"叠", - u"豊", - u"积", - u"媒", - u"级", - u"纷", - u"巧", - u"瓜", - u"苹", - u"琥", - u"珀", - u"蜂", - u"柚", - u"莉", - u"爆", - u"龄", - u"饸", - u"饹", - u"郞", - u"嫡", - u"億", - u"姚", - u"繁", - u"监", - u"督", - u"示", - u"佰", - u"汍", - u"%", - u"甘", - u"蔗", - u"喻", - u"骄", - u"基", - u"因", - u"匙", - u"评", - u"侠", - u"赢", - u"交", - u"歡", - u"待", - u"馒", - u"产", - u"倡", - u"导", - u"低", - u"茂", - u"沐", - u"熙", - u"延", - u"丧", - u"受", - u"确", - u"睡", - u"蓝", - u"未", - u"賣", - u"電", - u"話", - u"农", - u"札", - u"岗", - u"树", - u"赖", - u"琪", - u"驻", - u"辉", - u"软", - u"防", - u"盗", - u"隐", - u"形", - u"纱", - u"灶", - u"扎", - u"环", - u"禁", - u"止", - u"吸", - u"萬", - u"昆", - u"几", - u"跳", - u"媳", - u"婦", - u"坛", - u"<", - u">", - u"拿", - u"妖", - u"协", - u"朱", - u"住", - u"宿", - u"魅", - u"照", - u"碰", - u"滴", - u"何", - u"贤", - u"棒", - u"持", - u"啊", - u"赛", - u"版", - u"帆", - u"順", - u"狗", - u"情", - u"+", - u"洞", - u"奋", - u"斗", - u"亨", - u"叶", - u"涛", - u"铝", - u"范", - u"汀", - u"號", - u"律", - u"價", - u"鞭", - u"肩", - u"#", - u"愚", - u"奥", - u"脯", - u"沁", - u"奚", - u"魏", - u"批", - u"租", - u"宠", - u"炲", - u"横", - u"沥", - u"彪", - u"投", - u"诉", - u"犀", - u"去", - u"屠", - u"鲅", - u"~", - u"俱", - u"徒", - u"鴻", - u"劉", - u"迷", - u"荤", - u"威", - u"曜", - u"連", - u"鎖", - u"馳", - u"载", - u"添", - u"筑", - u"陵", - u"佐", - u"敦", - u">", - u"郭", - u"厢", - u"祛", - u"茄", - u"堰", - u"漂", - u"亮", - u"爅", - u"虎", - u"膀", - u"叼", - u"猫", - u"藏", - u"陶", - u"鲈", - u"栏", - u"…", - u"考", - u"冲", - u"胖", - u"裕", - u"沃", - u"挂", - u"报", - u"兔", - u"胶", - u"臨", - u"附", - u"处", - u"嫂", - u"萃", 
- u"幂", - u"吻", - u"聪", - u"糯", - u"糍", - u"棋", - u"烓", - u"脊", - u"衡", - u"亚", - u"副", - u"肤", - u"荆", - u"榴", - u"绚", - u"黔", - u"圈", - u"纳", - u"课", - u"逸", - u"宜", - u"=", - u"烊", - u"姨", - u"施", - u"救", - u"贸", - u"啥", - u"也", - u"贯", - u"雷", - u"呆", - u"棠", - u"伙", - u"岐", - u"宛", - u"媽", - u"寸", - u"澳", - u"已", - u"還", - u"兒", - u"Ⅱ", - u"凯", - u"株", - u"藕", - u"闽", - u"窖", - u"瀘", - u"售", - u"索", - u"体", - u"型", - u"樂", - u"琅", - u"琊", - u"夺", - u"扩", - u")", - u"诱", - u"滩", - u"浓", - u"要", - u"芹", - u"君", - u"反", - u"复", - u"羔", - u"追", - u"演", - u"唱", - u"過", - u"綫", - u"乳", - u"涩", - u"芒", - u"露", - u"蒙", - u"羯", - u"励", - u"志", - u"嵊", - u"閒", - u"罐", - u"佛", - u"墙", - u"頁", - u"坐", - u"眯", - u"预", - u"華", - u"廉", - u"释", - u"必", - u"随", - u"逐", - u"引", - u"究", - u"爸", - u"灵", - u"勺", - u"岂", - u"俵", - u"廷", - u"苗", - u"岭", - u"将", - u"來", - u"泛", - u"朵", - u"維", - u"園", - u"廳", - u"圳", - u"伦", - u"寶", - u"付", - u"仅", - u"減", - u"谦", - u"硕", - u"抚", - u"慶", - u"雞", - u"郝", - u"计", - u"熱", - u"杖", - u"亭", - u"喱", - u"惜", - u"莒", - u"另", - u"陆", - u"拾", - u"伍", - u"谈", - u"嚼", - u"娅", - u"翟", - u"別", - u"颈", - u"邮", - u"弄", - u"•", - u"扇", - u"哦", - u"吼", - u"耶", - u"宅", - u"帽", - u"魂", - u"搭", - u"笨", - u"映", - u"拨", - u"烂", - u"馈", - u"胎", - u"溶", - u"\\", - u"善", - u"销", - u"难", - u"忘", - u"斑", - u"噢", - u"錫", - u"娟", - u"語", - u"哨", - u"筷", - u"摊", - u"均", - u"椅", - u"改", - u"换", - u"跟", - u"帖", - u"勾", - u"缅", - u"孙", - u"啪", - u"栗", - u"着", - u"漁", - u"吓", - u"易", - u"漲", - u"靖", - u"枸", - u"馬", - u"昇", - u"當", - u"麥", - u"妆", - u"塑", - u"魯", - u"鎮", - u"吗", - u"魁", - u"丹", - u"杈", - u"技", - u"术", - u"泼", - u"零", - u"忙", - u"漾", - u"創", - u"攀", - u"郫", - u"抿", - u"稼", - u"假", - u"循", - u"泳", - u"池", - u"膨", - u"巨", - u"歧", - u"愛", - u"鵝", - u"悉", - u"灯", - u"激", - u"踪", - u"细", - u"會", - u"舔", - u"愿", - u"們", - u"衹", - u"令", - u"浔", - u"丨", - u"酉", - u"惦", - u"耕", - u"×", - u"闪", - u"經", - u"玺", - u"芯", - u"襄", - u"賦", - u"予", - u"學", - u"苑", - 
u"托", - u"丢", - u"赔", - u"ā", - u"聽", - u"濤", - u"浮", - u"伯", - u"兑", - u"币", - u"治", - u"愈", - u"盱", - u"眙", - u"漏", - u"夕", - u"搏", - u"由", - u"完", - u"切", - u"罕", - u"息", - u"燃", - u"叙", - u"萍", - u"碑", - u"腌", - u"衣", - u"害", - u"己", - u"患", - u"浙", - u"闫", - u"|", - u"芈", - u"谣", - u"戴", - u"錦", - u"謝", - u"恩", - u"芊", - u"拇", - u"矾", - u"政", - u"锣", - u"跃", - u"钥", - u"寺", - u"驼", - u"芙", - u"插", - u"恒", - u"咪", - u"禄", - u"摩", - u"轮", - u"譚", - u"鴨", - u"戊", - u"申", - u"丙", - u"邊", - u"唯", - u"登", - u"困", - u"貢", - u"誉", - u"賀", - u"认", - u"准", - u"妃", - u"潜", - u"旨", - u"死", - u"桌", - u"尧", - u"箱", - u"届", - u"获", - u"顶", - u"柿", - u"臂", - u"蓮", - u"凭", - u"慵", - u"懒", - u"醇", - u"籍", - u"静", - u"淌", - u"此", - u"甚", - u"绣", - u"渌", - u"呢", - u"问", - u"抹", - u"弹", - u"捷", - u"邱", - u"旦", - u"曉", - u"艳", - u"雲", - u"研", - u"守", - u"鼻", - u"¦", - u"揽", - u"含", - u"沂", - u"听", - u"帛", - u"端", - u"兆", - u"舆", - u"谐", - u"帘", - u"笑", - u"寅", - u"【", - u"車", - u"@", - u"&", - u"胪", - u"臻", - u"蘆", - u"衙", - u"餌", - u"①", - u"鉴", - u"敬", - u"枝", - u"沈", - u"衔", - u"蝉", - u"芜", - u"烈", - u"库", - u"椿", - u"稳", - u"’", - u"豌", - u"亿", - u"缙", - u"獨", - u"菊", - u"沤", - u"迟", - u"忧", - u"沫", - u"伟", - u"靠", - u"并", - u"互", - u"晓", - u"枫", - u"窑", - u"芭", - u"夯", - u"鸿", - u"無", - u"烦", - u"恼", - u"闖", - u"贞", - u"鳥", - u"厦", - u"抱", - u"歐", - u"藝", - u"廖", - u"振", - u"腦", - u"舖", - u"酪", - u"碎", - u"浪", - u"荔", - u"巫", - u"撈", - u"醬", - u"段", - u"昔", - u"潘", - u"Λ", - u"禧", - u"妻", - u"瓢", - u"柏", - u"郁", - u"暹", - u"兮", - u"娃", - u"敏", - u"進", - u"距", - u"离", - u"倪", - u"征", - u"咱", - u"继", - u"责", - u"任", - u"銅", - u"啖", - u"赞", - u"菲", - u"蛇", - u"焰", - u"娜", - u"芮", - u"坦", - u"磅", - u"薛", - u"緣", - u"乔", - u"拱", - u"骚", - u"扰", - u"約", - u"喷", - u"驢", - u"仨", - u"纬", - u"臘", - u"邳", - u"终", - u"喏", - u"扫", - u"除", - u"恶", - u"争", - u"率", - u"‘", - u"肃", - u"雀", - u"鈴", - u"贼", - u"绕", - u"笋", - u"钩", - u"勒", - u"翠", - u"黎", - u"董", - u"澄", - u"境", - u"采", - u"拳", - 
u"捆", - u"粄", - u"诸", - u"暨", - u"榧", - u"葛", - u"親", - u"戚", - u"访", - u"股", - u"融", - u"潤", - u"寄", - u"递", - u"藩", - u"滇", - u"湛", - u"他", - u"篓", - u"普", - u"撞", - u"莅", - u"但", - u"沟", - u"暑", - u"促", - u"玲", - u"腩", - u"碼", - u"偏", - u"楹", - u"嘎", - u"洒", - u"抛", - u"危", - u"险", - u"损", - u"负", - u"銘", - u"黃", - u"燜", - u"說", - u"杆", - u"称", - u"蹭", - u"聊", - u"妙", - u"滕", - u"曦", - u"肴", - u"萧", - u"颗", - u"剂", - u"義", - u"锋", - u"授", - u"权", - u"著", - u"茴", - u"蒝", - u"侬", - u"顏", - u"菁", - u"擦", - u"鞋", - u"庞", - u"毕", - u"谱", - u"樱", - u"→", - u"綦", - u"舞", - u"蹈", - u"躁", - u"渠", - u"俐", - u"涧", - u"馀", - u"潇", - u"邻", - u"须", - u"藻", - u"纺", - u"织", - u"军", - u"沅", - u"豐", - u"爐", - u"韭", - u"棚", - u"綿", - u"麯", - u"剑", - u"娱", - u"链", - u"锤", - u"炼", - u"献", - u"晟", - u"章", - u"謎", - u"数", - u"侯", - u"她", - u"疗", - u"途", - u"篇", - u"则", - u"邓", - u"赐", - u"閣", - u"對", - u"猩", - u"邑", - u"區", - u"鬼", - u"莫", - u"沪", - u"淼", - u"赤", - u"混", - u"沌", - u"需", - u"求", - u"痛", - u"绮", - u"琦", - u"荃", - u"熳", - u"佑", - u"Á", - u"ō", - u"現", - u"専", - u"卢", - u"譽", - u"缠", - u"曾", - u"鸣", - u"琴", - u"汊", - u"濮", - u"哇", - u"哩", - u"唝", - u"曲", - u"坂", - u"呼", - u"莴", - u"怕", - u"蒋", - u"伞", - u"炙", - u"燻", - u"瑧", - u"冈", - u"讲", - u"硬", - u"详", - u"鹵", - u"摇", - u"偃", - u"嵩", - u"严", - u"谨", - u"′", - u"剥", - u"穗", - u"榮", - u"禹", - u"颐", - u"局", - u"刚", - u"▕", - u"暖", - u"漠", - u"炎", - u"頤", - u"樟", - u"?", - u"储", - u"移", - u"缕", - u"艰", - u"袍", - u"瑪", - u"麗", - u"参", - u"䬺", - u"趁", - u"呦", - u"霖", - u"饵", - u"溪", - u"孔", - u"澤", - u"袜", - u"蔓", - u"熠", - u"显", - u"屏", - u"缇", - u"寇", - u"亞", - u"坑", - u"槟", - u"榔", - u"絳", - u"驿", - u"歹", - u"匾", - u"猴", - u"旭", - u"竞", - u"­", - u"唛", - u"介", - u"习", - u"涡", - u"寓", - u"掉", - u"蘸", - u"愉", - u"佼", - u"ǒ", - u"納", - u"∶", - u"革", - u"嚸", - u"募", - u"螃", - u"鲢", - u"俤", - u"扁", - u"寳", - u"辽", - u"∧", - u"厚", - u"裤", - u"扯", - u"屯", - u"废", - u"挪", - u"辘", - u"碉", - u"歇", - u"漓", - u"腻", - u"捣", - u"孩", - 
u"烁", - u"整", - u"按", - u"Ⓡ", - u"眉", - u"脸", - u"痣", - u"粑", - u"序", - u"穿", - u"樊", - u"玮", - u"★", - u"扑", - u"渊", - u"醴", - u"瑶", - u"農", - u"檔", - u"憩", - u"霊", - u"赫", - u"呜", - u"~", - u"备", - u"説", - u"莓", - u"钻", - u"播", - u"冻", - u"紅", - u"菽", - u"喪", - u"埔", - u"壽", - u"❤", - u"籽", - u"咻", - u"籣", - u"尹", - u"潭", - u"穆", - u"壮", - u"使", - u"霄", - u"蔵", - u"浒", - u"岳", - u"熘", - u"臺", - u"殷", - u"孤", - u"邂", - u"逅", - u"厕", - u"郸", - u"铭", - u"莆", - u"抻", - u"虽", - u"倦", - u"怠", - u"矣", - u"茵", - u"垂", - u"殿", - u"鄂", - u"嗑", - u"续", - u"钦", - u"党", - u"鲫", - u"蔡", - u"侧", - u"割", - u"彰", - u"凝", - u"熬", - u"叕", - u"純", - u"谛", - u"籠", - u"宋", - u"峡", - u"俩", - u"雜", - u"跑", - u"⑧", - u"焼", - u"-", - u"逢", - u"澧", - u"舵", - u"异", - u"冯", - u"战", - u"决", - u"棍", - u";", - u"﹣", - u"丑", - u"妇", - u"焉", - u"芷", - u"楂", - u"坞", - u"壳", - u"馐", - u"帜", - u"旅", - u"鳯", - u"簡", - u"凍", - u"秜", - u"结", - u"咩", - u"丫", - u"稠", - u"暗", - u"缔", - u"乎", - u"被", - u"狠", - u"皲", - u"豉", - u"崇", - u"渭", - u"担", - u"鹤", - u"製", - u"蛎", - u"笛", - u"奔", - u"赴", - u"盼", - u"鳌", - u"拜", - u"络", - u"灸", - u"膜", - u"刮", - u"痧", - u"毒", - u"萊", - u"陂", - u"濑", - u"唇", - u"抵", - u"押", - u"置", - u"馇", - u"泌", - u"尿", - u"傻", - u"像", - u"孃", - u"陣", - u"靓", - u"规", - u"企", - u"矮", - u"凳", - u"贰", - u"兎", - u"庵", - u"質", - u"阅", - u"读", - u"◆", - u"练", - u"墩", - u"曼", - u"呱", - u"泓", - u"耐", - u"磁", - u"枣", - u"罉", - u"浴", - u"氧", - u"洱", - u"鳅", - u"線", - u"炳", - u"顽", - u"符", - u"倌", - u"泥", - u"郊", - u"柯", - u"餘", - u"巍", - u"论", - u"沽", - u"荘", - u"奕", - u"啃", - u"髙", - u"○", - u"芬", - u"苟", - u"且", - u"阆", - u"確", - u"獅", - u"匣", - u"睫", - u"牙", - u"戒", - u"俊", - u"阜", - u"遵", - u"爵", - u"遗", - u"捧", - u"仑", - u"构", - u"豬", - u"挡", - u"弓", - u"蠔", - u"旬", - u"鱻", - u"镖", - u"燚", - u"歌", - u"壁", - u"啫", - u"饷", - u"仰", - u"韶", - u"勞", - u"軒", - u"菒", - u"炫", - u"廊", - u"塞", - u"脏", - u"堤", - u"浅", - u"辈", - u"靡", - u"裙", - u"尺", - u"廚", - u"向", - u"磊", - u"咬", - u"皓", - u"卿", - 
u"懂", - u"葉", - u"廿", - u"芸", - u"賴", - u"埠", - u"應", - u"碟", - u"溧", - u"訂", - u"選", - u"睦", - u"举", - u"钳", - u"哟", - u"霍", - u"扞", - u"侣", - u"營", - u"龟", - u"钜", - u"埭", - u"が", - u"搽", - u"螞", - u"蟻", - u"娚", - u"蒜", - u"厝", - u"垵", - u"☎", - u"捌", - u"倒", - u"骑", - u"Ξ", - u"谋", - u"黍", - u"侍", - u"赏", - u"扮", - u"忱", - u"蘑", - u"洁", - u"嘆", - u"闹", - u"谭", - u"鶏", - u"種", - u"φ", - u"坤", - u"麓", - u"麒", - u"麟", - u"喂", - u"琳", - u"Ⓑ", - u"趙", - u"總", - u"這", - u"奖", - u"取", - u"拔", - u"錯", - u"仉", - u"缸", - u"廟", - u"暢", - u"腔", - u"卓", - u"腱", - u"朙", - u"紹", - u"莹", - u"缺", - u"抺", - u"睿", - u"氣", - u"该", - u"貼", - u"妍", - u"拆", - u"穇", - u"箩", - u"希", - u"廰", - u"祗", - u"盲", - u"坝", - u"骆", - u"熄", - u"蛮", - u"賓", - u"馮", - u"尋", - u"泊", - u"孫", - u"槁", - u"亖", - u"俯", - u"浣", - u"婴", - u"锨", - u"馥", - u"闷", - u"梆", - u"▫", - u"姥", - u"哲", - u"录", - u"甫", - u"床", - u"嬌", - u"烎", - u"梵", - u"枪", - u"乍", - u"璜", - u"羌", - u"崂", - u"穷", - u"榕", - u"聲", - u"喚", - u"駕", - u"晕", - u"嬷", - u"箕", - u"婧", - u"盧", - u"楓", - u"柃", - u"差", - u"「", - u"」", - u"佶", - u"唔", - u"壕", - u"歆", - u"盏", - u"擂", - u"睇", - u"巾", - u"查", - u"淖", - u"哪", - u"沣", - u"赣", - u"優", - u"諾", - u"礁", - u"努", - u"畔", - u"疙", - u"瘩", - u"握", - u"叮", - u"栙", - u"甑", - u"嶺", - u"涌", - u"透", - u"钓", - u"斜", - u"搬", - u"迁", - u"妨", - u"借", - u"仍", - u"鳕", - u"瓷", - u"绘", - u"餠", - u"á", - u"ǎ", - u"祈", - u"邨", - u"醒", - u"闵", - u"砖", - u"锹", - u"咀", - u"綠", - u"幕", - u"忠", - u"雾", - u"覓", - u"靜", - u"擔", - u"篮", - u"杉", - u"势", - u"薇", - u"甬", - u"频", - u"般", - u"仲", - u"蘇", - u"鸟", - u"卞", - u"憾", - u"資", - u"駱", - u"蝶", - u"為", - u"仟", - u"耗", - u"莘", - u"涉", - u"昕", - u"盈", - u"熹", - u"觀", - u"瑭", - u"湃", - u"兢", - u"淞", - u"䒩", - u"結", - u"柗", - u"鲤", - u"糟", - u"粕", - u"塗", - u"簽", - u"怎", - u"桐", - u"皆", - u"羽", - u"盯", - u"氽", - u"晏", - u"液", - u"镀", - u"珂", - u"悸", - u"∙", - u"桑", - u"夢", - u"楽", - u"剩", - u"纵", - u"逝", - u"欺", - u"統", - u"飛", - u"姣", - u"俄", - u"揪", - u"薡", - u"幅", - 
u"蓋", - u"︳", - u"屉", - u"㕔", - u"а", - u"铸", - u"韦", - u"銀", - u"檀", - u"击", - u"伿", - u"隍", - u"『", - u"』", - u"芥", - u"☆", - u"声", - u"跆", - u"肋", - u"榭", - u"牵", - u"棧", - u"網", - u"愁", - u"嗏", - u"嵗", - u"巡", - u"稚", - u"貴", - u"買", - u"恰", - u"㸆", - u"捻", - u"玫", - u"瑰", - u"炕", - u"梧", - u"餡", - u"锌", - u"焱", - u"驰", - u"堽", - u"邯", - u"珑", - u"尕", - u"宰", - u"栓", - u"喃", - u"殊", - u"燊", - u"慈", - u"羴", - u"逃", - u"脱", - u"邹", - u"檐", - u"碌", - u"页", - u"荠", - u"券", - u"題", - u"龚", - u"肌", - u"蕉", - u"囬", - u"肫", - u"坪", - u"沉", - u"淀", - u"斌", - u"鳝", - u"核", - u"喳", - u"剃", - u"昭", - u"{", - u"}", - u"坏", - u"烜", - u"媛", - u"猛", - u"桓", - u"欣", - u"碁", - u"竭", - u"堇", - u"↑", - u"扛", - u"罄", - u"栾", - u"鲶", - u"鍕", - u"崔", - u"橘", - u"携", - u"丈", - u"射", - u"梗", - u"檸", - u"疼", - u"卑", - u"捉", - u"障", - u"裏", - u"遍", - u"蓓", - u"析", - u"許", - u"虫", - u"坨", - u"馔", - u"窄", - u"姫", - u"噤", - u"係", - u"湿", - u"汐", - u"鳜", - u"船", - u"崽", - u"+", - u"例", - u"灼", - u"祿", - u"腥", - u"峭", - u"酌", - u"喽", - u"件", - u"郏", - u"栀", - u"鲨", - u"寫", - u"與", - u"诈", - u"斥", - u"炮", - u"稿", - u"懿", - u"掂", - u"鹭", - u"乱", - u"恬", - u"婷", - u"苦", - u"埃", - u"珊", - u"禅", - u"裹", - u"圃", - u"鹌", - u"鹑", - u"û", - u"澡", - u"囧", - u"阡", - u"靑", - u"警", - u"牢", - u"嘱", - u"鳞", - u"浃", - u"贷", - u"慧", - u"翊", - u"讨", - u"碧", - u"剪", - u"陌", - u"冀", - u"砵", - u"迅", - u"鹰", - u"竟", - u"召", - u"敌", - u"鯡", - u"蒌", - u"蒿", - u"扶", - u"③", - u"誘", - u"嘻", - u"輪", - u"嬢", - u"瓮", - u"絲", - u"嚣", - u"荀", - u"莽", - u"鄧", - u"咋", - u"勿", - u"佈", - u"洽", - u"羹", - u"模", - u"貨", - u"粱", - u"凈", - u"腹", - u"鄭", - u"署", - u"儒", - u"隧", - u"鉢", - u"茫", - u"蔻", - u"í", - u"ó", - u"裴", - u"偉", - u"Θ", - u"祎", - u"褥", - u"殖", - u"湫", - u"瀚", - u"貓", - u"汪", - u"紙", - u"極", - u"伤", - u"灰", - u"團", - u"橄", - u"榄", - u"拽", - u"响", - u"貌", - u"傣", - u"舂", - u"斩", - u"飨", - u"执", - u"諸", - u"蒂", - u"嘣", - u"葡", - u"渤", - u"惺", - u"驛", - u"戰", - u"箬", - u"俭", - u"瀏", - u"嫦", - u"琵", - u"琶", - u"咿", - 
u"吖", - u"舱", - u"韵", - u"揭", - u"祁", - u"將", - u"軍", - u"吟", - u"彼", - u"岚", - u"绒", - u"煤", - u"淝", - u"歸", - u"锐", - u"嗯", - u"傾", - u"甩", - u"瞳", - u"睁", - u"鳗", - u"遜", - u"嗲", - u"虚", - u"娴", - u"碱", - u"呷", - u"{", - u"哚", - u"兜", - u"喇", - u"叭", - u"燦", - u"逻", - u"匪", - u"槐", - u"撒", - u"写", - u"踩", - u"踏", - u"霞", - u"喫", - u"返", - u"赚", - u"拓", - u"動", - u"觞", - u"鲽", - u"鐘", - u"闰", - u"扳", - u"沖", - u"賈", - u"璐", - u"煸", - u"棵", - u"峪", - u"π", - u"憶", - u"齋", - u"娇", - u"穎", - u"嫁", - u"玥", - u"胚", - u"喊", - u"阻", - u"餓", - u"截", - u"孵", - u"屎", - u"爾", - u"莳", - u"倔", - u"娄", - u"祸", - u"`", - u"姿", - u"稽", - u"戌", - u"缪", - u"ī", - u"糠", - u"痴", - u"猎", - u"嬉", - u"柑", - u"鞍", - u"兹", - u"凼", - u"舅", - u"褐", - u"醪", - u"仪", - u"氷", - u"單", - u"丞", - u"碛", - u"绽", - u"袂", - u"檢", - u"瀾", - u"饃", - u"孖", - u"雍", - u"ò", - u"螄", - u"涤", - u"茨", - u"寮", - u"近", - u"辜", - u"茅", - u"孟", - u"累", - u"宣", - u"樹", - u"鷹", - u"膝", - u"臉", - u"襪", - u"嘢", - u"嵐", - u"▲", - u"璇", - u"竺", - u"気", - u"迈", - u"糐", - u"挥", - u"瑜", - u"伽", - u"\"", - u"裳", - u"纹", - u"潯", - u"幾", - u"朔", - u"枊", - u"釀", - u"劝", - u"俺", - u"粢", - u"馓", - u"胥", - u"拥", - u"嘶", - u"達", - u"蝴", - u"昱", - u"ホ", - u"ル", - u"モ", - u"ニ", - u"颂", - u"噫", - u"否", - u"笙", - u"绎", - u"俞", - u"泵", - u"测", - u"耿", - u"揚", - u"犇", - u"锄", - u"卧", - u"炯", - u"烽", - u"橡", - u"操", - u"齊", - u"隴", - u"宀", - u"荥", - u"滙", - u"贪", - u"関", - u"垦", - u"↓", - u"麽", - u"暧", - u"匯", - u"恨", - u"叽", - u"断", - u"鮪", - u"椎", - u"病", - u"迹", - u"禺", - u"搓", - u"瀛", - u"唤", - u"埕", - u"愤", - u"怒", - u"拐", - u"狱", - u"垅", - u"绅", - u"設", - u"計", - u"書", - u"楷", - u"鮨", - u"邪", - u"郴", - u"盞", - u"榆", - u"恺", - u"樵", - u"煙", - u"舫", - u"翡", - u"砸", - u"叹", - u"縣", - u"璞", - u"禮", - u"獻", - u"似", - u"吆", - u"嘛", - u"灭", - u"擇", - u"夥", - u"ē", - u"曰", - u"蜗", - u"櫻", - u"▏", - u"鑪", - u"鯊", - u"視", - u"淄", - u"钰", - u"〝", - u"〞", - u"報", - u"退", - u"壶", - u"鳴", - u"拒", - u"旱", - u"鼠", - u"蕴", - u"峧", - u"赶", - u"咏", 
- u"寬", - u"渎", - u"靣", - u"卟", - u"宙", - u"趟", - u"負", - u"镫", - u"讷", - u"迭", - u"彝", - u"樣", - u"輕", - u"却", - u"覆", - u"庖", - u"扉", - u"聖", - u"喬", - u"瞻", - u"瞿", - u"箭", - u"胆", - u"ε", - u"韧", - u"誌", - u"既", - u"淳", - u"饞", - u"ě", - u"圍", - u"墟", - u"俚", - u"翕", - u"貂", - u"畜", - u"緹", - u"搄", - u"旮", - u"旯", - u"寂", - u"寞", - u"詹", - u"茜", - u"鉄", - u"絕", - u"泸", - u"嬤", - u"允", - u"炘", - u"骏", - u"侑", - u"晒", - u"玄", - u"粧", - u"糘", - u"毫", - u"幽", - u"攸", - u"愧", - u"侨", - u"衰", - u"ぉ", - u"に", - u"き", - u"ぃ", - u"炽", - u"倉", - u"斛", - u"領", - u"盾", - u"窜", - u"鲷", - u"瓏", - u"媚", - u"爲", - u"裸", - u"窦", - u"虞", - u"處", - u"魷", - u"}", - u"羡", - u"冕", - u"祺", - u"裁", - u"粶", - u"䬴", - u"嚟", - u"辆", - u"撮", - u"隋", - u"'", - u"勝", - u"梭", - u"茸", - u"咭", - u"崟", - u"滷", - u"緻", - u"沩", - u"颠", - u"诠", - u"珺", - u"拙", - u"察", - u"≡", - u"辅", - u"父", - u"雁", - u"裱", - u"瞄", - u"漖", - u"鯨", - u"略", - u"橱", - u"帼", - u"棉", - u"濠", - u"蕃", - u"ǔ", - u"崮", - u"阮", - u"勋", - u"苍", - u"喔", - u"猜", - u"箔", - u"è", - u"雏", - u"睐", - u"袭", - u"皋", - u"彻", - u"売", - u"垚", - u"咯", - u"凑", - u"汴", - u"纽", - u"巩", - u"宸", - u"墅", - u"茏", - u"裡", - u"昧", - u"飽", - u"坯", - u"濟", - u"└", - u"┐", - u"懷", - u"霾", - u"´", - u"閑", - u"茹", - u"闳", - u"湶", - u"鈣", - u"圓", - u"昊", - u"眞", - u"標", - u"凖", - u"皱", - u"箍", - u"筹", - u"孬", - u"唠", - u"輝", - u"输", - u"綺", - u"驭", - u"哼", - u"匡", - u"偵", - u"蝇", - u"運", - u"漟", - u"乘", - u"Ē", - u"卉", - u"邴", - u"謠", - u"怿", - u"亁", - u"棱", - u"呐", - u"湄", - u"莜", - u"阶", - u"堔", - u"炜", - u"邀", - u"笠", - u"遏", - u"犯", - u"罪", - u"栢", - u"餛", - u"亀", - u"苓", - u"膏", - u"伸", - u"?", - u"阪", - u"委", - u"妯", - u"娌", - u"仝", - u"咧", - u"鍚", - u"▼", - u"遠", - u"摑", - u"滘", - u"颁", - u"ʌ", - u"锈", - u"佤", - u"佗", - u"卌", - u"É", - u"↙", - u"蔺", - u"汰", - u"塍", - u"認", - u"鳟", - u"畿", - u"耦", - u"吨", - u"䒕", - u"茬", - u"枼", - u"饕", - u"涼", - u"烀", - u"汶", - u"齿", - u"貳", - u"沱", - u"楞", - u"屹", - u"掺", - u"挢", - u"荻", - u"偷", - u"辶", - u"饌", 
- u"泮", - u"喧", - u"某", - u"聂", - u"夾", - u"吁", - u"鎬", - u"谅", - u"鞘", - u"泪", - u"佩", - u"㎡", - u"鐡", - u"犊", - u"漳", - u"睢", - u"粘", - u"輔", - u"爬", - u"濃", - u"し", - u"ん", - u"い", - u"ち", - u"ょ", - u"く", - u"ど", - u"ぅ", - u"戍", - u"咚", - u"蒡", - u"惯", - u"隣", - u"沭", - u"撇", - u"妞", - u"筛", - u"昵", - u"赁", - u"震", - u"欠", - u"涞", - u"從", - u"靚", - u"绥", - u"俑", - u"熔", - u"曙", - u"侗", - u"√", - u"仗", - u"袖", - u"饶", - u"辫", - u"琉", - u"鴿", - u"裂", - u"缝", - u"灞", - u"崖", - u"炑", - u"昝", - u"┌", - u"┘", - u"邕", - u"趴", - u"踢", - u"迩", - u"浈", - u"挚", - u"聆", - u"犁", - u"陝", - u"滾", - u"彎", - u"問", - u"癮", - u"砚", - u"ú", - u"瀧", - u"吮", - u"毓", - u"劵", - u"槽", - u"黒", - u"忍", - u"畈", - u"姊", - u"沛", - u"忽", - u"摘", - u"燍", - u"♡", - u"汝", - u"贛", - u"叻", - u"甸", - u"乞", - u"丐", - u"践", - u"嗞", - u"㥁", - u"斐", - u"圖", - u"祯", - u"牤", - u"攻", - u"弯", - u"幹", - u"杠", - u"苞", - u"滤", - u"筆", - u"練", - u"鞑", - u"ˊ", - u"萤", - u"榶", - u"叨", - u"轨", - u"耒", - u"嚮", - u"┃", - u"漪", - u"剛", - u"键", - u"弋", - u"彦", - u"瘋", - u"词", - u"敖", - u"鸦", - u"秧", - u"囚", - u"绾", - u"镶", - u"濂", - u"↘", - u"豁", - u"煒", - u"萄", - u"珲", - u"緋", - u"昂", - u"瀨", - u"缓", - u"疲", - u"替", - u"汥", - u"殡", - u"葬", - u"靳", - u"揉", - u"闭", - u"睛", - u"偘", - u"佚", - u"$", - u";", - u"^"}; - - if (name == "CHARSET_EN_36") - return CHARSET_EN_36; - else if (name == "CHARSET_CH_94") - return CHARSET_CH_94; - else if (name == "CHARSET_CN_3944") - return CHARSET_CN_3944; - CV_Error(-1, "Charset not supported! Exiting ..."); -} - diff --git a/models/text_recognition_crnn/crnn.py b/models/text_recognition_crnn/crnn.py deleted file mode 100644 index 20860c5f..00000000 --- a/models/text_recognition_crnn/crnn.py +++ /dev/null @@ -1,4176 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. 
-# Third party copyrights are property of their respective owners. - -import numpy as np -import cv2 as cv - -class CRNN: - def __init__(self, modelPath, backendId=0, targetId=0): - self._model_path = modelPath - self._backendId = backendId - self._targetId = targetId - - self._model = cv.dnn.readNet(self._model_path) - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - # load charset by the name of model - if '_EN_' in self._model_path: - self._charset = self._load_charset(self.CHARSET_EN_36) - elif '_CH_' in self._model_path: - self._charset = self._load_charset(self.CHARSET_CH_94) - elif '_CN_' in self._model_path: - self._charset = self._load_charset(self.CHARSET_CN_3944) - else: - print('Charset not supported! Exiting ...') - exit() - - self._inputSize = [100, 32] # Fixed - self._targetVertices = np.array([ - [0, self._inputSize[1] - 1], - [0, 0], - [self._inputSize[0] - 1, 0], - [self._inputSize[0] - 1, self._inputSize[1] - 1] - ], dtype=np.float32) - - @property - def name(self): - return self.__class__.__name__ - - def _load_charset(self, charset): - return ''.join(charset.splitlines()) - - def setBackendAndTarget(self, backendId, targetId): - self._backendId = backendId - self._targetId = targetId - self._model.setPreferableBackend(self._backendId) - self._model.setPreferableTarget(self._targetId) - - def _preprocess(self, image, rbbox): - # Remove conf, reshape and ensure all is np.float32 - vertices = rbbox.reshape((4, 2)).astype(np.float32) - - rotationMatrix = cv.getPerspectiveTransform(vertices, self._targetVertices) - cropped = cv.warpPerspective(image, rotationMatrix, self._inputSize) - - # 'CN' can detect digits (0\~9), upper/lower-case letters (a\~z and A\~Z), and some special characters - # 'CH' can detect digits (0\~9), upper/lower-case letters (a\~z and A\~Z), some Chinese characters and some special characters - if 'CN' in self._model_path or 'CH' in self._model_path: - pass - else: - 
cropped = cv.cvtColor(cropped, cv.COLOR_BGR2GRAY) - - return cv.dnn.blobFromImage(cropped, size=self._inputSize, mean=127.5, scalefactor=1 / 127.5) - - def infer(self, image, rbbox): - # Preprocess - inputBlob = self._preprocess(image, rbbox) - - # Forward - self._model.setInput(inputBlob) - outputBlob = self._model.forward() - - # Postprocess - results = self._postprocess(outputBlob) - - return results - - def _postprocess(self, outputBlob): - '''Decode charaters from outputBlob - ''' - text = '' - for i in range(outputBlob.shape[0]): - c = np.argmax(outputBlob[i][0]) - if c != 0: - text += self._charset[c - 1] - else: - text += '-' - - # adjacent same letters as well as background text must be removed to get the final output - char_list = [] - for i in range(len(text)): - if text[i] != '-' and (not (i > 0 and text[i] == text[i - 1])): - char_list.append(text[i]) - return ''.join(char_list) - - CHARSET_EN_36 = '''0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z''' - - CHARSET_CH_94 = ''' -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -A -B -C -D -E -F -G -H -I -J -K -L -M -N -O -P -Q -R -S -T -U -V -W -X -Y -Z -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -` -{ -| -} -~''' - - CHARSET_CN_3944 = ''' -H -O -K -I -T -E -A -酱 -鸭 -传 -奇 -J -N -G -Y -C -U -Q -蝦 -兵 -蟹 -煲 -这 -是 -可 -以 -先 -吃 -后 -涮 -的 -干 -锅 -菜 -加 -盟 -电 -话 -: -1 -7 -3 -9 -8 -郑 -州 -总 -店 -雪 -花 -勇 -闯 -天 -涯 -虾 -, -一 -送 -鱼 -锡 -纸 -蛤 -土 -豆 -粉 -砂 -米 -线 -牛 -筋 -面 -刀 -削 -水 -饺 -吧 -沙 -拉 -老 -饭 -盒 -教 -室 -主 -题 -餐 -厅 -仁 -馄 -饨 -重 -庆 -小 -便 -当 -全 -国 -连 -锁 -4 -0 -- -6 -5 -2 -人 -快 -量 -贩 -蓬 -朗 -御 -茶 -川 -渝 -捞 -火 -古 -之 -匠 -今 -七 -西 -域 -羊 -城 -l -i -k -n -g -c -o -f -e -w -贵 -阳 -素 -有 -家 -会 -展 -口 -乐 -三 -惹 -烤 -肉 -h -t -子 -馆 -常 -盖 -浇 -兴 -业 -路 -书 -亦 -燒 -仙 -草 -L -: -德 -啤 -工 -坊 -杏 -屋 -高 -桥 -号 -品 -麻 -辣 -烫 -检 -官 -. -千 -翼 -木 -兰 -画 -食 -上 -汤 -剁 -馅 -手 -煮 -时 -尚 -健 -康 -傲 -椒 -B -啵 -条 -脾 -气 -! 
-/ -月 -腾 -讯 -应 -用 -喵 -泡 -我 -鲜 -滚 -给 -你 -看 -客 -来 -香 -汉 -湘 -本 -地 -炒 -系 -列 -订 -仔 -肘 -蹄 -梅 -扣 -黄 -焖 -排 -骨 -炖 -鸡 -韓 -金 -利 -串 -舊 -街 -梨 -村 -座 -经 -济 -实 -惠 -绿 -色 -炭 -庐 -蛙 -忆 -蓉 -源 -真 -d -D -概 -念 -创 -意 -六 -熏 -各 -种 -精 -美 -y -疯 -狂 -世 -界 -杯 -特 -价 -酒 -元 -瓶 -沸 -带 -F -请 -二 -楼 -自 -动 -升 -降 -烏 -邦 -嗦 -味 -风 -货 -团 -外 -卖 -嘞 -个 -折 -辛 -束 -舌 -尖 -中 -包 -浆 -腐 -r -P -a -u -丸 -作 -福 -M -漫 -蜜 -冰 -拌 -匆 -那 -年 -R -S -果 -光 -夹 -馍 -凉 -皮 -过 -祖 -南 -山 -風 -景 -堂 -烘 -培 -龍 -坎 -半 -婆 -建 -设 -富 -强 -丽 -菏 -泽 -省 -安 -港 -竹 -签 -撩 -只 -为 -好 -生 -活 -抓 -海 -最 -网 -红 -铁 -统 -® -功 -夫 -鱿 -大 -闻 -就 -知 -遇 -见 -文 -合 -热 -森 -台 -湾 -卤 -然 -汁 -甄 -选 -材 -还 -原 -初 -衷 -* -洪 -龙 -公 -酸 -巴 -乡 -焦 -烧 -淘 -成 -都 -眼 -镜 -优 -菓 -恋 -V -化 -糖 -、 -粥 -田 -螺 -斓 -X -爺 -W -j -院 -华 -Z -蜊 -北 -京 -刷 -蝎 -腿 -梦 -幻 -奶 -式 -蛋 -鍋 -区 -· -领 -航 -者 -四 -通 -往 -楚 -河 -停 -车 -场 -凌 -晨 -点 -杞 -缘 -王 -集 -唐 -菠 -萝 -泰 -板 -鳳 -凰 -樓 -名 -壹 -猪 -晴 -舍 -犟 -师 -傅 -飯 -致 -青 -春 -轰 -炸 -卡 -里 -身 -厨 -房 -x -聚 -鑫 -阁 -岛 -纯 -聘 -专 -长 -庄 -鄉 -更 -珍 -固 -新 -岩 -v -s -m -至 -尊 -比 -萨 -广 -披 -饮 -管 -理 -限 -司 -p -幸 -东 -正 -挞 -少 -女 -克 -装 -童 -哒 -磨 -厂 -怼 -纤 -入 -户 -独 -溜 -共 -享 -滋 -江 -门 -九 -蒸 -胜 -盛 -& -魔 -爪 -鹅 -皇 -( -) -友 -甲 -魚 -首 -烹 -行 -员 -若 -资 -议 -联 -同 -急 -私 -燕 -儿 -巢 -鹏 -记 -腊 -营 -欢 -迎 -旗 -舰 -叫 -了 -做 -故 -铃 -煎 -饼 -哥 -力 -五 -谷 -野 -戈 -厠 -所 -超 -牌 -冒 -陳 -陈 -苕 -爽 -滑 -启 -秦 -择 -现 -进 -惊 -喜 -定 -于 -雅 -膳 -多 -推 -淇 -淋 -b -思 -堡 -偶 -相 -伴 -呈 -湯 -绝 -浏 -' -刘 -态 -牧 -万 -达 -和 -番 -丼 -— -机 -瘦 -绵 -柔 -厉 -蚝 -娘 -彩 -百 -事 -调 -韩 -爱 -喝 -玩 -放 -肆 -寿 -净 -配 -髓 -非 -道 -额 -吉 -招 -商 -杂 -粮 -筐 -运 -转 -服 -务 -缤 -灿 -腕 -楠 -彤 -学 -橋 -试 -浩 -减 -薪 -诚 -霸 -第 -间 -日 -极 -料 -開 -業 -霏 -星 -期 -分 -秒 -内 -咨 -询 -。 -樐 -头 -开 -氏 -渔 -约 -劳 -保 -礼 -宏 -武 -佘 -轻 -奢 -艺 -井 -隆 -鐵 -卷 -染 -焙 -钵 -马 -牟 -洋 -芋 -片 -流 -宽 -心 -位 -清 -潼 -关 -祥 -背 -凡 -哈 -尔 -滨 -珠 -派 -艾 -让 -变 -得 -样 -玖 -等 -综 -性 -涵 -粗 -冠 -記 -肠 -湖 -财 -贡 -桃 -杭 -平 -桂 -林 -煨 -档 -案 -造 -潮 -汕 -宗 -单 -县 -鲁 -舜 -脆 -酥 -糕 -仕 -十 -临 -簋 -宴 -字 -太 -灌 -薄 -尝 -址 -晗 -幢 -购 -梁 -醉 -皖 -庭 -白 -肥 -块 -石 -碗 -颜 -值 -張 -瘾 -跷 -脚 -而 -叁 -蜀 -橙 -市 -边 -早 -晚 -云 -吞 -目 -表 -赵 -烩 -擀 -蔬 -找 -回 -游 -刃 -余 -支 -洗 -吹 -休 -闲 -简 -撸 -根 -据 -鸽 -铜 -亲 -贝 -纪 -吕 -豚 -饅 -悦 -汇 -油 -无 -制 -在 -寻 -碳 -馋 -嘴 -架 -荣 -斋 -护 -角 -落 -铺 -臊 -丝 -围 -柳 
-蛳 -蒲 -庙 -视 -荐 -缃 -想 -呀 -姜 -母 -起 -泉 -族 -群 -众 -其 -它 -血 -双 -补 -阴 -润 -不 -禽 -类 -款 -较 -候 -些 -畅 -脉 -痰 -疏 -肝 -帮 -助 -消 -增 -欲 -尤 -对 -胃 -畏 -寒 -很 -效 -秘 -黑 -嘿 -佳 -越 -脑 -桶 -项 -▪ -| -榜 -许 -仿 -或 -酬 -宾 -指 -买 -赠 -笃 -鼎 -盆 -™ -咕 -咾 -肚 -识 -栖 -凤 -渡 -筒 -彬 -弟 -醋 -財 -師 -民 -博 -丁 -扒 -翅 -墨 -柠 -檬 -紫 -薯 -焗 -芝 -士 -胸 -图 -妮 -杀 -菌 -爹 -尽 -归 -宁 -粽 -瑞 -轩 -午 -陕 -出 -才 -盘 -植 -甜 -粒 -神 -舟 -玻 -璃 -医 -划 -药 -郡 -毛 -张 -姐 -留 -满 -下 -兄 -法 -鋪 -é -[ -槑 -] -言 -密 -帝 -場 -朴 -寨 -奉 -z -什 -顺 -疆 -馕 -豫 -怀 -旧 -验 -昙 -搞 -圣 -格 -ǐ -à -隱 -燙 -状 -居 -饱 -底 -免 -费 -廣 -點 -專 -門 -语 -叉 -左 -岸 -发 -乌 -齐 -冷 -命 -● -修 -闸 -飞 -空 -养 -笼 -興 -银 -套 -東 -吴 -麺 -館 -¥ -从 -前 -乙 -弘 -炝 -夏 -秋 -冬 -咖 -啡 -℃ -© -莲 -塘 -哆 -梓 -依 -哎 -麦 -泗 -泾 -瓯 -胡 -∣ -歺 -八 -度 -深 -夜 -旋 -永 -远 -温 -又 -晶 -溏 -ä -盔 -飘 -劲 -旺 -楸 -良 -譜 -餅 -苏 -莎 -足 -宵 -与 -楊 -國 -莱 -卜 -炊 -挑 -剔 -存 -错 -方 -程 -解 -能 -堆 -洲 -诗 -玛 -渴 -脖 -丛 -狼 -翁 -姓 -葫 -芦 -沾 -葵 -の -咔 -粹 -弥 -乖 -悠 -茗 -别 -走 -柒 -榨 -咥 -虹 -沏 -桔 -叔 -贴 -办 -充 -崎 -鮮 -属 -彭 -浦 -町 -郎 -° -悟 -惑 -科 -英 -育 -岁 -幼 -园 -慢 -摆 -_ -狐 -狸 -典 -暴 -帥 -尾 -琼 -見 -望 -烟 -坚 -鸳 -鸯 -直 -校 -饪 -承 -们 -么 -¥ -份 -宇 -炉 -峰 -乃 -趣 -代 -刨 -抖 -音 -占 -谜 -答 -熟 -控 -蕾 -节 -社 -您 -《 -羅 -茉 -瀞 -憨 -尼 -丰 -镇 -酿 -避 -抢 -突 -破 -杰 -姆 -波 -观 -澜 -庫 -舒 -谁 -短 -島 -爷 -码 -每 -欧 -注 -册 -标 -腸 -奈 -熊 -粵 -吳 -衢 -雄 -际 -葱 -柱 -压 -陪 -器 -厘 -柴 -席 -饿 -俏 -汽 -站 -霜 -荟 -禾 -咘 -臭 -夷 -肖 -微 -组 -刺 -拼 -打 -信 -步 -! 
-说 -囍 -智 -藍 -鹿 -巷 -顾 -勃 -頭 -帕 -徐 -渣 -嗨 -鲍 -抽 -莊 -胗 -耳 -栈 -葑 -谊 -李 -够 -歪 -到 -杜 -绪 -始 -“ -” -编 -感 -谢 -阿 -妹 -抄 -屿 -旁 -钟 -糰 -鷄 -觉 -队 -明 -没 -幺 -罗 -恭 -發 -溢 -圆 -筵 -鲩 -斤 -噜 -府 -雕 -牦 -津 -間 -粤 -义 -驾 -嫩 -眷 -苔 -怡 -逍 -遥 -即 -把 -季 -鹃 -妈 -烙 -淡 -嘟 -班 -散 -磐 -稣 -耍 -芽 -昌 -粿 -鼓 -姑 -央 -告 -翔 -迦 -缆 -怪 -俗 -菩 -宥 -酵 -男 -顿 -蚂 -蚁 -q -緑 -瑩 -養 -滿 -接 -立 -勤 -封 -徽 -酷 -( -慕 -曹 -吊 -咸 -矿 -黛 -刻 -呗 -布 -袋 -钝 -丘 -逗 -窗 -吾 -塔 -坡 -周 -雙 -朝 -末 -如 -杨 -淮 -摄 -影 -翻 -窝 -物 -椰 -荞 -搅 -陇 -收 -两 -倍 -狮 -伊 -後 -晖 -長 -箐 -豪 -耀 -漢 -釜 -宮 -次 -掌 -斯 -朋 -针 -菇 -蚬 -拍 -雒 -陽 -漿 -麵 -條 -部 -← -柜 -驴 -证 -票 -账 -汗 -汆 -稍 -戏 -菋 -卫 -匹 -栋 -馨 -肯 -迪 -邢 -梯 -容 -嘉 -莞 -袁 -锦 -遮 -雨 -篷 -腰 -肺 -剡 -乾 -, -翰 -蔚 -刁 -藤 -帅 -傳 -维 -笔 -历 -史 -】 -适 -煌 -倾 -沧 -姬 -训 -邵 -诺 -敢 -质 -益 -佬 -兼 -职 -盅 -诊 -扬 -速 -宝 -褚 -糁 -钢 -松 -婚 -秀 -盐 -及 -個 -飲 -绍 -槿 -觅 -逼 -兽 -》 -吐 -右 -久 -闺 -祝 -贺 -啦 -瓦 -甏 -探 -辰 -碚 -芳 -灣 -泷 -饰 -隔 -帐 -飮 -搜 -時 -宫 -蘭 -再 -糊 -仓 -稻 -玉 -印 -象 -稀 -拴 -桩 -餃 -贾 -贱 -球 -萌 -撕 -脂 -肪 -层 -晋 -荷 -钱 -潍 -失 -孜 -提 -供 -具 -洛 -涂 -叠 -豊 -积 -媒 -级 -纷 -巧 -瓜 -苹 -琥 -珀 -蜂 -柚 -莉 -爆 -龄 -饸 -饹 -郞 -嫡 -億 -姚 -繁 -监 -督 -示 -佰 -汍 -% -甘 -蔗 -喻 -骄 -基 -因 -匙 -评 -侠 -赢 -交 -歡 -待 -馒 -产 -倡 -导 -低 -茂 -沐 -熙 -延 -丧 -受 -确 -睡 -蓝 -未 -賣 -電 -話 -农 -札 -岗 -树 -赖 -琪 -驻 -辉 -软 -防 -盗 -隐 -形 -纱 -灶 -扎 -环 -禁 -止 -吸 -萬 -昆 -几 -跳 -媳 -婦 -坛 -< -> -拿 -妖 -协 -朱 -住 -宿 -魅 -照 -碰 -滴 -何 -贤 -棒 -持 -啊 -赛 -版 -帆 -順 -狗 -情 -+ -洞 -奋 -斗 -亨 -叶 -涛 -铝 -范 -汀 -號 -律 -價 -鞭 -肩 -# -愚 -奥 -脯 -沁 -奚 -魏 -批 -租 -宠 -炲 -横 -沥 -彪 -投 -诉 -犀 -去 -屠 -鲅 -~ -俱 -徒 -鴻 -劉 -迷 -荤 -威 -曜 -連 -鎖 -馳 -载 -添 -筑 -陵 -佐 -敦 -> -郭 -厢 -祛 -茄 -堰 -漂 -亮 -爅 -虎 -膀 -叼 -猫 -藏 -陶 -鲈 -栏 -… -考 -冲 -胖 -裕 -沃 -挂 -报 -兔 -胶 -臨 -附 -处 -嫂 -萃 -幂 -吻 -聪 -糯 -糍 -棋 -烓 -脊 -衡 -亚 -副 -肤 -荆 -榴 -绚 -黔 -圈 -纳 -课 -逸 -宜 -= -烊 -姨 -施 -救 -贸 -啥 -也 -贯 -雷 -呆 -棠 -伙 -岐 -宛 -媽 -寸 -澳 -已 -還 -兒 -Ⅱ -凯 -株 -藕 -闽 -窖 -瀘 -售 -索 -体 -型 -樂 -琅 -琊 -夺 -扩 -) -诱 -滩 -浓 -要 -芹 -君 -反 -复 -羔 -追 -演 -唱 -過 -綫 -乳 -涩 -芒 -露 -蒙 -羯 -励 -志 -嵊 -閒 -罐 -佛 -墙 -頁 -坐 -眯 -预 -華 -廉 -释 -必 -随 -逐 -引 -究 -爸 -灵 -勺 -岂 -俵 -廷 -苗 -岭 -将 -來 -泛 -朵 -維 -園 -廳 -圳 -伦 -寶 -付 -仅 -減 -谦 -硕 -抚 -慶 -雞 -郝 -计 -熱 -杖 -亭 -喱 -惜 -莒 -另 -陆 -拾 -伍 -谈 -嚼 -娅 -翟 -別 -颈 -邮 -弄 -• -扇 -哦 -吼 -耶 -宅 -帽 -魂 -搭 -笨 -映 -拨 -烂 -馈 -胎 -溶 -\ -善 -销 -难 -忘 -斑 
-噢 -錫 -娟 -語 -哨 -筷 -摊 -均 -椅 -改 -换 -跟 -帖 -勾 -缅 -孙 -啪 -栗 -着 -漁 -吓 -易 -漲 -靖 -枸 -馬 -昇 -當 -麥 -妆 -塑 -魯 -鎮 -吗 -魁 -丹 -杈 -技 -术 -泼 -零 -忙 -漾 -創 -攀 -郫 -抿 -稼 -假 -循 -泳 -池 -膨 -巨 -歧 -愛 -鵝 -悉 -灯 -激 -踪 -细 -會 -舔 -愿 -們 -衹 -令 -浔 -丨 -酉 -惦 -耕 -× -闪 -經 -玺 -芯 -襄 -賦 -予 -學 -苑 -托 -丢 -赔 -ā -聽 -濤 -浮 -伯 -兑 -币 -治 -愈 -盱 -眙 -漏 -夕 -搏 -由 -完 -切 -罕 -息 -燃 -叙 -萍 -碑 -腌 -衣 -害 -己 -患 -浙 -闫 -| -芈 -谣 -戴 -錦 -謝 -恩 -芊 -拇 -矾 -政 -锣 -跃 -钥 -寺 -驼 -芙 -插 -恒 -咪 -禄 -摩 -轮 -譚 -鴨 -戊 -申 -丙 -邊 -唯 -登 -困 -貢 -誉 -賀 -认 -准 -妃 -潜 -旨 -死 -桌 -尧 -箱 -届 -获 -顶 -柿 -臂 -蓮 -凭 -慵 -懒 -醇 -籍 -静 -淌 -此 -甚 -绣 -渌 -呢 -问 -抹 -弹 -捷 -邱 -旦 -曉 -艳 -雲 -研 -守 -鼻 -¦ -揽 -含 -沂 -听 -帛 -端 -兆 -舆 -谐 -帘 -笑 -寅 -【 -車 -@ -& -胪 -臻 -蘆 -衙 -餌 -① -鉴 -敬 -枝 -沈 -衔 -蝉 -芜 -烈 -库 -椿 -稳 -’ -豌 -亿 -缙 -獨 -菊 -沤 -迟 -忧 -沫 -伟 -靠 -并 -互 -晓 -枫 -窑 -芭 -夯 -鸿 -無 -烦 -恼 -闖 -贞 -鳥 -厦 -抱 -歐 -藝 -廖 -振 -腦 -舖 -酪 -碎 -浪 -荔 -巫 -撈 -醬 -段 -昔 -潘 -Λ -禧 -妻 -瓢 -柏 -郁 -暹 -兮 -娃 -敏 -進 -距 -离 -倪 -征 -咱 -继 -责 -任 -銅 -啖 -赞 -菲 -蛇 -焰 -娜 -芮 -坦 -磅 -薛 -緣 -乔 -拱 -骚 -扰 -約 -喷 -驢 -仨 -纬 -臘 -邳 -终 -喏 -扫 -除 -恶 -争 -率 -‘ -肃 -雀 -鈴 -贼 -绕 -笋 -钩 -勒 -翠 -黎 -董 -澄 -境 -采 -拳 -捆 -粄 -诸 -暨 -榧 -葛 -親 -戚 -访 -股 -融 -潤 -寄 -递 -藩 -滇 -湛 -他 -篓 -普 -撞 -莅 -但 -沟 -暑 -促 -玲 -腩 -碼 -偏 -楹 -嘎 -洒 -抛 -危 -险 -损 -负 -銘 -黃 -燜 -說 -杆 -称 -蹭 -聊 -妙 -滕 -曦 -肴 -萧 -颗 -剂 -義 -锋 -授 -权 -著 -茴 -蒝 -侬 -顏 -菁 -擦 -鞋 -庞 -毕 -谱 -樱 -→ -綦 -舞 -蹈 -躁 -渠 -俐 -涧 -馀 -潇 -邻 -须 -藻 -纺 -织 -军 -沅 -豐 -爐 -韭 -棚 -綿 -麯 -剑 -娱 -链 -锤 -炼 -献 -晟 -章 -謎 -数 -侯 -她 -疗 -途 -篇 -则 -邓 -赐 -閣 -對 -猩 -邑 -區 -鬼 -莫 -沪 -淼 -赤 -混 -沌 -需 -求 -痛 -绮 -琦 -荃 -熳 -佑 -Á -ō -現 -専 -卢 -譽 -缠 -曾 -鸣 -琴 -汊 -濮 -哇 -哩 -唝 -曲 -坂 -呼 -莴 -怕 -蒋 -伞 -炙 -燻 -瑧 -冈 -讲 -硬 -详 -鹵 -摇 -偃 -嵩 -严 -谨 -′ -剥 -穗 -榮 -禹 -颐 -局 -刚 -▕ -暖 -漠 -炎 -頤 -樟 -? 
-储 -移 -缕 -艰 -袍 -瑪 -麗 -参 -䬺 -趁 -呦 -霖 -饵 -溪 -孔 -澤 -袜 -蔓 -熠 -显 -屏 -缇 -寇 -亞 -坑 -槟 -榔 -絳 -驿 -歹 -匾 -猴 -旭 -竞 -­ -唛 -介 -习 -涡 -寓 -掉 -蘸 -愉 -佼 -ǒ -納 -∶ -革 -嚸 -募 -螃 -鲢 -俤 -扁 -寳 -辽 -∧ -厚 -裤 -扯 -屯 -废 -挪 -辘 -碉 -歇 -漓 -腻 -捣 -孩 -烁 -整 -按 -Ⓡ -眉 -脸 -痣 -粑 -序 -穿 -樊 -玮 -★ -扑 -渊 -醴 -瑶 -農 -檔 -憩 -霊 -赫 -呜 -~ -备 -説 -莓 -钻 -播 -冻 -紅 -菽 -喪 -埔 -壽 -❤ -籽 -咻 -籣 -尹 -潭 -穆 -壮 -使 -霄 -蔵 -浒 -岳 -熘 -臺 -殷 -孤 -邂 -逅 -厕 -郸 -铭 -莆 -抻 -虽 -倦 -怠 -矣 -茵 -垂 -殿 -鄂 -嗑 -续 -钦 -党 -鲫 -蔡 -侧 -割 -彰 -凝 -熬 -叕 -純 -谛 -籠 -宋 -峡 -俩 -雜 -跑 -⑧ -焼 -- -逢 -澧 -舵 -异 -冯 -战 -决 -棍 -; -﹣ -丑 -妇 -焉 -芷 -楂 -坞 -壳 -馐 -帜 -旅 -鳯 -簡 -凍 -秜 -结 -咩 -丫 -稠 -暗 -缔 -乎 -被 -狠 -皲 -豉 -崇 -渭 -担 -鹤 -製 -蛎 -笛 -奔 -赴 -盼 -鳌 -拜 -络 -灸 -膜 -刮 -痧 -毒 -萊 -陂 -濑 -唇 -抵 -押 -置 -馇 -泌 -尿 -傻 -像 -孃 -陣 -靓 -规 -企 -矮 -凳 -贰 -兎 -庵 -質 -阅 -读 -◆ -练 -墩 -曼 -呱 -泓 -耐 -磁 -枣 -罉 -浴 -氧 -洱 -鳅 -線 -炳 -顽 -符 -倌 -泥 -郊 -柯 -餘 -巍 -论 -沽 -荘 -奕 -啃 -髙 -○ -芬 -苟 -且 -阆 -確 -獅 -匣 -睫 -牙 -戒 -俊 -阜 -遵 -爵 -遗 -捧 -仑 -构 -豬 -挡 -弓 -蠔 -旬 -鱻 -镖 -燚 -歌 -壁 -啫 -饷 -仰 -韶 -勞 -軒 -菒 -炫 -廊 -塞 -脏 -堤 -浅 -辈 -靡 -裙 -尺 -廚 -向 -磊 -咬 -皓 -卿 -懂 -葉 -廿 -芸 -賴 -埠 -應 -碟 -溧 -訂 -選 -睦 -举 -钳 -哟 -霍 -扞 -侣 -營 -龟 -钜 -埭 -が -搽 -螞 -蟻 -娚 -蒜 -厝 -垵 -☎ -捌 -倒 -骑 -Ξ -谋 -黍 -侍 -赏 -扮 -忱 -蘑 -洁 -嘆 -闹 -谭 -鶏 -種 -φ -坤 -麓 -麒 -麟 -喂 -琳 -Ⓑ -趙 -總 -這 -奖 -取 -拔 -錯 -仉 -缸 -廟 -暢 -腔 -卓 -腱 -朙 -紹 -莹 -缺 -抺 -睿 -氣 -该 -貼 -妍 -拆 -穇 -箩 -希 -廰 -祗 -盲 -坝 -骆 -熄 -蛮 -賓 -馮 -尋 -泊 -孫 -槁 -亖 -俯 -浣 -婴 -锨 -馥 -闷 -梆 -▫ -姥 -哲 -录 -甫 -床 -嬌 -烎 -梵 -枪 -乍 -璜 -羌 -崂 -穷 -榕 -聲 -喚 -駕 -晕 -嬷 -箕 -婧 -盧 -楓 -柃 -差 -「 -」 -佶 -唔 -壕 -歆 -盏 -擂 -睇 -巾 -查 -淖 -哪 -沣 -赣 -優 -諾 -礁 -努 -畔 -疙 -瘩 -握 -叮 -栙 -甑 -嶺 -涌 -透 -钓 -斜 -搬 -迁 -妨 -借 -仍 -鳕 -瓷 -绘 -餠 -á -ǎ -祈 -邨 -醒 -闵 -砖 -锹 -咀 -綠 -幕 -忠 -雾 -覓 -靜 -擔 -篮 -杉 -势 -薇 -甬 -频 -般 -仲 -蘇 -鸟 -卞 -憾 -資 -駱 -蝶 -為 -仟 -耗 -莘 -涉 -昕 -盈 -熹 -觀 -瑭 -湃 -兢 -淞 -䒩 -結 -柗 -鲤 -糟 -粕 -塗 -簽 -怎 -桐 -皆 -羽 -盯 -氽 -晏 -液 -镀 -珂 -悸 -∙ -桑 -夢 -楽 -剩 -纵 -逝 -欺 -統 -飛 -姣 -俄 -揪 -薡 -幅 -蓋 -︳ -屉 -㕔 -а -铸 -韦 -銀 -檀 -击 -伿 -隍 -『 -』 -芥 -☆ -声 -跆 -肋 -榭 -牵 -棧 -網 -愁 -嗏 -嵗 -巡 -稚 -貴 -買 -恰 -㸆 -捻 -玫 -瑰 -炕 -梧 -餡 -锌 -焱 -驰 -堽 -邯 -珑 -尕 -宰 -栓 -喃 -殊 -燊 -慈 -羴 -逃 -脱 -邹 -檐 -碌 -页 -荠 -券 -題 -龚 -肌 -蕉 -囬 -肫 -坪 -沉 -淀 -斌 -鳝 -核 -喳 -剃 -昭 -{ -} -坏 -烜 -媛 -猛 -桓 -欣 -碁 -竭 -堇 -↑ -扛 -罄 -栾 -鲶 -鍕 -崔 -橘 -携 -丈 
-射 -梗 -檸 -疼 -卑 -捉 -障 -裏 -遍 -蓓 -析 -許 -虫 -坨 -馔 -窄 -姫 -噤 -係 -湿 -汐 -鳜 -船 -崽 -+ -例 -灼 -祿 -腥 -峭 -酌 -喽 -件 -郏 -栀 -鲨 -寫 -與 -诈 -斥 -炮 -稿 -懿 -掂 -鹭 -乱 -恬 -婷 -苦 -埃 -珊 -禅 -裹 -圃 -鹌 -鹑 -û -澡 -囧 -阡 -靑 -警 -牢 -嘱 -鳞 -浃 -贷 -慧 -翊 -讨 -碧 -剪 -陌 -冀 -砵 -迅 -鹰 -竟 -召 -敌 -鯡 -蒌 -蒿 -扶 -③ -誘 -嘻 -輪 -嬢 -瓮 -絲 -嚣 -荀 -莽 -鄧 -咋 -勿 -佈 -洽 -羹 -模 -貨 -粱 -凈 -腹 -鄭 -署 -儒 -隧 -鉢 -茫 -蔻 -í -ó -裴 -偉 -Θ -祎 -褥 -殖 -湫 -瀚 -貓 -汪 -紙 -極 -伤 -灰 -團 -橄 -榄 -拽 -响 -貌 -傣 -舂 -斩 -飨 -执 -諸 -蒂 -嘣 -葡 -渤 -惺 -驛 -戰 -箬 -俭 -瀏 -嫦 -琵 -琶 -咿 -吖 -舱 -韵 -揭 -祁 -將 -軍 -吟 -彼 -岚 -绒 -煤 -淝 -歸 -锐 -嗯 -傾 -甩 -瞳 -睁 -鳗 -遜 -嗲 -虚 -娴 -碱 -呷 -{ -哚 -兜 -喇 -叭 -燦 -逻 -匪 -槐 -撒 -写 -踩 -踏 -霞 -喫 -返 -赚 -拓 -動 -觞 -鲽 -鐘 -闰 -扳 -沖 -賈 -璐 -煸 -棵 -峪 -π -憶 -齋 -娇 -穎 -嫁 -玥 -胚 -喊 -阻 -餓 -截 -孵 -屎 -爾 -莳 -倔 -娄 -祸 -` -姿 -稽 -戌 -缪 -ī -糠 -痴 -猎 -嬉 -柑 -鞍 -兹 -凼 -舅 -褐 -醪 -仪 -氷 -單 -丞 -碛 -绽 -袂 -檢 -瀾 -饃 -孖 -雍 -ò -螄 -涤 -茨 -寮 -近 -辜 -茅 -孟 -累 -宣 -樹 -鷹 -膝 -臉 -襪 -嘢 -嵐 -▲ -璇 -竺 -気 -迈 -糐 -挥 -瑜 -伽 -" -裳 -纹 -潯 -幾 -朔 -枊 -釀 -劝 -俺 -粢 -馓 -胥 -拥 -嘶 -達 -蝴 -昱 -ホ -ル -モ -ニ -颂 -噫 -否 -笙 -绎 -俞 -泵 -测 -耿 -揚 -犇 -锄 -卧 -炯 -烽 -橡 -操 -齊 -隴 -宀 -荥 -滙 -贪 -関 -垦 -↓ -麽 -暧 -匯 -恨 -叽 -断 -鮪 -椎 -病 -迹 -禺 -搓 -瀛 -唤 -埕 -愤 -怒 -拐 -狱 -垅 -绅 -設 -計 -書 -楷 -鮨 -邪 -郴 -盞 -榆 -恺 -樵 -煙 -舫 -翡 -砸 -叹 -縣 -璞 -禮 -獻 -似 -吆 -嘛 -灭 -擇 -夥 -ē -曰 -蜗 -櫻 -▏ -鑪 -鯊 -視 -淄 -钰 -〝 -〞 -報 -退 -壶 -鳴 -拒 -旱 -鼠 -蕴 -峧 -赶 -咏 -寬 -渎 -靣 -卟 -宙 -趟 -負 -镫 -讷 -迭 -彝 -樣 -輕 -却 -覆 -庖 -扉 -聖 -喬 -瞻 -瞿 -箭 -胆 -ε -韧 -誌 -既 -淳 -饞 -ě -圍 -墟 -俚 -翕 -貂 -畜 -緹 -搄 -旮 -旯 -寂 -寞 -詹 -茜 -鉄 -絕 -泸 -嬤 -允 -炘 -骏 -侑 -晒 -玄 -粧 -糘 -毫 -幽 -攸 -愧 -侨 -衰 -ぉ -に -き -ぃ -炽 -倉 -斛 -領 -盾 -窜 -鲷 -瓏 -媚 -爲 -裸 -窦 -虞 -處 -魷 -} -羡 -冕 -祺 -裁 -粶 -䬴 -嚟 -辆 -撮 -隋 -' -勝 -梭 -茸 -咭 -崟 -滷 -緻 -沩 -颠 -诠 -珺 -拙 -察 -≡ -辅 -父 -雁 -裱 -瞄 -漖 -鯨 -略 -橱 -帼 -棉 -濠 -蕃 -ǔ -崮 -阮 -勋 -苍 -喔 -猜 -箔 -è -雏 -睐 -袭 -皋 -彻 -売 -垚 -咯 -凑 -汴 -纽 -巩 -宸 -墅 -茏 -裡 -昧 -飽 -坯 -濟 -└ -┐ -懷 -霾 -´ -閑 -茹 -闳 -湶 -鈣 -圓 -昊 -眞 -標 -凖 -皱 -箍 -筹 -孬 -唠 -輝 -输 -綺 -驭 -哼 -匡 -偵 -蝇 -運 -漟 -乘 -Ē -卉 -邴 -謠 -怿 -亁 -棱 -呐 -湄 -莜 -阶 -堔 -炜 -邀 -笠 -遏 -犯 -罪 -栢 -餛 -亀 -苓 -膏 -伸 -? 
-阪 -委 -妯 -娌 -仝 -咧 -鍚 -▼ -遠 -摑 -滘 -颁 -ʌ -锈 -佤 -佗 -卌 -É -↙ -蔺 -汰 -塍 -認 -鳟 -畿 -耦 -吨 -䒕 -茬 -枼 -饕 -涼 -烀 -汶 -齿 -貳 -沱 -楞 -屹 -掺 -挢 -荻 -偷 -辶 -饌 -泮 -喧 -某 -聂 -夾 -吁 -鎬 -谅 -鞘 -泪 -佩 -㎡ -鐡 -犊 -漳 -睢 -粘 -輔 -爬 -濃 -し -ん -い -ち -ょ -く -ど -ぅ -戍 -咚 -蒡 -惯 -隣 -沭 -撇 -妞 -筛 -昵 -赁 -震 -欠 -涞 -從 -靚 -绥 -俑 -熔 -曙 -侗 -√ -仗 -袖 -饶 -辫 -琉 -鴿 -裂 -缝 -灞 -崖 -炑 -昝 -┌ -┘ -邕 -趴 -踢 -迩 -浈 -挚 -聆 -犁 -陝 -滾 -彎 -問 -癮 -砚 -ú -瀧 -吮 -毓 -劵 -槽 -黒 -忍 -畈 -姊 -沛 -忽 -摘 -燍 -♡ -汝 -贛 -叻 -甸 -乞 -丐 -践 -嗞 -㥁 -斐 -圖 -祯 -牤 -攻 -弯 -幹 -杠 -苞 -滤 -筆 -練 -鞑 -ˊ -萤 -榶 -叨 -轨 -耒 -嚮 -┃ -漪 -剛 -键 -弋 -彦 -瘋 -词 -敖 -鸦 -秧 -囚 -绾 -镶 -濂 -↘ -豁 -煒 -萄 -珲 -緋 -昂 -瀨 -缓 -疲 -替 -汥 -殡 -葬 -靳 -揉 -闭 -睛 -偘 -佚 -$ -; -^''' diff --git a/models/text_recognition_crnn/demo.cpp b/models/text_recognition_crnn/demo.cpp deleted file mode 100644 index 0da944c9..00000000 --- a/models/text_recognition_crnn/demo.cpp +++ /dev/null @@ -1,294 +0,0 @@ -#include -#include - - -#include -#include -#include - -#include "charset_32_94_3944.h" - -using namespace std; -using namespace cv; -using namespace dnn; - -vector< pair > backendTargetPairs = { - std::make_pair(dnn::DNN_BACKEND_OPENCV, dnn::DNN_TARGET_CPU), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA), - std::make_pair(dnn::DNN_BACKEND_CUDA, dnn::DNN_TARGET_CUDA_FP16), - std::make_pair(dnn::DNN_BACKEND_TIMVX, dnn::DNN_TARGET_NPU), - std::make_pair(dnn::DNN_BACKEND_CANN, dnn::DNN_TARGET_NPU)}; - -vector loadCharset(string); - -std::string keys = -"{ help h | | Print help message. }" -"{ model m | text_recognition_CRNN_EN_2021sep.onnx | Usage: Set model type, defaults to text_recognition_CRNN_EN_2021sep.onnx }" -"{ input i | | Usage: Path to input image or video file. Skip this argument to capture frames from a camera.}" -"{ width | 736 | Usage: Resize input image to certain width, default = 736. It should be multiple by 32.}" -"{ height | 736 | Usage: Resize input image to certain height, default = 736. 
It should be multiple by 32.}" -"{ binary_threshold | 0.3 | Usage: Threshold of the binary map, default = 0.3.}" -"{ polygon_threshold | 0.5 | Usage: Threshold of polygons, default = 0.5.}" -"{ max_candidates | 200 | Usage: Set maximum number of polygon candidates, default = 200.}" -"{ unclip_ratio | 2.0 | Usage: The unclip ratio of the detected text region, which determines the output size, default = 2.0.}" -"{ save s | 1 | Usage: Specify to save file with results (i.e. bounding box, confidence level). Invalid in case of camera input.}" -"{ viz v | 1 | Usage: Specify to open a new window to show results.}" -"{ backend bt | 0 | Choose one of computation backends: " -"0: (default) OpenCV implementation + CPU, " -"1: CUDA + GPU (CUDA), " -"2: CUDA + GPU (CUDA FP16), " -"3: TIM-VX + NPU, " -"4: CANN + NPU}"; - - -class PPOCRDet { -public: - - PPOCRDet(string modPath, Size inSize = Size(736, 736), float binThresh = 0.3, - float polyThresh = 0.5, int maxCand = 200, double unRatio = 2.0, - dnn::Backend bId = DNN_BACKEND_DEFAULT, dnn::Target tId = DNN_TARGET_CPU) : modelPath(modPath), inputSize(inSize), binaryThreshold(binThresh), - polygonThreshold(polyThresh), maxCandidates(maxCand), unclipRatio(unRatio), - backendId(bId), targetId(tId) - { - this->model = TextDetectionModel_DB(readNet(modelPath)); - this->model.setPreferableBackend(backendId); - this->model.setPreferableTarget(targetId); - - this->model.setBinaryThreshold(binaryThreshold); - this->model.setPolygonThreshold(polygonThreshold); - this->model.setUnclipRatio(unclipRatio); - this->model.setMaxCandidates(maxCandidates); - - this->model.setInputParams(1.0 / 255.0, inputSize, Scalar(122.67891434, 116.66876762, 104.00698793)); - } - pair< vector>, vector > infer(Mat image) { - CV_Assert(image.rows == this->inputSize.height && "height of input image != net input size "); - CV_Assert(image.cols == this->inputSize.width && "width of input image != net input size "); - vector> pt; - vector confidence; - 
this->model.detect(image, pt, confidence); - return make_pair< vector> &, vector< float > &>(pt, confidence); - } - -private: - string modelPath; - TextDetectionModel_DB model; - Size inputSize; - float binaryThreshold; - float polygonThreshold; - int maxCandidates; - double unclipRatio; - dnn::Backend backendId; - dnn::Target targetId; - -}; - - - -class CRNN { -private: - string modelPath; - dnn::Backend backendId; - dnn::Target targetId; - Net model; - vector charset; - Size inputSize; - Mat targetVertices; - -public: - CRNN(string modPath, dnn::Backend bId = DNN_BACKEND_DEFAULT, dnn::Target tId = DNN_TARGET_CPU) : modelPath(modPath), backendId(bId), targetId(tId) { - - this->model = readNet(this->modelPath); - this->model.setPreferableBackend(this->backendId); - this->model.setPreferableTarget(this->targetId); - // load charset by the name of model - if (this->modelPath.find("_EN_") != string::npos) - this->charset = loadCharset("CHARSET_EN_36"); - else if (this->modelPath.find("_CH_") != string::npos) - this->charset = loadCharset("CHARSET_CH_94"); - else if (this->modelPath.find("_CN_") != string::npos) - this->charset = loadCharset("CHARSET_CN_3944"); - else - CV_Error(-1, "Charset not supported! 
Exiting ..."); - - this->inputSize = Size(100, 32); // Fixed - this->targetVertices = Mat(4, 1, CV_32FC2); - this->targetVertices.row(0) = Vec2f(0, this->inputSize.height - 1); - this->targetVertices.row(1) = Vec2f(0, 0); - this->targetVertices.row(2) = Vec2f(this->inputSize.width - 1, 0); - this->targetVertices.row(3) = Vec2f(this->inputSize.width - 1, this->inputSize.height - 1); - } - - Mat preprocess(Mat image, Mat rbbox) - { - // Remove conf, reshape and ensure all is np.float32 - Mat vertices; - rbbox.reshape(2, 4).convertTo(vertices, CV_32FC2); - - Mat rotationMatrix = getPerspectiveTransform(vertices, this->targetVertices); - Mat cropped; - warpPerspective(image, cropped, rotationMatrix, this->inputSize); - - // 'CN' can detect digits (0\~9), upper/lower-case letters (a\~z and A\~Z), and some special characters - // 'CH' can detect digits (0\~9), upper/lower-case le6tters (a\~z and A\~Z), some Chinese characters and some special characters - if (this->modelPath.find("CN") == string::npos && this->modelPath.find("CH") == string::npos) - cvtColor(cropped, cropped, COLOR_BGR2GRAY); - Mat blob = blobFromImage(cropped, 1 / 127.5, this->inputSize, Scalar::all(127.5)); - return blob; - } - - u16string infer(Mat image, Mat rbbox) - { - // Preprocess - Mat inputBlob = this->preprocess(image, rbbox); - - // Forward - this->model.setInput(inputBlob); - Mat outputBlob = this->model.forward(); - - // Postprocess - u16string results = this->postprocess(outputBlob); - - return results; - } - - u16string postprocess(Mat outputBlob) - { - // Decode charaters from outputBlob - Mat character = outputBlob.reshape(1, outputBlob.size[0]); - u16string text(u""); - for (int i = 0; i < character.rows; i++) - { - double minVal, maxVal; - Point maxIdx; - minMaxLoc(character.row(i), &minVal, &maxVal, nullptr, &maxIdx); - if (maxIdx.x != 0) - text += charset[maxIdx.x - 1]; - else - text += u"-"; - } - // adjacent same letters as well as background text must be removed to get the final 
output - u16string textFilter(u""); - - for (int i = 0; i < text.size(); i++) - if (text[i] != u'-' && !(i > 0 && text[i] == text[i - 1])) - textFilter += text[i]; - return textFilter; - } -}; - - -Mat visualize(Mat image, pair< vector>, vector >&results, double fps=-1, Scalar boxColor=Scalar(0, 255, 0), Scalar textColor=Scalar(0, 0, 255), bool isClosed=true, int thickness=2) -{ - Mat output; - image.copyTo(output); - if (fps > 0) - putText(output, format("FPS: %.2f", fps), Point(0, 15), FONT_HERSHEY_SIMPLEX, 0.5, textColor); - polylines(output, results.first, isClosed, boxColor, thickness); - return output; -} - -int main(int argc, char** argv) -{ - CommandLineParser parser(argc, argv, keys); - - parser.about("An End-to-End Trainable Neural Network for Image-based Sequence Recognition and Its Application to Scene Text Recognition (https://arxiv.org/abs/1507.05717)"); - if (parser.has("help")) - { - parser.printMessage(); - return 0; - } - - int backendTargetid = parser.get("backend"); - String modelPath = parser.get("model"); - - if (modelPath.empty()) - { - CV_Error(Error::StsError, "Model file " + modelPath + " not found"); - } - - Size inpSize(parser.get("width"), parser.get("height")); - float binThresh = parser.get("binary_threshold"); - float polyThresh = parser.get("polygon_threshold"); - int maxCand = parser.get("max_candidates"); - double unRatio = parser.get("unclip_ratio"); - bool save = parser.get("save"); - bool viz = parser.get("viz"); - - PPOCRDet detector("../text_detection_ppocr/text_detection_en_ppocrv3_2023may.onnx", inpSize, binThresh, polyThresh, maxCand, unRatio, backendTargetPairs[backendTargetid].first, backendTargetPairs[backendTargetid].second); - CRNN recognizer(modelPath, backendTargetPairs[backendTargetid].first, backendTargetPairs[backendTargetid].second); - //! 
[Open a video file or an image file or a camera stream] - VideoCapture cap; - if (parser.has("input")) - cap.open(parser.get("input")); - else - cap.open(0); - if (!cap.isOpened()) - CV_Error(Error::StsError, "Cannot open video or file"); - Mat originalImage; - static const std::string kWinName = modelPath; - while (waitKey(1) < 0) - { - cap >> originalImage; - if (originalImage.empty()) - { - if (parser.has("input")) - { - cout << "Frame is empty" << endl; - break; - } - else - continue; - } - int originalW = originalImage.cols; - int originalH = originalImage.rows; - double scaleHeight = originalH / double(inpSize.height); - double scaleWidth = originalW / double(inpSize.width); - Mat image; - resize(originalImage, image, inpSize); - - // inference of text detector - TickMeter tm; - tm.start(); - pair< vector>, vector > results = detector.infer(image); - tm.stop(); - if (results.first.size() > 0 && results.second.size() > 0) - { - u16string texts; - auto score=results.second.begin(); - for (auto box : results.first) - { - Mat result = Mat(box).reshape(2, 4); - texts = texts + u"'" + recognizer.infer(image, result) + u"'"; - } - std::wstring_convert, char16_t> converter; - std::cout << converter.to_bytes(texts) << std::endl; - } - auto x = results.first; - // Scale the results bounding box - for (auto &pts : results.first) - { - for (int i = 0; i < 4; i++) - { - pts[i].x = int(pts[i].x * scaleWidth); - pts[i].y = int(pts[i].y * scaleHeight); - } - } - originalImage = visualize(originalImage, results, tm.getFPS()); - tm.reset(); - if (parser.has("input")) - { - if (save) - { - cout << "Result image saved to result.jpg\n"; - imwrite("result.jpg", originalImage); - } - if (viz) - imshow(kWinName, originalImage); - } - else - imshow(kWinName, originalImage); - - } - return 0; -} diff --git a/models/text_recognition_crnn/demo.py b/models/text_recognition_crnn/demo.py deleted file mode 100644 index 02ba036f..00000000 --- a/models/text_recognition_crnn/demo.py +++ 
/dev/null @@ -1,169 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. - -import sys -import argparse - -import numpy as np -import cv2 as cv - -# Check OpenCV version -opencv_python_version = lambda str_version: tuple(map(int, (str_version.split(".")))) -assert opencv_python_version(cv.__version__) >= opencv_python_version("4.10.0"), \ - "Please install latest opencv-python for benchmark: python3 -m pip install --upgrade opencv-python" - -from crnn import CRNN - -sys.path.append('../text_detection_ppocr') -from ppocr_det import PPOCRDet - -# Valid combinations of backends and targets -backend_target_pairs = [ - [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA], - [cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA_FP16], - [cv.dnn.DNN_BACKEND_TIMVX, cv.dnn.DNN_TARGET_NPU], - [cv.dnn.DNN_BACKEND_CANN, cv.dnn.DNN_TARGET_NPU] -] - -parser = argparse.ArgumentParser( - description="An End-to-End Trainable Neural Network for Image-based Sequence Recognition and Its Application to Scene Text Recognition (https://arxiv.org/abs/1507.05717)") -parser.add_argument('--input', '-i', type=str, - help='Usage: Set path to the input image. 
Omit for using default camera.') -parser.add_argument('--model', '-m', type=str, default='text_recognition_CRNN_EN_2021sep.onnx', - help='Usage: Set model path, defaults to text_recognition_CRNN_EN_2021sep.onnx.') -parser.add_argument('--backend_target', '-bt', type=int, default=0, - help='''Choose one of the backend-target pair to run this demo: - {:d}: (default) OpenCV implementation + CPU, - {:d}: CUDA + GPU (CUDA), - {:d}: CUDA + GPU (CUDA FP16), - {:d}: TIM-VX + NPU, - {:d}: CANN + NPU - '''.format(*[x for x in range(len(backend_target_pairs))])) -parser.add_argument('--width', type=int, default=736, - help='Preprocess input image by resizing to a specific width. It should be multiple by 32.') -parser.add_argument('--height', type=int, default=736, - help='Preprocess input image by resizing to a specific height. It should be multiple by 32.') -parser.add_argument('--save', '-s', action='store_true', - help='Usage: Specify to save a file with results. Invalid in case of camera input.') -parser.add_argument('--vis', '-v', action='store_true', - help='Usage: Specify to open a new window to show results. 
Invalid in case of camera input.') -args = parser.parse_args() - -def visualize(image, boxes, texts, color=(0, 255, 0), isClosed=True, thickness=2): - output = image.copy() - - pts = np.array(boxes[0]) - output = cv.polylines(output, pts, isClosed, color, thickness) - for box, text in zip(boxes[0], texts): - cv.putText(output, text, (box[1].astype(np.int32)), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255)) - return output - -if __name__ == '__main__': - backend_id = backend_target_pairs[args.backend_target][0] - target_id = backend_target_pairs[args.backend_target][1] - - # Instantiate PPOCRDet for text detection - detector = PPOCRDet(modelPath='../text_detection_ppocr/text_detection_en_ppocrv3_2023may.onnx', - inputSize=[args.width, args.height], - binaryThreshold=0.3, - polygonThreshold=0.5, - maxCandidates=200, - unclipRatio=2.0, - backendId=backend_id, - targetId=target_id) - # Instantiate CRNN for text recognition - recognizer = CRNN(modelPath=args.model, backendId=backend_id, targetId=target_id) - - # If input is an image - if args.input is not None: - original_image = cv.imread(args.input) - original_w = original_image.shape[1] - original_h = original_image.shape[0] - scaleHeight = original_h / args.height - scaleWidth = original_w / args.width - image = cv.resize(original_image, [args.width, args.height]) - - # Inference - results = detector.infer(image) - texts = [] - for box, score in zip(results[0], results[1]): - texts.append( - recognizer.infer(image, box.reshape(8)) - ) - - # Scale the results bounding box - for i in range(len(results[0])): - for j in range(4): - box = results[0][i][j] - results[0][i][j][0] = box[0] * scaleWidth - results[0][i][j][1] = box[1] * scaleHeight - - # Draw results on the input image - original_image = visualize(original_image, results, texts) - - # Save results if save is true - if args.save: - print('Results saved to result.jpg\n') - cv.imwrite('result.jpg', original_image) - - # Visualize results in a new window - if args.vis: 
- cv.namedWindow(args.input, cv.WINDOW_AUTOSIZE) - cv.imshow(args.input, original_image) - cv.waitKey(0) - else: # Omit input to call default camera - deviceId = 0 - cap = cv.VideoCapture(deviceId) - - tm = cv.TickMeter() - while cv.waitKey(1) < 0: - hasFrame, original_image = cap.read() - if not hasFrame: - print('No frames grabbed!') - break - - original_w = original_image.shape[1] - original_h = original_image.shape[0] - scaleHeight = original_h / args.height - scaleWidth = original_w / args.width - - frame = cv.resize(original_image, [args.width, args.height]) - # Inference of text detector - tm.start() - results = detector.infer(frame) - tm.stop() - cv.putText(frame, 'Latency - {}: {:.2f}'.format(detector.name, tm.getFPS()), (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255)) - tm.reset() - - # Inference of text recognizer - if len(results[0]) and len(results[1]): - texts = [] - tm.start() - for box, score in zip(results[0], results[1]): - result = np.hstack( - (box.reshape(8), score) - ) - texts.append( - recognizer.infer(frame, box.reshape(8)) - ) - tm.stop() - cv.putText(frame, 'Latency - {}: {:.2f}'.format(recognizer.name, tm.getFPS()), (0, 30), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255)) - tm.reset() - - # Scale the results bounding box - for i in range(len(results[0])): - for j in range(4): - box = results[0][i][j] - results[0][i][j][0] = box[0] * scaleWidth - results[0][i][j][1] = box[1] * scaleHeight - - # Draw results on the input image - original_image = visualize(original_image, results, texts) - print(texts) - - # Visualize results in a new Window - cv.imshow('{} Demo'.format(recognizer.name), original_image) diff --git a/models/text_recognition_crnn/example_outputs/CRNNCTC.gif b/models/text_recognition_crnn/example_outputs/CRNNCTC.gif deleted file mode 100644 index 09689aaf..00000000 --- a/models/text_recognition_crnn/example_outputs/CRNNCTC.gif +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:ad60d87b58f365d168ae4d444dc27306e6d379ed16dbe82b44f443a43f4e65db -size 5249246 diff --git a/models/text_recognition_crnn/example_outputs/demo.jpg b/models/text_recognition_crnn/example_outputs/demo.jpg deleted file mode 100644 index 35ae4184..00000000 --- a/models/text_recognition_crnn/example_outputs/demo.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:93b5838416d9d131f7a0fe3f00addfce0ed984052c15f69a8904d553066aa0aa -size 39430 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_CH_2021sep.onnx b/models/text_recognition_crnn/text_recognition_CRNN_CH_2021sep.onnx deleted file mode 100644 index 443f5dec..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_CH_2021sep.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2dc566fd01ac2118b25c6960508ebd758b64c421a2bfa78dc05401ada6737e0b -size 64906971 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_CH_2021sep_int8bq.onnx b/models/text_recognition_crnn/text_recognition_CRNN_CH_2021sep_int8bq.onnx deleted file mode 100644 index b737148f..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_CH_2021sep_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c205e1a1a3bc5a1d585ea380b55d2801ba655c473bd5d41730fc14d6341f2e16 -size 26887550 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_CH_2022oct_int8.onnx b/models/text_recognition_crnn/text_recognition_CRNN_CH_2022oct_int8.onnx deleted file mode 100644 index 089d9ba5..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_CH_2022oct_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c2bc75af1998c0b608f86ab875cbdcd109d18b27ff0d9872e7b7429fd1945f3a -size 25783320 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_CH_2023feb_fp16.onnx b/models/text_recognition_crnn/text_recognition_CRNN_CH_2023feb_fp16.onnx deleted 
file mode 100644 index c619d4da..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_CH_2023feb_fp16.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cfef028889b3a21771e687d501ac38ccab6d37d199e94f244d60cc21f743526b -size 32472394 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov.onnx b/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov.onnx deleted file mode 100644 index e48fe7fe..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c760bf82d684b87dfabb288e6c0f92d41a8cd6c1780661ca2c3cd10c2065a9ba -size 72807160 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov_int8.onnx b/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov_int8.onnx deleted file mode 100644 index a6a987f5..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:63b37da9f35d1861fb1af40ab82313794291ad49c950374dc4ed232b56e1b656 -size 27710536 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov_int8bq.onnx b/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov_int8bq.onnx deleted file mode 100644 index 7be6a406..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov_int8bq.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a5a5aac86dbb06be8853ec07d1f5e8bac810a6cc4233390a3c020487b9a7881c -size 29026387 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_EN_2021sep.onnx b/models/text_recognition_crnn/text_recognition_CRNN_EN_2021sep.onnx deleted file mode 100644 index ff13f288..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_EN_2021sep.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:a84b1f6e11a65c2d733cb0cc1f014aae3f99051e3f11447dc282faa678eee544 -size 33823087 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_EN_2022oct_int8.onnx b/models/text_recognition_crnn/text_recognition_CRNN_EN_2022oct_int8.onnx deleted file mode 100644 index d9d2a04d..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_EN_2022oct_int8.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:94117b4c2652337b3f1aef81b2ec15a74e97973b1c58f743e86380b95b95ffa2 -size 16378030 diff --git a/models/text_recognition_crnn/text_recognition_CRNN_EN_2023feb_fp16.onnx b/models/text_recognition_crnn/text_recognition_CRNN_EN_2023feb_fp16.onnx deleted file mode 100644 index 8017c197..00000000 --- a/models/text_recognition_crnn/text_recognition_CRNN_EN_2023feb_fp16.onnx +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e785f79aeb817e1600b18fad5e740bddc281a9eb053d648f163af335a32d59d0 -size 16916177 diff --git a/reports/2023-4.9.0/assets/benchmark_table_4.9.0.png b/reports/2023-4.9.0/assets/benchmark_table_4.9.0.png deleted file mode 100644 index d0cd6fa1..00000000 --- a/reports/2023-4.9.0/assets/benchmark_table_4.9.0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:978f537c9a967810915ba049ffb667fb41c2b9b22be90c0c334a350e1ac37505 -size 311950 diff --git a/reports/2023-4.9.0/opencv_zoo_report-cn-2023-4.9.0.md b/reports/2023-4.9.0/opencv_zoo_report-cn-2023-4.9.0.md deleted file mode 100644 index 85c8d7bf..00000000 --- a/reports/2023-4.9.0/opencv_zoo_report-cn-2023-4.9.0.md +++ /dev/null @@ -1,53 +0,0 @@ -# OpenCV Model Zoo报告 - 模型、板卡和性能基准结果分析 - -[![benchmark_table](assets/benchmark_table_4.9.0.png)](benchmark_table) - -[OpenCV Model Zoo](https://github.com/opencv/opencv_zoo)项目于2021年9月启动。从那时起,我们已收集了43个模型权重,涵盖19个任务,并添加了13种硬件设置,涵盖不同的CPU架构(x86-64、ARM和RISC-V)以及不同的计算单元(CPU、GPU和NPU)。所有这些模型和硬件都经过我们的全面测试,并保证与OpenCV的最新版本(目前是4.9.0)兼容,如我们的基准表所示。 - -## Models - 
-截至此版本发布,我们在opencv_zoo中共有43个模型权重,涵盖了总共19个任务。这些模型是考虑到许可证的,这意味着基本上您可以为任何目的使用opencv_zoo中的所有模型,甚至用于商业用途。它们主要来自以下4个来源: - -- OpenCV中国团队。YuNet人脸检测模型由我们团队的一名成员开发和维护。 -- OpenCV Area Chair。这是由OpenCV基金会启动的一个项目,详情可以在[这里](https://opencv.org/opencv-area-chairs/)找到。人脸识别的SFace模型和面部表情识别的FER模型是由Area Chair邓教授贡献的。 -- 与OpenCV的合作。人体分割模型来自百度PaddlePaddle,修改后的YuNet用于车牌检测来自[watrix.ai](watrix.ai)。 -- OpenCV社区。从2022年开始,我们在Google Summer of Code(GSoC)计划中有关于模型贡献的项目想法。GSoC学生已成功贡献了6个模型,涵盖了目标检测、目标跟踪和光流估计等任务。 - -我们欢迎您的贡献! - -此外,我们为每个模型提供了在最新版本的OpenCV中可立即使用的Python和C++演示。我们还提供了[可视化样例](https://github.com/opencv/opencv_zoo?tab=readme-ov-file#some-examples),以便开发者们更好地了解任务和输出的类型。 - -## Boards - -opencv_zoo中有13种硬件设置,其中之一是搭载Intel i7-12700K的 PC,其他都是单板计算机(SBC)。它们按CPU架构分类如下: - - - -x86-64: - -- Intel Core i7-12700K:8 P核(3.60GHz,4.90GHz turbo),4 E核(2.70GHz,3.80GHz turbo),20线程。 - -ARM: - -| 板卡 | SoC 型号 | CPU 型号 | GPU 型号 | NPU 性能(Int8) | -| ----- | --- | --- | --- | --- | -| Khadas VIM3 | Amlogic A311D | 2.20GHz 四核 Cortex-A73 + 1.80GHz 双核 Cortex-A53 | ARM G52 | 5 TOPS | -| Khadas VIM4 | Amlogic A311D2 | 2.2GHz 四核 ARM Cortex-A73 + 2.0GHz 四核 Cortex-A53 | Mali G52MP8(8EE) 800Mhz | 3.2 TOPS | -| Khadas Edge 2 | Rockchip RK3588S | 2.25GHz 四核 Cortex-A76 + 1.80GHz 四核 Cortex-A55 | 1GHz ARM Mali-G610 | 6 TOPS | -| Raspberry Pi 4B | Broadcom BCM2711 | 1.5GHz 四核 Cortex-A72 | *未知* | *无* | -| Horizon Sunrise X3 PI | Sunrise X3 | 1.2GHz 四核 Cortex-A53 | *未知* | 5 TOPS,双核伯努利架构| -| MAIX-III AXera-Pi | AXera AX620A | 四核 Cortex-A7 | *未知* | 3.6 TOPS | -| Toybrick RV1126 | Rockchip RV1126 | 四核 Cortex-A7 | *未知* | 2.0 TOPS | -| NVIDIA Jetson Nano B01 | *未知* | 1.43GHz 四核 Cortex-A57 | 128 核 NVIDIA Maxwell | *无* | -| NVIDIA Jetson Nano Orin | *未知* | 6 核 Cortex®-A78AE | 1024 核 NVIDIA Ampere | *无* | -| Atlas 200 DK | *未知* | *未知* | *未知* | 22 TOPS,Ascend 310 | -| Atlas 200I DK A2 | *未知* | 1.0GHz 四核 | *未知* | 8 TOPS,Ascend 310B | - -RISC-V: - -| 板卡 | SoC 型号 | CPU 型号 | GPU 型号 | -| ----- | --------- | --------- | --------- | -| StarFive VisionFive 
2 | StarFive JH7110 | 1.5GHz 四核 RISC-V 64 位 | 600MHz IMG BXE-4-32 MC1 | -| Allwinner Nezha D1 | Allwinner D1 | 1.0GHz 单核 RISC-V 64 位,RVV-0.7.1 | *未知* | - -我们的目标是在边缘设备上进行高效计算!在过去的几年中,我们(OpenCV)中国团队,已经在dnn模块针对ARM架构优化的方面付出了大量努力,特别关注卷积神经网络的卷积内核优化和Vision Transformers的GEMM内核优化。更值得一提的是,我们为dnn模块引入了NPU支持,支持Khadas VIM3、Atlas 200 DK 和Atlas 200I DK A2上的 NPU。在 NPU 上运行模型可以帮助将计算负载从CPU分配到NPU,甚至可以达到更快的推理速度(例如,在 Atlas 200 DK 上 Ascend 310 的测试结果)。 diff --git a/reports/2023-4.9.0/opencv_zoo_report-en-2023-4.9.0.md b/reports/2023-4.9.0/opencv_zoo_report-en-2023-4.9.0.md deleted file mode 100644 index 122bb136..00000000 --- a/reports/2023-4.9.0/opencv_zoo_report-en-2023-4.9.0.md +++ /dev/null @@ -1,56 +0,0 @@ -# OpenCV Model Zoo Report - Models, Boards and Benchmark Result Analysis - - -[![benchmark_table](assets/benchmark_table_4.9.0.png)](benchmark_table) - -[OpenCV Model Zoo](https://github.com/opencv/opencv_zoo) was started back in September, 2021. Since then, we have collected 43 model weights covering 19 tasks and added 13 hardware setups covering different CPU architectures (x86-64, ARM and RISC-V) and different computing units (CPU, GPU and NPU). All these models and hardware are fully tested by us and guaranteed to work with latest release of OpenCV (currently 4.9.0) as our benchmark table shown. - -## Models - -As of this release, we have 43 model weights covering 19 tasks in total in the zoo. These models are collected with licenses in mind, meaning you can bascially use all the models in the zoo for whatever purposes you want, even for commercial purpose. They are collected from mainly 4 sources: - -- OpenCV China team. The YuNet model for face detection is developed and maintained by one of our team members. -- OpenCV Area Chair. This is a program started by OpenCV Foundation, details can be found [here](https://opencv.org/opencv-area-chairs/). 
The SFace model for face recognition and FER model for facial expression recognition are contributed from one of the Area Chairs Prof. Deng. -- Cooperation with OpenCV. The HumanSeg model for human segmentation is from Baidu PaddlePaddle, and the modified YuNet for license plate detection is from [watrix.ai](watrix.ai). -- Community. Started from 2022, we have project ideas for model contribution in the Google Summer of Code (GSoC) program. GSoC students have successfully contributed 6 models covering tasks such as object detection, object tracking and optical flow estimation. - -We welcome your contribution! - -Besides, demos in Python and C++, which work out-of-the-box with latest OpenCV, are also provided for each model. We also provide [visual examples](https://github.com/opencv/opencv_zoo?tab=readme-ov-file#some-examples) so that people can better understand what the task is and what kind of the output is. - -## Boards - -There are 13 hardware setups in the zoo, one of them is a PC with Intel i7-12700K, and the others are single board computers (SBC). They are categorized by CPU architecture as follows: - - - -x86-64: - -- Intel Core i7-12700K: 8 P-core (3.60GHz, 4.90GHz turbo), 4 E-core (2.70GHz, 3.80GHz turbo), 20 threads. 
- - -ARM: - -| Board | SoC model | CPU model | GPU model | NPU Performance (Int8) | -| ----- | --- | --- | --- | --- | -| Khadas VIM3 | Amlogic A311D | 2.20GHz Quad-core Cortex-A73 + 1.80GHz Dual-core Cortex-A53 | ARM G52 | 5 TOPS | -| Khadas VIM4 | Amlogic A311D2 | 2.2GHz Quad-core ARM Cortex-A73 + 2.0GHz Quad-core Cortex-A53 | Mali G52MP8(8EE) 800Mhz | 3.2 TOPS | -| Khadas Edge 2 | Rockchip RK3588S | 2.25GHz Quad-core Cortex-A76 + 1.80GHz Quad-core Cortex-A55 | 1GHz ARM Mali-G610 | 6 TOPS | -| Raspberry Pi 4B | Broadcom BCM2711 | 1.5GHz Quad-core Cortex-A72 | *Unknown* | *No* | -| Horizon Sunrise X3 PI | Sunrise X3 | 1.2GHz Quad-core Cortex-A53 | *Unkown* | 5 TOPS, Dual-core Bernoulli Arch| -| MAIX-III AXera-Pi | AXera AX620A | Quad-core Cortex-A7 | *Unknown* | 3.6 TOPS | -| Toybrick RV1126 | Rockchip RV1126 | Quad-core Cortex-A7 | *Unknown* | 2.0 TOPS | -| NVIDIA Jetson Nano B01 | *Unknown* | 1.43GHz Quad-core Cortex-A57 | 128-core NVIDIA Maxwell | *No* | -| NVIDIA Jetson Nano Orin | *Unknown* | 6-core Cortex®-A78AE | 1024-core NVIDIA Ampere | *No* | -| Atlas 200 DK | *Unknown* | *Unknown* | *Unknown* | 22 TOPS, Ascend 310 | -| Atlas 200I DK A2 | *Unknown* | 1.0GHz Quad-core | *Unknown* | 8 TOPS, Ascend 310B | - - -RISC-V: - -| Board | SoC model | CPU model | GPU model | -| ----- | --------- | --------- | --------- | -| StarFive VisionFive 2 | StarFive JH7110 | 1.5GHz Quad-core RISC-V 64-bit | 600MHz IMG BXE-4-32 MC1 | -| Allwinner Nezha D1 | Allwinner D1 | 1.0GHz single-core RISC-V 64-bit, RVV-0.7.1 | *Unknown* | - -We are targetting on efficient computing on edge devices! In the past few years, we, the OpenCV China team, have spent most of our effort in optimizing dnn module for ARM architecture, focusing especially on convolution kernel optimization for ConvNets and GEMM kernel optimization for Vision Transformers. 
What's even more worth mentioning is that we introduce NPU support for the dnn module, supporing the NPU in Khadas VIM3, Atlas 200 DK and Atlas 200I DK A2. Running the model on NPU can help distribute computing loads from CPU to NPU and even reaching a faster inference speed (see benchmark results on Ascend 310 on Atlas 200 DK for example). diff --git a/reports/README.md b/reports/README.md deleted file mode 100644 index 623e0d9f..00000000 --- a/reports/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Reports on models and boards - -Here we present reports on models and boards in the zoo per major release of OpenCV since 4.9.0. diff --git a/tools/eval/README.md b/tools/eval/README.md deleted file mode 100644 index d30829e3..00000000 --- a/tools/eval/README.md +++ /dev/null @@ -1,228 +0,0 @@ -# Accuracy evaluation of models in OpenCV Zoo - -Make sure you have the following packages installed: - -```shell -pip install tqdm -pip install scikit-learn -pip install scipy==1.8.1 -``` - -Generally speaking, evaluation can be done with the following command: - -```shell -python eval.py -m model_name -d dataset_name -dr dataset_root_dir -``` - -Supported datasets: - -- [ImageNet](#imagenet) -- [WIDERFace](#widerface) -- [LFW](#lfw) -- [ICDAR](#icdar2003) -- [IIIT5K](#iiit5k) -- [Mini Supervisely](#mini-supervisely) - -## ImageNet - -### Prepare data - -Please visit https://image-net.org/ to download the ImageNet dataset (only need images in `ILSVRC/Data/CLS-LOC/val`) and [the labels from caffe](http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz). Organize files as follow: - -```shell -$ tree -L 2 /path/to/imagenet -. 
-├── caffe_ilsvrc12 -│   ├── det_synset_words.txt -│   ├── imagenet.bet.pickle -│   ├── imagenet_mean.binaryproto -│   ├── synsets.txt -│   ├── synset_words.txt -│   ├── test.txt -│   ├── train.txt -│   └── val.txt -├── caffe_ilsvrc12.tar.gz -├── ILSVRC -│   ├── Annotations -│   ├── Data -│   └── ImageSets -├── imagenet_object_localization_patched2019.tar.gz -├── LOC_sample_submission.csv -├── LOC_synset_mapping.txt -├── LOC_train_solution.csv -└── LOC_val_solution.csv -``` - -### Evaluation - -Run evaluation with the following command: - -```shell -python eval.py -m mobilenet -d imagenet -dr /path/to/imagenet -``` - -## WIDERFace - -The script is modified based on [WiderFace-Evaluation](https://github.com/wondervictor/WiderFace-Evaluation). - -### Prepare data - -Please visit http://shuoyang1213.me/WIDERFACE to download the WIDERFace dataset [Validation Images](https://huggingface.co/datasets/wider_face/resolve/main/data/WIDER_val.zip), [Face annotations](http://shuoyang1213.me/WIDERFACE/support/bbx_annotation/wider_face_split.zip) and [eval_tools](http://shuoyang1213.me/WIDERFACE/support/eval_script/eval_tools.zip). Organize files as follow: - -```shell -$ tree -L 2 /path/to/widerface -. -├── eval_tools -│   ├── boxoverlap.m -│   ├── evaluation.m -│   ├── ground_truth -│   ├── nms.m -│   ├── norm_score.m -│   ├── plot -│   ├── read_pred.m -│   └── wider_eval.m -├── wider_face_split -│   ├── readme.txt -│   ├── wider_face_test_filelist.txt -│   ├── wider_face_test.mat -│   ├── wider_face_train_bbx_gt.txt -│   ├── wider_face_train.mat -│   ├── wider_face_val_bbx_gt.txt -│   └── wider_face_val.mat -└── WIDER_val - └── images -``` - -### Evaluation - -Run evaluation with the following command: - -```shell -python eval.py -m yunet -d widerface -dr /path/to/widerface -``` - -## LFW - -The script is modified based on [evaluation of InsightFace](https://github.com/deepinsight/insightface/blob/f92bf1e48470fdd567e003f196f8ff70461f7a20/src/eval/lfw.py). 
- -This evaluation uses [YuNet](../../models/face_detection_yunet) as face detector. The structure of the face bounding boxes saved in [lfw_face_bboxes.npy](../eval/datasets/lfw_face_bboxes.npy) is shown below. -Each row represents the bounding box of the main face that will be used in each image. - -```shell -[ - [x, y, w, h, x_re, y_re, x_le, y_le, x_nt, y_nt, x_rcm, y_rcm, x_lcm, y_lcm], - ... - [x, y, w, h, x_re, y_re, x_le, y_le, x_nt, y_nt, x_rcm, y_rcm, x_lcm, y_lcm] -] -``` - -`x1, y1, w, h` are the top-left coordinates, width and height of the face bounding box, `{x, y}_{re, le, nt, rcm, lcm}` stands for the coordinates of right eye, left eye, nose tip, the right corner and left corner of the mouth respectively. Data type of this numpy array is `np.float32`. - - -### Prepare data - -Please visit http://vis-www.cs.umass.edu/lfw to download the LFW [all images](http://vis-www.cs.umass.edu/lfw/lfw.tgz)(needs to be decompressed) and [pairs.txt](http://vis-www.cs.umass.edu/lfw/pairs.txt)(needs to be placed in the `view2` folder). Organize files as follow: - -```shell -$ tree -L 2 /path/to/lfw -. -├── lfw -│   ├── Aaron_Eckhart -│   ├── ... -│   └── Zydrunas_Ilgauskas -└── view2 -    └── pairs.txt -``` - -### Evaluation - -Run evaluation with the following command: - -```shell -python eval.py -m sface -d lfw -dr /path/to/lfw -``` - -## ICDAR2003 - -### Prepare data - -Please visit http://iapr-tc11.org/mediawiki/index.php/ICDAR_2003_Robust_Reading_Competitions to download the ICDAR2003 dataset and the labels. You have to download the Robust Word Recognition [TrialTrain Set](http://www.iapr-tc11.org/dataset/ICDAR2003_RobustReading/TrialTrain/word.zip) only. - -```shell -$ tree -L 2 /path/to/icdar -. -├── word -│   ├── 1 -│ │ ├── self -│ │ ├── ... -│ │ └── willcooks -│   ├── ... 
-│   └── 12 -└── word.xml -    -``` - -### Evaluation - -Run evaluation with the following command: - -```shell -python eval.py -m crnn -d icdar -dr /path/to/icdar -``` - -### Example - -```shell -download zip file from http://www.iapr-tc11.org/dataset/ICDAR2003_RobustReading/TrialTrain/word.zip -upzip file to /path/to/icdar -python eval.py -m crnn -d icdar -dr /path/to/icdar -``` - -## IIIT5K - -### Prepare data - -Please visit https://github.com/cv-small-snails/Text-Recognition-Material to download the IIIT5K dataset and the labels. - -### Evaluation - -All the datasets in the format of lmdb can be evaluated by this script.
-Run evaluation with the following command: - -```shell -python eval.py -m crnn -d iiit5k -dr /path/to/iiit5k -``` - - -## Mini Supervisely - -### Prepare data -Please download the mini_supervisely data from [here](https://paddleseg.bj.bcebos.com/humanseg/data/mini_supervisely.zip) which includes the validation dataset and unzip it. - -```shell -$ tree -L 2 /path/to/mini_supervisely -. -├── Annotations -│   ├── ache-adult-depression-expression-41253.png -│   ├── ... -├── Images -│   ├── ache-adult-depression-expression-41253.jpg -│   ├── ... -├── test.txt -├── train.txt -└── val.txt -``` - -### Evaluation - -Run evaluation with the following command : - -```shell -python eval.py -m pphumanseg -d mini_supervisely -dr /path/to/pphumanseg -``` - -Run evaluation on quantized model with the following command : - -```shell -python eval.py -m pphumanseg_q -d mini_supervisely -dr /path/to/pphumanseg -``` \ No newline at end of file diff --git a/tools/eval/datasets/__init__.py b/tools/eval/datasets/__init__.py deleted file mode 100644 index 5ed59faa..00000000 --- a/tools/eval/datasets/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -from .imagenet import ImageNet -from .widerface import WIDERFace -from .lfw import LFW -from .icdar import ICDAR -from .iiit5k import IIIT5K -from .minisupervisely import MiniSupervisely - -class Registery: - def __init__(self, name): - self._name = name - self._dict = dict() - - def get(self, key): - return self._dict[key] - - def register(self, item): - self._dict[item.__name__] = item - -DATASETS = Registery("Datasets") -DATASETS.register(ImageNet) -DATASETS.register(WIDERFace) -DATASETS.register(LFW) -DATASETS.register(ICDAR) -DATASETS.register(IIIT5K) -DATASETS.register(MiniSupervisely) diff --git a/tools/eval/datasets/icdar.py b/tools/eval/datasets/icdar.py deleted file mode 100644 index 80b9eb42..00000000 --- a/tools/eval/datasets/icdar.py +++ /dev/null @@ -1,54 +0,0 @@ -import os -import numpy as np -import cv2 as cv -import xml.dom.minidom 
as minidom -from tqdm import tqdm - -class ICDAR: - def __init__(self, root): - self.root = root - self.acc = -1 - self.inputSize = [100, 32] - self.val_label_file = os.path.join(root, "word.xml") - self.val_label = self.load_label(self.val_label_file) - - @property - def name(self): - return self.__class__.__name__ - - def load_label(self, label_file): - label = list() - dom = minidom.getDOMImplementation().createDocument(None, 'Root', None) - root = dom.documentElement - dom = minidom.parse(self.val_label_file) - root = dom.documentElement - names = root.getElementsByTagName('image') - for name in names: - key = os.path.join(self.root, name.getAttribute('file')) - value = name.getAttribute('tag').lower() - label.append([key, value]) - - return label - - def eval(self, model): - right_num = 0 - pbar = tqdm(self.val_label) - pbar.set_description("Evaluating {} with {} val set".format(model.name, self.name)) - - for fn, label in pbar: - - img = cv.imread(fn) - - rbbox = np.array([0, img.shape[0], 0, 0, img.shape[1], 0, img.shape[1], img.shape[0]]) - pred = model.infer(img, rbbox) - if label.lower() == pred.lower(): - right_num += 1 - - self.acc = right_num/(len(self.val_label) * 1.0) - - - def get_result(self): - return self.acc - - def print_result(self): - print("Accuracy: {:.2f}%".format(self.acc*100)) \ No newline at end of file diff --git a/tools/eval/datasets/iiit5k.py b/tools/eval/datasets/iiit5k.py deleted file mode 100644 index 82b08047..00000000 --- a/tools/eval/datasets/iiit5k.py +++ /dev/null @@ -1,56 +0,0 @@ -import lmdb -import os -import numpy as np -import cv2 as cv -from tqdm import tqdm - -class IIIT5K: - def __init__(self, root): - self.root = root - self.acc = -1 - self.inputSize = [100, 32] - - self.val_label = self.load_label(self.root) - - @property - def name(self): - return self.__class__.__name__ - - def load_label(self, root): - lmdb_file = root - lmdb_env = lmdb.open(lmdb_file) - lmdb_txn = lmdb_env.begin() - lmdb_cursor = 
lmdb_txn.cursor() - label = list() - for key, value in lmdb_cursor: - image_index = key.decode() - if image_index.split('-')[0] == 'image': - img = cv.imdecode(np.fromstring(value, np.uint8), 3) - label_index = 'label-' + image_index.split('-')[1] - value = lmdb_txn.get(label_index.encode()).decode().lower() - label.append([img, value]) - else: - break - return label - - def eval(self, model): - right_num = 0 - pbar = tqdm(self.val_label) - pbar.set_description("Evaluating {} with {} val set".format(model.name, self.name)) - - for img, value in pbar: - - - rbbox = np.array([0, img.shape[0], 0, 0, img.shape[1], 0, img.shape[1], img.shape[0]]) - pred = model.infer(img, rbbox).lower() - if value == pred: - right_num += 1 - - self.acc = right_num/(len(self.val_label) * 1.0) - - - def get_result(self): - return self.acc - - def print_result(self): - print("Accuracy: {:.2f}%".format(self.acc*100)) \ No newline at end of file diff --git a/tools/eval/datasets/imagenet.py b/tools/eval/datasets/imagenet.py deleted file mode 100644 index 571a89e9..00000000 --- a/tools/eval/datasets/imagenet.py +++ /dev/null @@ -1,65 +0,0 @@ -import os - -import numpy as np -import cv2 as cv - -from tqdm import tqdm - -class ImageNet: - def __init__(self, root, size=224): - self.root = root - self.size = size - self.top1_acc = -1 - self.top5_acc = -1 - - self.root_val = os.path.join(self.root, "ILSVRC", "Data", "CLS-LOC", "val") - self.val_label_file = os.path.join(self.root, "caffe_ilsvrc12", "val.txt") - - self.val_label = self.load_label(self.val_label_file) - - @property - def name(self): - return self.__class__.__name__ - - def load_label(self, label_file): - label = list() - with open(label_file, "r") as f: - for line in f: - line = line.strip() - key, value = line.split() - - key = os.path.join(self.root_val, key) - value = int(value) - - label.append([key, value]) - - return label - - def eval(self, model): - top_1_hits = 0 - top_5_hits = 0 - pbar = tqdm(self.val_label) - 
pbar.set_description("Evaluating {} with {} val set".format(model.name, self.name)) - - for fn, label in pbar: - - img = cv.imread(fn) - img = cv.cvtColor(img, cv.COLOR_BGR2RGB) - img = cv.resize(img, dsize=(256, 256)) - img = img[16:240, 16:240, :] - - pred = model.infer(img) - if label == pred[0][0]: - top_1_hits += 1 - if label in pred[0]: - top_5_hits += 1 - - self.top1_acc = top_1_hits/(len(self.val_label) * 1.0) - self.top5_acc = top_5_hits/(len(self.val_label) * 1.0) - - def get_result(self): - return self.top1_acc, self.top5_acc - - def print_result(self): - print("Top-1 Accuracy: {:.2f}%; Top-5 Accuracy: {:.2f}%".format(self.top1_acc*100, self.top5_acc*100)) - diff --git a/tools/eval/datasets/lfw.py b/tools/eval/datasets/lfw.py deleted file mode 100644 index c001b3f9..00000000 --- a/tools/eval/datasets/lfw.py +++ /dev/null @@ -1,239 +0,0 @@ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os -import numpy as np - -from sklearn.model_selection import KFold -from scipy import interpolate -import sklearn -from sklearn.decomposition import PCA - -import cv2 as cv -from tqdm import tqdm - - -def calculate_roc(thresholds, - embeddings1, - embeddings2, - actual_issame, - nrof_folds=10, - pca=0): - assert (embeddings1.shape[0] == embeddings2.shape[0]) - assert (embeddings1.shape[1] == embeddings2.shape[1]) - nrof_pairs = min(len(actual_issame), embeddings1.shape[0]) - nrof_thresholds = len(thresholds) - k_fold = KFold(n_splits=nrof_folds, shuffle=False) - - tprs = np.zeros((nrof_folds, nrof_thresholds)) - fprs = np.zeros((nrof_folds, nrof_thresholds)) - accuracy = np.zeros((nrof_folds)) - indices = np.arange(nrof_pairs) - # print('pca', pca) - - if pca == 0: - diff = np.subtract(embeddings1, embeddings2) - dist = np.sum(np.square(diff), 1) - - for fold_idx, (train_set, test_set) in enumerate(k_fold.split(indices)): - # print('train_set', train_set) - # print('test_set', test_set) - if pca > 
0: - print('doing pca on', fold_idx) - embed1_train = embeddings1[train_set] - embed2_train = embeddings2[train_set] - _embed_train = np.concatenate((embed1_train, embed2_train), axis=0) - # print(_embed_train.shape) - pca_model = PCA(n_components=pca) - pca_model.fit(_embed_train) - embed1 = pca_model.transform(embeddings1) - embed2 = pca_model.transform(embeddings2) - embed1 = sklearn.preprocessing.normalize(embed1) - embed2 = sklearn.preprocessing.normalize(embed2) - # print(embed1.shape, embed2.shape) - diff = np.subtract(embed1, embed2) - dist = np.sum(np.square(diff), 1) - - # Find the best threshold for the fold - acc_train = np.zeros((nrof_thresholds)) - for threshold_idx, threshold in enumerate(thresholds): - _, _, acc_train[threshold_idx] = calculate_accuracy( - threshold, dist[train_set], actual_issame[train_set]) - best_threshold_index = np.argmax(acc_train) - for threshold_idx, threshold in enumerate(thresholds): - tprs[fold_idx, - threshold_idx], fprs[fold_idx, - threshold_idx], _ = calculate_accuracy( - threshold, dist[test_set], - actual_issame[test_set]) - _, _, accuracy[fold_idx] = calculate_accuracy( - thresholds[best_threshold_index], dist[test_set], - actual_issame[test_set]) - - tpr = np.mean(tprs, 0) - fpr = np.mean(fprs, 0) - return tpr, fpr, accuracy - - -def calculate_accuracy(threshold, dist, actual_issame): - predict_issame = np.less(dist, threshold) - tp = np.sum(np.logical_and(predict_issame, actual_issame)) - fp = np.sum(np.logical_and(predict_issame, np.logical_not(actual_issame))) - tn = np.sum( - np.logical_and(np.logical_not(predict_issame), - np.logical_not(actual_issame))) - fn = np.sum(np.logical_and(np.logical_not(predict_issame), actual_issame)) - - tpr = 0 if (tp + fn == 0) else float(tp) / float(tp + fn) - fpr = 0 if (fp + tn == 0) else float(fp) / float(fp + tn) - acc = float(tp + tn) / dist.size - return tpr, fpr, acc - - -def calculate_val(thresholds, - embeddings1, - embeddings2, - actual_issame, - far_target, - 
nrof_folds=10): - assert (embeddings1.shape[0] == embeddings2.shape[0]) - assert (embeddings1.shape[1] == embeddings2.shape[1]) - nrof_pairs = min(len(actual_issame), embeddings1.shape[0]) - nrof_thresholds = len(thresholds) - k_fold = KFold(n_splits=nrof_folds, shuffle=False) - - val = np.zeros(nrof_folds) - far = np.zeros(nrof_folds) - - diff = np.subtract(embeddings1, embeddings2) - dist = np.sum(np.square(diff), 1) - indices = np.arange(nrof_pairs) - - for fold_idx, (train_set, test_set) in enumerate(k_fold.split(indices)): - - # Find the threshold that gives FAR = far_target - far_train = np.zeros(nrof_thresholds) - for threshold_idx, threshold in enumerate(thresholds): - _, far_train[threshold_idx] = calculate_val_far( - threshold, dist[train_set], actual_issame[train_set]) - if np.max(far_train) >= far_target: - f = interpolate.interp1d(far_train, thresholds, kind='slinear') - threshold = f(far_target) - else: - threshold = 0.0 - - val[fold_idx], far[fold_idx] = calculate_val_far( - threshold, dist[test_set], actual_issame[test_set]) - - val_mean = np.mean(val) - far_mean = np.mean(far) - val_std = np.std(val) - return val_mean, val_std, far_mean - - -def calculate_val_far(threshold, dist, actual_issame): - predict_issame = np.less(dist, threshold) - true_accept = np.sum(np.logical_and(predict_issame, actual_issame)) - false_accept = np.sum( - np.logical_and(predict_issame, np.logical_not(actual_issame))) - n_same = np.sum(actual_issame) - n_diff = np.sum(np.logical_not(actual_issame)) - val = float(true_accept) / float(n_same) - far = float(false_accept) / float(n_diff) - return val, far - - -def evaluate(embeddings, actual_issame, nrof_folds=10, pca=0): - # Calculate evaluation metrics - thresholds = np.arange(0, 4, 0.01) - embeddings1 = embeddings[0::2] - embeddings2 = embeddings[1::2] - tpr, fpr, accuracy = calculate_roc(thresholds, - embeddings1, - embeddings2, - np.asarray(actual_issame), - nrof_folds=nrof_folds, - pca=pca) - thresholds = np.arange(0, 
4, 0.001) - val, val_std, far = calculate_val(thresholds, - embeddings1, - embeddings2, - np.asarray(actual_issame), - 1e-3, - nrof_folds=nrof_folds) - return tpr, fpr, accuracy, val, val_std, far - - -class LFW: - def __init__(self, root, target_size=250): - self.LFW_IMAGE_SIZE = 250 - - self.lfw_root = root - self.target_size = target_size - - self.lfw_pairs_path = os.path.join(self.lfw_root, 'view2/pairs.txt') - self.image_path_pattern = os.path.join(self.lfw_root, 'lfw', '{person_name}', '{image_name}') - - self.lfw_image_paths, self.id_list = self.load_pairs() - - @property - def name(self): - return 'LFW' - - def __len__(self): - return len(self.lfw_image_paths) - - @property - def ids(self): - return self.id_list - - def load_pairs(self): - image_paths = [] - id_list = [] - with open(self.lfw_pairs_path, 'r') as f: - for line in f.readlines()[1:]: - line = line.strip().split() - if len(line) == 3: - person_name = line[0] - image1_name = '{}_{:04d}.jpg'.format(person_name, int(line[1])) - image2_name = '{}_{:04d}.jpg'.format(person_name, int(line[2])) - image_paths += [ - self.image_path_pattern.format(person_name=person_name, image_name=image1_name), - self.image_path_pattern.format(person_name=person_name, image_name=image2_name) - ] - id_list.append(True) - elif len(line) == 4: - person1_name = line[0] - image1_name = '{}_{:04d}.jpg'.format(person1_name, int(line[1])) - person2_name = line[2] - image2_name = '{}_{:04d}.jpg'.format(person2_name, int(line[3])) - image_paths += [ - self.image_path_pattern.format(person_name=person1_name, image_name=image1_name), - self.image_path_pattern.format(person_name=person2_name, image_name=image2_name) - ] - id_list.append(False) - return image_paths, id_list - - def __getitem__(self, key): - img = cv.imread(self.lfw_image_paths[key]) - if self.target_size != self.LFW_IMAGE_SIZE: - img = cv.resize(img, (self.target_size, self.target_size)) - return img - - def eval(self, model): - ids = self.ids - embeddings = 
np.zeros(shape=(len(self), 128)) - face_bboxes = np.load("./datasets/lfw_face_bboxes.npy") - for idx, img in tqdm(enumerate(self), desc="Evaluating {} with {} val set".format(model.name, self.name)): - embedding = model.infer(img, face_bboxes[idx]) - embeddings[idx] = embedding - - embeddings = sklearn.preprocessing.normalize(embeddings) - self.tpr, self.fpr, self.acc, self.val, self.std, self.far = evaluate(embeddings, ids, nrof_folds=10) - self.acc, self.std = np.mean(self.acc), np.std(self.acc) - - def print_result(self): - print("==================== Results ====================") - print("Average Accuracy: {:.4f}".format(self.acc)) - print("=================================================") diff --git a/tools/eval/datasets/lfw_face_bboxes.npy b/tools/eval/datasets/lfw_face_bboxes.npy deleted file mode 100644 index d3988c31aabd9867430b27768e303b158a0f0dfb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 672128 zcmceeWm{ES`?mp6?C$QC?j9%IAtC}If+!%}A+hK#yJOpW+ug0(#BSXpcDG_Z9ld1Vw!pMKrSUcyAcr@6UJKy3c zIHZT}W;aH==Fw%gGbfw^=y)uJMUH|AmerW9%jMUDN_w$wld@8~c9w@w-(f)3=+WKEDLZd1?HsQ%2hOmK;&~Dt-(Zf$JkXR_L1WUdx5w zld_qv>cXJ~{xCO@r0LLlY!R9@>11fi&yo&Uwy6`Neyd}?&zld1x{T4a#c)a*UOnvC z(8Qh@FQZxA#E_s}Mbz8IG3`ZBqvPdh_Q6s8j!5#=B}pTR{`$r&IA%x9=NwY6+cW34 z12103uvEpE^AD@|;FiYte${N7)PnJ zFPnB3su^`kje)%%i_VKpNlQ!RcBKKzcU{Q-r+~F~UYKm~<#l@n&3frFD7^;n{aK_8 zn9V%bt{C@wD3;DL#5693*W0xzY~sY#HwrRdIrB~3p1Jcfs0`63zjqDgg=rKVsNvd> z-y-Sd1F>(lE-eXVN|-)D@11xZp+NnEJ+CaMuw`T%n=0XnZW#+ka&>^T#n9|Hl!DP7C~zI{Qc&|d4&cy z5?yIBB8`*Ujx^ov#N6G^T$FXwxzR_Y+YiGI^ zD@b_iKvzO|8jwo7T-WcF%ScMj=0D9+j-BYkiSsJNbWms02Pay#)o%!$jp zy=fVeK|r`6U8BqBe=~#AHWehDYR&hhO&Ogsg8Q@07@lp)st+y%uFqz1v^^@VgHiim zxZF#QY%(h%%rO_|5BW4~Y{Iegf5e?LW5}tu=Zm2sQ}&Oe)+htNCbsB=htoABk_r0b zF#lML{*od#1m}_$*$(@H^WxrJeH=GMGa}ju&-;!P-pr@qp_9zp0sI=A#UW$C+3X62 z9?YfJhYGfw>CAuU8pKYsF&v67T7o5#%R|mA4dNGVhI<2=PHnN0{(=ssp zQQYY3)_v~8m|tgvr}`K^UH2e3-I$AS>}Yu)iypVFakw^)p4;O1SLUTh>iO7x&7jHt 
zT+ZaTq_x3o@gZK9dc$CTshglb%ZU+P@+k6i;Aq2SwttLes-_J$2bFUmCzoLDGVHEK+`H zurS=8rN8wk>}7|`_-qQ~XPV1o)LkQ3{@Ro-9}5|uBiGBXLfVdNP4R?VqIl&P{^{aP zy{?h0MK);nQc&pZ$jlz&_%$>UvvY=Y&nRHy*d+87MHnnnB6#B?(Yj2V&F7|a!9$Py z$2LR_QQ+0ik-fze*tjT~o+tHa`JsfeektT{D8naKnW3W|iOs>fTnh~&?U*rrairhg zJXZ9wC;W9NAHK)X^piF7*Opg!gBaoa7)Xj00eTqE9X@S*#&4D>#G zFz%N<$A%WspyEaB{rQM~`K0L2!8fG??Jw*R^R%JjLJr3gG^tu?&q&KWT3b6&yU`gR z^K2qF>f`EB&Ah(Z%(JVZB)S>Cd2d8woeiBMGw9>5&3|1SsaRUTydJLT7JD%@I|a>L z9a?poMdIu%*6y3#=}Q&IR)8lIPQkoD<^E%DOlLgjW~wvaKu-ORL2QoVK;$}^?LYOd*TwB z&Dy`Vd>lBLwY9PE)d{U-tC^dLrk6F7`nvFa zOezV=whV1v!SLBRG%PA1t@|%gQq~}z-;;fQP$=K3bm>v&!opK|D7>9`bT5Rf?Gw?w zZo%Fkl}!DUPgmti_Gz`lLA({~Wgd8V)t|ws2E;nLp!7&S&uR87@$sSg>v(FfTXS(w z86nC!a`ySZk9X^Ey{LZ<;%t-_0pEk@kuUSab4N@s=Q4AtEAJ*x;`{exoR64tZD=|E!?GE7 zw1V2Qrff0(CO*qL?AUKI>G2kPU1E=}Qa)*RZfFb*rnWSW72}NgnO4fgmKl@;RS?(w zhnP0=wdme+IE#CjvFWxx?{i@ApcE2P!7^bYAN~twqrEwSFS7|Bp1`3G*<2ajj7pb! zaYoIM->3cfoaV+CcWZv0FJWkt@eJ%Ron3zu=($6W6AgL9Z;c~Nsl3tGtqbeOf!xc& zJl_Q8cd102)8qI>dol}iiSx7Nl=WEhSI4n%l{p)gWhQy2V9&h@s)C#1@#?i`9cw_V zqyCIa(nq5`9PX5ZIBLiGf^n>VGM(Dtx?G)A#Ns*0r2Z_y@9_sQwfQ^IW~DKX5gXJGe+1ul}a96u0&MnZur$;cw|5|cX z_Af{0=HeG>%lr08gm0bB{1G;2eJf$uoh)*em-5el-SL^eO=QlqfD{D}&ue4y&W8F` zdAOgjr*)-0=9O7|Y;VBdsw!3t$Ye!vH5=}>V1?I1vC_-{^Ca1qA2y(un_!bp4n0TN z5SZ&l-HdQ1ztuhB%Eq&(bbcG zW6pHSRJE`_UWCu~aK6iax7nwbEXn#;Ji2K{W@?t~k;m|$(?o*yXQFsu&yZXnIlE?X z$;Xu!esg%4QcU)Sa=NOwA*blFnA^h$>#R8TpHs(1*_~r!({R$VrS^&|tB)q2-yaq{ zu3({#5#2V6 zrp-#wy!BNyY5P-D$@MV8FPPbr4S2oT3KuOo7h5_pb<`wEe@5bYMw7KWO40t7PSV?o zM#md9u@lR_yc3)9&A754gtB|ujGgbnv3EJVoNLE^i8&Z8Pv-k{I|g^Crt`xBeqJc! 
zwsRX=G@KQ2(c1i&5rdkoA?MFIaBjYwr>8lh<2#<$NlEZpy5+}_3KJq9*r1f1&8ADTAKW+r-yMm}j~UG&P{;ZnUa z*XQa|x62icpN9Ofv}4Yk9PaG4rKta8hW~FWQx92kJ-LXBp0Y>KDdJ2I6$ae+B*vy| zGgrlzx$SJZyTG2m4~lShwZ?Nz8VBx8<<(FRj+U2DwV;TQPesgI)|NSSw}eubF+M)w zL{;i@b*eQV+7_@N*#WyaZ|+=8X1b9fMqcF{5ya~o!=wHJ~^*kJYRB&ujJ30=#Ahx)fkdrurm!H(R7U9I; z(kxEiaAj9}8-}T5qrX*~A-**X3QJ`C5;^zly%Y|2YDJ3%BV1m`@o1ths$p_I8k^6` zxgNNG3t{j;**mEiU|du|bXFGq_f|3LWhaiWsuMkxblBW{Dm8KrpWoYwu^X~EHQo-x zweIv>p1|U#7KEi$5M?FtxZ`Ey>bwyXmF@|hP<32FCh_Z&9_8B|Fm_2Nu!B8n%Yw-q z8-snO1$m_)^s;u^q}cX_@Ph17CUlJ<+3x`o|P5j$oeGnb&eTFzLrsU zDT_OzoGs>U7&rK%82?&}H+Lsu6<|iBNU0cnH>~vOJ&unzO zId8ji*Y<%J@1a9R$z=94HK4hL4f%5B5#bIjuXMqUP5HTs4UOkqzd6O1{_m*;IO+4pMmDAVqmIz;TLu``x$&okHDIc!K zbR8GcvK7qf>C7_I2%P+9@MpL=bvG)xtCK-=zWi)tyvANP=RsRex%?I?=Z11*mNivf zELpk8p7;y}AFsRMz1@#ZqoWyc)|9i?3fTHR36t+~4*YaMgzqU5%l-|d$;V1ur;H)y zxiudx3z&P&oFlI%lXqA_PXhzFUuKf^wt(w>tNHY-IhiBw2wH&YwnV}Q4CQQw8yn}R zv82=rouzg}HH~G{D-(`wlKA{41^0@|DEr!(YX=%c=0q(_x4O~!wFNb+ZMeH5kLKH) zSX|=62;V3=KQX1Wc`@;I>D-AfrH8mAqK5qtE{R6O#Cu~GrO(anPE-bD(RK!`QY7G% z97XwG*`wQ+v(qA%ZU>7|3T;c%+t0+}W5el@VUPD_OS=16W3e|EoyiVF{5y$+=~0;M zvS8oBJZ{ZQXZWT9-d^rYarrCZy;h5VdwX)My)h#)tT>je;KmR;MyA>EJ~o2+>x^ja zUL?O)A`dq-K3-sacY;>F5x38dME{rryfxwD6Gytqe7#!Lp6@ch)N95tBS3NsSw*aJ z&BAF|A-@s_qw@5wh_=(GYR5z_Nc?5*D+iRDq~N>;s?J;UV6n`7RUY)Lko|70>_LAP zGdAeDP&)chOw^Hhey>Qb)EjWi)RrCY3NHIOvtm>@UkcLrm8!!e?Mjv!CGciMO{3%O zsBXbsYbBVa#hnSVzg=KJ@cD6^2vMMtCvn%XAl&s5vG3}{Cd*2`b}OL2C~fq8Ha4E~ zV^vOVxFx!2YjVRh7}IB_d>rn`@+Y}0k95NKO$guKCbCdsc$+2uytOWy2|Y_0U8BOi z;{C#{K$oD-VcfiJNX2^>KJUpTrOciVAEz;+dnU;v99VZ&@-Mo1T%IXwf78#RmHGjZ zeZrX7qF8>K>GE}mEptxg@M@+Nr?f*jTph=$lY-_$OV}mnvBw6*lr<<*aN>ZNm!QWr zl_*+_)#Y@S4HeD`zOS;C@qPH`Spr(wMzsB1L{LOJgQpiaI$rHFRhAstB?g|>$JjQM zrZV5<=ULP2Y5||WIn&EOh@QdexUMy1c1i)WoYN`pA#uV+-e+UuHLGk8p1Lw4$jX`I?!!PoGnq(6Nq@@4&Vdo=}p zxm2%NxpHlD0T&-l;7Ls|N&jZktA_zQCX{h!NfITatC<}6Uc6lXL#!I7OPZKUn*#>) zU+Ko}>p7_CjpNpMiO=ts_1jJl@8K1^`J2l98C5iwyy3nXpG1AUDcXi9#2wQmG|8Q3 
zuKDZ_^}x{H6IZ`9LVFqG)nX32O%xnIbaWBgDamABDnN7f5AogYj%enf$GiohoX9icW@|^f{K~=ikO%52 zft1;$lDJS0r3JE|cS}NB&Jm4`H&N%Q=-dCjnBUEuP?sP^t61VF^KD6OA+LVAur4Zr zw(4=@+_%PeVg+qKDEMET#1$KPpN)+-^_D6lbpML(Z?su_Y#f>)pnBDg2UiNPT~De=2ZAKN$n{4}tnpXApjyeQz^J4cSak7VWPXwI2B@V03= z-_{oJ-l1jyRo@O?Im7Z$N) zh{XAJv|{<3@8ZH5J)G z?sQjT^o8rfP>LAleq7pONA0A&%kC!F)uut zqMh(Ttmvmr!6qLJ#~G3J&KaYBa>z(_<9Nv=&dUAXM`3}@>{1+`W=dY8j9b%GaP53r z9MROG$S{T}m8Lvx>&o;#a*jLfN%{2@h>&f~eUyO89~cCtrI;IC;0=4<`YN|;G{b`@9D${T%MjmB?bee;kgx}n3i z@u4hFv}CZ+n0GXI&y(!a2jwsxI)KI)tpFy54u9qG)H zc$RjkBjHy0+#BkQj;bfi)zTTc$b@kJYNjsE##{2+Pt8Ay%5Epcrms3IJRC{K)??5u zlUTe*I*S}!2#+7fwu_k6FauHFO+%_=yx=e zo+T;xmkL}m=d*8i0Uh_uMenT&4IfolCVBVYfn)i&$cXJ(P7Kvi@L-rT&T)YZON-)x zsySN+l`^S1lPwYlJ^kpZ$k2Z*o=E&`e?tJ(SB&^^Nn&$da=9_hlY@&VQQb0`y=O*q z>{k&kts@v`SHYf*N`!s?EWWRnc%oe-4+k3(^TVCJ5&5*x@nYbPU~VaA@wvj7;X|ug zKTh(HvuYY0&*Y3UZo%)xw&v=5>E%a*9PO{FIT6ykfL*1YxOs-NZ+bGf4w~|CR|%J= z&7gUGd86Y6uc#OIhQF0~$Vk>Fxlwllet6rl$Ul>>aZdD}7Qu~qNetX=M(o!@iV~wK zXwv^R?*Pa|}Ojj3e=sF@;)oI8|mde1#Lw?56PbVFX=znlLWCkeLyQ446>N zhE>YEyZTP(FVyA9^+}}FnbKVH50#%}e|FBD=PqF~cP7I>Quh>B#u7Qx9SW{MrB!Q8 zORtG*yN%Fx&&EGlgNvbK+1yNG)breUGv9^rhS`jKZN$Div+&W#s|azgb{77zb8@mdy|d<4%W zPn;liQD57;k!3f7{&MbdtM}x}@<@EIOO2RI85!f`e)v{QTwpuee5@BvpN#pR#EN%X z>R`Rxi_l61{hQj-y^}AIQm@c&j1vVhvv{~!a=DfjY}(X@f@N>TzrVE!$c$m0x;~vA zcoOp_hc^Wtq_6Yl`TB^K{yAG!tuy>=09~pUl+Ug8nN?3Bx|Lf#jTk;O_Xx*>Fv&> zu9Bm1%%RI<3&u3BVR+{(*4?Wn?QaK(Ei2K)QHL|@{3wufYN6zNy?5uZ?xz#M8om_$ zFN)_nR(Srdz_w!!RUf3TVeVgHt@t7`t+e4WIFD zQ?M!f9>p-=e;Q;hb0G9xHcB}*Jd(3Vr!I*sYhp(7?Q)5YDfry43~ha7P8+-zQ%4%n z^l3O}wi-#k#EF3i3yFB?M09XCM$O_04Yp*3LnRm2<>Qo9$u0GkEc)_Fw7R4t*G>>; zPwSG?)SjUu?83EIFNAIjeMUP)^Q)C1ue&)?_a}#- z53ZbdbHRFh63e#h;=V>*eEmio{j9~y1V6T4G2qf4SDN}N$d(xEt!Y85*&K`dZ+jke zk-FBqc?@q?%+8Qk;?j^$LY$L4iD3xmYxHr@kh%n81y&Mgy7+Y}hQp#bcH5YhH_GU` zD}ye(%US!rJwHk=i&Rq+OlC%NdWj}W4aVZoG@F<8c1->=mW<6=M0(gWtz9)m5AuoA zEF&kgE1{GA6RodGT*+xFiIQ)2d+ouRyE(KTYLCYgZ%R(4a!q2C32N0CUnrpBdKnI5 zo{9GEyM&J9DUX%O=XONmS=a38H@bj)Emxd1BU$ip25!%dx%s~e$-QJ@*G9%wZ9(Gq 
zN5W`>I=Ly5rwp>-@>r=`3d>`Bb9aKe1rcJALcN?vBkvbu>OO?E}a6zqk4r&ji-owh1Tv^}L+nBPbZxx-xB)=*31wMLe#P6&X{=;Iib8ZGZ*8+o(dc-MeSgQd>=SP{tr6FI`|#dD zLF@^A#!a+lSBreS-@w8#;iO5e!NfW}jN8=UvsZ!Y)@nRQs^PWtg^0VYiRE^Q)99O$ zu42#nF}dvR=!E;CKxVCs!{Vd`yX*^zGtT5~rQ}r``CJ+sZ&b!l@t}8|m@o5(uX`LZ zA!ckn?~4760?IYKn5QwFO}A#qKHU@%RYmvP*{s=A!<0WK#m3LOgv!G}V#Zf8V?Gp2 zw8ddmIf8)uU=ARPWzpfgtc%ZX+a*F7PxS-E|9fV zQT$!*$T_!C+WX{Ew4sF6{Tsy7gH2d3^_$%<`;ccMaXxKtRJ%%E+ryp5w}UtwlSUJD z*@NA!Bz8$Ij&V}Iwo__Ujg`6bMVDrop7b%-BJsE*neOTAx$h*OQwS>O<8W(ggnyrM z7IjWy{DpF~)Z5Z()I;HDZ_1NyA*@?!PIinX^FQYE`L7c@G`wluFPhFPw29avxxK1H zOp+w-v!E{?X-9?UYD2EBiNH9Z&(gds{A47+B>*Ru-#o9W6{V?WxvrgGPH4D*&3GuJSI2=6kco3>z= z;->J9)WW!MvefJvQglvoi=%R+rdaaZv3`7z*y7X;=6pXXYiUdt`_)SLK39p0ufK|e zF=JRi$zAsG2KbC{hGE-y&FMW6p#(h1497^WV`za@u!f-ib$|`jIZp zXZ!POo)#xBx#F3bO#KQQPJNTlMn8^1TN@loOPMuZ_8~J0C0EoOE&oU2>19p)>H`_* zV@w-0M*=rXU1hl|A*K__ut}p~fGIEbm(o?%v?t5U(caer!$q}1y?!LaoB6W7$dHAt zoH?11jb%RvIyaxlf@Uo@lJ4^JsH zF5sBd==^dI)`cK+=O~Aq3SmHJI z^#75K>q1$(hWgN}Q=;TO3>m$sh>qTg{5@B~kwu-*TmM0*Mr*QaiZ3x|)G@5F$515= zM>j`C+6nf%#S`zV#pyd`T(XYEIG~(+_8pkox=tK-(q=`Z2Pejv5}@hEsJt8oS=({# z#bjy+C-MG(JMUi=QP4!@@tl0dxBV=<^6ylp-$s0jlKKhHELP8%$uaBB_-j2D@49HSY~4gUUeM;RuNy-%Q(5@M4#%c0I9Vr< zmu$t=*=2ZtmYk4FG2IkuY>0X!{5$EgrZ}8>B|}bav1e9XE^TdH_$BoyEtV(pQR)xu zip%-CFrAkZDzR>*itna(Vy}TZYo|}*<7+*%o_ca;Vm8gTc}Ne{L>?TFnwKIA%$v(R z;h0HCOa*nHo{IY+*To7gIlpQKQTwmdm`2;-o}P<#t|QL_;#hez0ps3QYz-_$v3mxw zlDBVP_Fc@`d`H~SGC;j#29{bz9G&Mvlu`k+gFH|8r&h%F(ZA|o8l$?bkcoejNc~ZY$LuD! 
z)L#=RdFo6{p3Jo#5)YDnUBq4acYnD-e}8(NOO@~3fUD`n7`rE7d#My-yLNa_cqJ^B zs549A6L0@9;)KG9>9q=Uw>gutd^{_{l1NW8fyr4X{OqOHGAx#fxyDSGTZ()24C($UqxZkRgwv7tV&#I-6d$lcsf`Jx z%O(HXTtWF(XVg|sW7yPK>dsixscj+tvNz1?AhEpXY7AKXS6nS0L-1oeO12oIQR~X$ zoNV$c9C)-Oi0$8_aL;nUyH_y3Q6= za744Y)H(V`ve3gE=iVjgxMk72gRDzQO^7f2DJD8ZC(~7mrHON(wPg!r^PKxW9C|=5k6SzpH!uX?wo?&8?2C&5|&ta*vI%S)KmA*Aand5k*h~V&V z3sf`8ncqH#C4I~I<=u=^>s3+6dSc||hVws$TMUp1HdH=jD z#rsYUucZ#c|9%Tfmpm0GBSvw(r?0F>(g%^_$lyphE6RMGze&#W(Mf#VrNNqBCEV^8 zM%J3rM&D;+pJ$Q(Cy5E~6`zgF=p%8f>pm8IJPumYzw<4~k%89Hu zR5oQ2pI4eE*LD7JaeRRVE7X#)%ruiaS$p1B7xTHyg?Gw9bhXQ5{mb2IN zyZE*DuK4{|@+;<3nAg$(?;}>c9a4y9fSvT_P2;Uy9EXfdIVgFx*3EL!lK!feOM6kY zsX?5p)aF3<30Ryk#W&WL({6H()wku?f^i&CgyJ&R3a@RYthUSHe1xo@Kh?OL_Eqc+ z)}e*%cru3?v38OR!LncS8EDHxyCA-ek77v&N7`O1VZ)z1sr@Kq|CKgO82UljdyPQt z*I0i2G{bzO9Sh{_qO`;rt%?aOyA{d*q~Bnr+~?vc5|W zX8ZJsxGMXBVFf%n=)fkoNFKP#?-y-Eyjqp?#VIJLt#0&np{pd`YX4B2OOd+fN1>Qm zNx#~DXl9m0>rynm410->UH)9gGP9O!Tdm6YfZ-fHZ9&H;Mg(fO zQvM(Vhe^(ii}J&KQxsD^2qynmMBJ)$7IrV9vGELJ{|co???m9nF&IkR^J)i6o(`~M z?fE=bOU`eJkl3zC1TrGLK8rgEQh!8xjL|iT29UZ`eMKcWZTM{#4E&bFx`dWW!*caa76q zy-hbut}H9V$Uwn+1F189yHk`+-XT7*Wh( zR4aze*=0 zXa{Oj7CVI}YmM1*#1_+BnGY0>9I}+yUs()ct4z7KzJ$ceOgdYXv7*^8kzamF+;NaP zzwcAfwl|iZ5Idg!$z^08H+IwpvSm{$#!?@s*QtkEZcP;@-XAe-I0G>!j{NlV_`aasF$< zjFxs-Tji2|*oA|$LZrSpj_t<`I4|>xVdrG7ZY-hJusIF=Zi>etdNjO>VZMnjHEqU{ z+&PZS?o5NtU*UaL&Q<5U*|6Gx`WS1BBzD=P zr!7Cz-I$g+jVn8i$r&&8pC8gVale$TuYbkCoYP|BU_Ji!l77o71HxCjviD#PC10Fb zusRs?_A}VFRC*#Nm2>GI1#h=B{{3Y(c`lBveF{-7D1C<;P;|u+8(FXVCwTBn z^1gdi)1*&Lll^vO(w`@L*#4Dtywi&q(|f`r%!uKqlX;p?v1G7`oe#nIX|qSE5Hd2M^*UUsVy~hQ59p=WpsR!@p%;?w%0`DcVZ>b^83o5y{J(&X^ zt8nf8MV#I8OuQXv&iX!Kw9(R}&eDZ?sdJoCXf1iBVA|+JlW^4>)DrWdZ zW!6g_(v}ZebQ}@LV=sYjo-M7USEY28D?j^A!#X{N^$k*Qd#Z>L&C=+rQHHOc)P8oU z6^q(Qo$U*MGAYC^LmO6oWCw9g<@i{S)10JKPol`)Yj$vGpyl<9`DmTvlCvN!YF!XGY%>8n| zuW=)1TrOo+jtt)%$f)ELPWxEP99GGS^SQi|^O*XJR{Z+?Lu~ceV!gjR7o#ntzTO6Z z=|6ti-hp>Y0Tho5qiaj)qd6wES1;4~Wm1fz$wr~Ca!9OS9m*By|JGQl#jrLu9JpJA 
z?m{OLPKIz`l!9F;LwIvw4*eI%a}UnWZ}fHc;VM)tcp$DG5quwCz?Fc}#09x=?t2dX zUO3arbSzt?&b{uwE(fp7p{79kB5P-(az*mm#Y)njrNtF>4~{gEJW!MazEWee|E((# zMUuyx8Ou1K&zP%4Jm{88x>YH*K`rrm)1F(Z!*HD-wHymgcv0s}lze}ewvQ#d?|3Y( zM-w$jpLr$4{4t7Sx8$3LH5~Rca?Vhh`cR^eAZxHSTNbD`s|gSy`FK|bX;pIpPf5L!lFrBZpfZ( zCDcDip}|W&mqy;_`^e7h+4MxLUZc((KVKfp`Y^A*JH2DGsW)?^cgi?6-%Doy23wBT zl+!(3L1ukPqvJL9`!)1$&zYhxB11{`>*uEMQRvWH%Ymi!3hE4O*i_<)y3CK?{xxUV zoC+59l6-e<8F4>blRM<4u(2@amH%|CJLu5nmJ=Jo6lm^;!c;B%j*#yMimnT<}`u%Z-oH!xl106XUf}0*6c6JmwQd>ro2L=e=?R6Ei8B#Dm52(rGG>AJq{9=dU*GqSg}-} zPtW~HSZYGZYAb#Z$){TCkuv`VVqFX(8LnvU$>~n1LnAQWJ4WG=ypKlu6t< zWQ1#fXLi`+GNzdwdOgCKZj!*lX$}-mDrQD~9;%Lo#AN;_+B=^VDO$RcPmH3nN}CHa z?TOUSMzz|JN1LZ`etHs7KjgZtD@A!%GUHB`HTpW=MZZP+K}W>BnQK{&CvLxVX4Pm$P_$YNuE^0~)&94)yrH}bQ-!L9N)8uB?u~chi zOCO>;E1voC)h8RxBvW24sHCrJHa1ZeJoji#C&gpYezi97!I3=G(<3!kdI3&n@zctI zfA_o5d~-DK|I_1$yY#JWOU5^_f?swGqJ7O9alW+~C0i3DW~fb&M5P{R^I4Pjd* z;pq@Y_7!8Pv8=#I;yIc7$~ifp7n|nRi(lUA+_)CNo^^VBj`E`JKsx*6xfh0+t{mSw zgCxn(wK-EsONnzibt|LleKS5=Jrtdq8!@Nt6z=8;+IJCTJ}hL#0Xyznhtf^@P(CCWz$Pdj*Nc=}CK4}=z+dVU&aM=^d?|T!9XmOLOeUv1n&Y#LIC8gy zudNmAuy6c*&RM8Tuk<<*c1@dyk`JG6V!#G78_M1(C^m6q{Z&8i+>7DxJ$?4=DQ2j| zneW^xrDFAOq1g08wEk_tUMFA1%-7+@cG>$hk@=qbxrWp{xsdeJoL}JHBaZZ@5%;nls50WQNVZ)h3&ba8Ycwjjbyi;i<{pIy)EhtL+ zEh;f2G-Luf?F?AC!GXFf(igeRPI^oIh+B2qgO`MRf$iGF6UHITW%U(6Kfo#-mqf| z8}>>4c4vEf&dH?PXL&YJnKe0k<9U8`G}HE0&^t7W_dlxW5~|7!lW!t0NSBfmzI-e; z;fU-Xll$cnRO!moX@Tr`8c)S06Wk;(vwd_1M?aR!Tq}8*H*)XFIj6drKjjW481mX^*isuSIr#a zf4kD{RR~2>(&!v*hUM-G+AYdq@OkWCP&h!mJYdQ^fZ*bs9_F~?Z@IA?~i?t6h?m6B`Uax0ZFlR zYg5LTXV-+~S0((W8*=@m^o4dXWs2mPV}9hYW|Gv|T}ZQ9!SW5H9IdZNm;#O@- z9KxFOaDoOE`SSb$VZxhR@+`5-vM)I5%+n=4>~@&Wu%VI%xm8SgnC$C9<+&%?slQS z&xY`#ne3J46BT%SQtY0K>sN1%de~u`D0z-38;%WGjCy=FLtK~qpT~Qq-H8cGYg0cw;t`v`)DgmsYZ+ds_l3Tkz%i2-jxJ7gLZ6~&N_Pw!kZM+&pVAs>AxN`yeX?Dz7s{&x_rx&=fi}W zvc%PuTT(B*;FTw#<3m{fPI{`oo6;?%3YP|{^P5yd_g5;^KY1!XT-D@+`84)B7$SB` zze_i%d)n_xT+2Wvk4nL6pqbd0kv@ZX$c+U|L?*#c-M_U9aWPyTOJN&%E#o zj$`pZ>dfv@K3IJFa>wd;jyi9XL4MRT^N4)y 
zldF}9iUZb29O!~4%;~4qg7eZ``oOq9x>knJ`esqFtV!5}az6F+#QtT>+Sl9SRVAQNEj1QEetwAST6tBfH#nITo8R}<<34X7(fpcLd+Ge`ow22pL z3S$v5UkAg|3*fIGfpt5JF?2#_ti5wbjHIuk(})NN1tZ9vbHM@b^LnLOq4&oy>VDis zc}~I4)8z=hR1A|K@+&MA(5!JmjOndTeJg;TAR`_dUIFa2f1Hr_eK}f zUlu{`P!d#KrTBQfFSU~=VlMX}Ps#&PGguW@IeTd>N<+(hYqV1k)NA9h`J@)}AY~ZD zcfo`^WoYwtC?fk^6Q^gGVv;*qGOoIKsB8nzmVA`)xw}}~7VDK`k#j^3ZOy9CyG;&$ zuB}4$#(J^zzxH?^rGg7NoKKz@BBsz08=~kFKg0c*N(AbAlOHm|gudW%q>f3%^~)7_ zxA%{jpYT$=dBFK#bOPqSP{V{h4tV8~4~0fYXb@2uI9O8}ypPjMwcPOsmM3`Miru$ShRZFYc6>!sBK> zPTTVyvxqwJ{Q_8PPeFj04^|J#MVG$XaHy%m*K1jr=`)``qIS@1xG0)tGv7~*-teda z#?ZH&+sggYNLL(s=?irW`Zz(|koU+l!_u&;x(pXo6;Sx(qnId}jEk4Nko{l+7VmXL zI(OtleE9v216-+3XMEN{Pz|{S#oVJDEk^fQ9nli{O=OQA2Q3$Sj2vo!dF`BF`Zp6- zchDzL=mj_Wvl{8Ms_s(E`&|70b1%x%yP{=Bop^d)1D#exV$cq4==88h{WIza8=X+s z#vRv+QsKh=;JFBLf2>n6tF{s|lzXE!`JUK+S{v2Ik!YW-f>8xd*oh=)4-=3Har1*oK?2!Lh6uk} zf(yYZ*zQ_}S7Db$QQ%Xd?q-Ss`58FjI}vM^lF>0H2RqHEg@=crxLpbo=`)iw&cmv( zY(A6cA@b!dvH$sHF(<-|eozTE=1wM;$`YHYhmYfP`yTbwyEU=sd`Sm}K9xApFPnL^ zN_3sy1NA9#=n$z+eytC}qx2A;>V&(|ImlAC!wWkv7=*{b<)As1m{#E18woT*%24jr z77;~{#ORwk_`N&@dKsE{Oor8ihHTut?1U5Dy)bEFDy9VMV*>9H@ueBqxqv=Z_0gE1 zC5QToiI_wkXViKNlt^5mwwCwEWv=8dO^1ZM*uKWh47if}Go1RnMhQAtc0yTmlepei z8_(3ps#Dd$-R))@ewFT0;x z;M}5I_~u&UKszt2^^ZXc^&>aF7q07MpnPWuCQp}x(&B4kVwcJ2*ue`Oq79%Ru|jHK z4qTHR(Cx%b=<9Hocut@1ynLvgjK^NTBAL(gvTqUl7rztx$SGUh(G{K_$v?Gs!@IT; z%#iXGXr7GKs*!lP+y|iEG@!Nw(PXupIPp_VTK`6zQC7wM5q_AN!o8KX4P0_^ajLfi z3ak!( z-9}MQC5xi0unZqA6u^qQz1@qRDC+TDY#2KhADbL-?X4k{D(x_{M;5;2JHa5ro%yL4 zloc4_Kht8^DkMTExD*7o6pG44atTQbfTU= zeFNu)@7efJ#T=51U-$9(U`*TcKpaWb#NS@2Xuq7iM=d*~ZOy?8sXceeWO5#kN6s-l znAlZe6ge1HeE!OMUc<4z*rjw?yttr`{)?iR_2Sy6l&KrRpcOQw%D&!o~H&jah^2jhZ6q)6r>4ZJsL+)_U$UvJ5 z+`*|;!8)V>hPwQBI(5U`Yt16HRU6+zxCb-C#E5yAsX4-Z;D_k z&Nin&zc+u620hTP*Edl@eXq8^7ao%9^p*ShJ6-aTUg?M(?PlUYBy%r&O)xd86wxcP zadLebI{9?RvKybp?Lut?1<%6#?&SC}57deG!Co#l2q>C~$hcU%?@6yZxi+!;^3b}B z@AozXQNQP-xTvLuqVC>k>R<%<)fOn{ympv*l;lCKu-O`cs5*TZ?=Qg+|5T)8Nf95_ 
z0m+-*iUAYJ3%Vc7owF7;o^iv5+w_C0*y2jHFG}2#v5IVubmIzanMUS_X(_Hb_d;U3 z7s9TMHa>ryg^n*x(e0cST*ee2{Foig-poMu*f>n{WZqzKDUNN;M#~8)l)dDkS^i9z zzhu7cYzQu%F~GV#R+xP~53f2o!cKk`e7Lvy(8~Zvrk3I;z2oJl>BmCcsN$#+T#>_9$JNv@XsQ1;7O5@po6RQ zs;wYTr|(c3=w#<$^Ls~Jx2E@TTq2TwsM8ZxhWs8$_|(6WGg2qCJ9b_~-Ze(|vK$=d z-tF2HSG@U@kK_K1D0=6Q-VZWx|FR|4eXD}$l>&HL&z1SSesv1SasMN}MyleJt}FAt zhFD-sKKHvEbfk}IQ0q)g{u)WIJz0|E(0v{#!A{L$Oz7T?{DqFly*d$Qi!IQy#tc=C z4p@6T7phJ!cyGu&`k+|MTO%-BiGHGEnFu&h3b~7q#GUllVhHE(TZw^a!`z|YDLc-A z5_EZGi%&UWIK+Jaj7^4kTS@-vzHBVzy}+~Xwr~!5D~=vhhxS}@+FZ>rg?q(Dg-ras zYJnYvF-Vt2qEW>Rwyn%gWyGSx<09nk=?9P3AH|f;+E9Av4!aTqtQcmEz5Vm>#M2VB z2g#FD<6caBOpae)8dqPHD(bm835 zIRV*kbWlE)`w(i*gZPeEK0qF6-CD$`rCQiC&KYeY^^x=05l47GdtpI;RzV2%Bu3!u zDr%kgitx8n0iLYON7#9Jc;)>PCl0Eh`2yhm#0Vc(*&xkXf{&BQO@BQL4#Q*MwBHnI zukw)mFAZ-x^J_2n#kxm##ry}x=t#!v$)V(mX*pvSd4N|7&Eb31lf0}%4AODHW&aAe z|jB;+Dq(;WAPN8!bi*;wmnhq0ZCV7M?J z%1Q-j=wgK2*(iRk6BPIbo|W<743>%*ZkA1g}IaKgO+I~Cf} zm;X^L2~s0(bQ)&cn87c|8XZmZP{Tc@vPlp^A4J3Ss{!5~WRB$Qc;2l|a6hsPHEnayoqm*C#|L4N^rV<_NgKcO$%t>& zL1*qke#_PX#HjOy>aJAa;yK`Y{*zLvg8?hD>d%}1_5DfxY;MfI`u;`HP!1NY#>`-xp7IX6p&rF3W zy(a4uQeZQ~5J$U|;??$KG?H&Bf4CdW&;1cLsVZnc+@1O#Ip(1b%qfzu;Z7e6|AwBW z#6t`*!uFx$ou{XujD8QviEfxQp+)$fqrbdqD#Dm=sW74LH#Qqjf78R#&ka|y$mg>& zz~rz})H0ipJe#??wa-P)->*Xd5B+r?qHyt`Cc-19@Q#Sz7#e0GB+D7gJ9}cvk!;BKvw`ln`G_5! 
zj|o+kFdy0#A3FaL+0~Pw{EC^xS_AYyWshq5f<3m{A>;5&yc-*Xqtna*?NWLM$uv7m z9-xfpm2dBiYd_>LzGghe844uwb8oVn{LaVO7&^!i58b_y*)bYhJ{w}*9%k?dB*P%P z2wlg>V^~3p*m_SB-3PnjftmrHdAnfY6#6wPTo5M$F?MD;=B)(0UY20uxE%Z)SSs`P z*VpK+*mn7oSaD7R9x<~JXTtk(p*^;ekGA^&eL*>qsNBi8$T>c20- zJaHTzv?c#2!UTb3HYop_LzV>nI>Tn+-oOYftha#O#(d~1W#I%h#rs3}9*cS{epOCJ z%61!k{X{*$lz!NnT=nZ5DYpkM*3@A()sEDMj73UOz=A~agtAnQ2&joh0U zz12nARSr-wlVAe*9G-(BL3h6db7O{AE$$f$;{n;3B?9W z2dH(^#)N56WUT39wP_g?pYeX^Sq{C?zr=;lTZCkb9)2ZeL))DDZ{Cg0-=SwEk8`o% zbmT3_g#ScU_DhsvOmHGIwe%wucf^v&uVMrJk8{_1V(AAB>_0aJ{!N(}wSvsn&>0X< zq7k~;9)k_a(0g0~O6r*7{B~Jf81q=XX7*v%oL24E7VG6Ktmda4daYa zc#E2RTq<+Ug>cXAieqIjgn5oWeC@*U(OVPs+}FJPo&y74`bVF7qfccb{$8{~vtlJ` z!V55!{SS&`{)(;#o5alkZQNTQgvq~jptQgN|E}}CWaNl)dRI&KBtmO}G4_2dL*A7f zY_%$v`Mm2U@)$nom1t+jJy2-~#%b$d$w(^<-Jgv_T{~=R>jCQ#k$Ay>N89Bk&~i<} zC=c2Hoi7U8z-G`Bk!C{Q(=UH)UPE5#WqZD-vN7?gHI{2n$2GYGDEDQrKy4|GbM6RK zE|K}?BJ1}T_EHg{-|vgKJ|?Id77zc+S_t_p5Nwc#u5T^Stm%gN)Hs#obg{>|3~QJZ zUO$iii4R?XuGd9Wt`?cov(Wt*c|nR+`1Ua$*y{ifd4Dt?XAZ0nbK}}Y7|J}5(Qo!p zmHZJa6K;w7Cv|WmG!W_)T>{qgJr+&w((>msSj9Q|KQFrtk;J{h>7uXj9#=xJf)4El`K5&?2qWIeAjP?5LY z>q3ROc^B%?3jI|uy>vQ84$j10O&55KcjLV`9}dBm__?JD5l3?oG_hRff2XYH?QPQ+ zTK(>cPpQ-|kEG)k^X$VeFh}wx8?ilYQMAwn7Uz;ty%-pyK+en|cDC%Uz_Q+5@Y(g1 zsGp!m)@T|QE+UWmfDNuuPyQHSkD-BH(9wv)X3np7?B`<2HO|?A-0#b^#lKe09Rnwz zC-q@_`Vx9&k>hBefhpPa59WKp*eMRm+x5_yy6=zW@vvT74C4#;#JrL3$yv}qgd67$ z?oA!!$#?#d4V5@cbW@MU=>fBGD1>}k=EtNzGH`~^(SOf7Q-gXhvNugYk1!XkTEsnj zmJ5#ZzN8WCj9v-!Q0+=V%?)#y^(?`=aQ4!a6l1`#W?}aDjYvPNf-wc|$ne$2uRvRD zIhcddvmJ4DYcR$>jmMKahB(I=XH``)CYjJDZr%%#R(FLG_spFdrz5yT1E)5+um>Uo zMW+Qc} z7l4D~Qs5Dy3+FY}7+8~o=tGsz@ct^^wr>&z^3=I%XW~gA_bklgon0b9T?_kEHq6Gq zmT0I=12pMNKk1x<+>la)-))bi4i7{a)bZw6FzogkV8UtUFI~u=SG2&lO96QMCLBh0 zEzxqJn0j3<_X0&Yy#KGb5^_gWAJ@kHNq#VXt&3rYt+4Zi1eyCRG5Sg%4xZq9>;bUi zCUe3M$fzhL=SIf!ZvHtcqJHD90!H=vleszy)t_?7gImrx$IZ?MlB5@4Y{!1>lQK4DP%qx8q7N zyG$~mOdqJM=e_j*D&9N#pIGG^xyE9IQKe|&|B*kN z&Yaa+MZ{0}BYJPpLNNI+cI+>iwT^u>|4DFfz6~bb@W9veb8wIMHc4<9M(JiV+gb)c 
z1qDp_tB3*i?6-S51ux#2Vr-%tN{-|q-oYI&B!Nihez1BC$by=SmpZ_c9ft`7klKm1*6X~YPps+2;}dtc_s5sg9>rC&1>QOwqAI7 z8zOCdBr=nKfqDeQYGd*X^A!Qrr+n2qT5M%dyz7ZUOe`zu#L?@wRm5nhTl+`E09 zL&o?GZ6q@9xk!)x$!_E*?{vgspE&3_=#xQG1%3N$NSI@2jcS9d2VRSbH#9NP%M%;_ z(?_4F7O2t6MZYi$%qa~(#qdyEQWoewxsZOF9Go{P#QO23OA8CbI zH4^lj&#&Jfi>D#c^r~>rU{Zp09TMSXB*ml|J&`xzo-muEkL|JK6qqvyFK3JS>;dR` z&IM%-))*a>hRy#}U`s#P>SHm~5$3`qzg2YI+bY)TOhiVC9aQ8DQQzVK*Nr)NzuN_y z*9G95N&@8G8DWZbF~aGizWTfb)+IkhwESP;nn=Ibi8!?I_d8o>Dn9TY5+j{L7DEUM zc%F@%8NF2Vps$>VFztDG>#Tsr`^{oau_`@!?$9bS#ND4ZxKi}L`N08pFTKbmjDy2Z zBV=tZMmN(`_^Xy;rN2D7ylxU1;gfOU*fb~w8eqpgTd0NQ@|nS&1EV0c&WuLBvA~M* zV*IeAuku>4%;!CS&=w!}G>MI$HK1_G8~y0tlD|F`aenOe8exM!>|)r#JnY%iUifvq zjNN5Y98@Zj`QJHkGCBFP8%0Zr3XJQS6`O5}jdm_L`!N?cC$leWVlY~kra{4uyN5L; z=s!IdTTd1v>{bWZ6kQTZ%pJ9choiby6X(m#F>DAsK*DU`?dgi!i{jDewI-6k(x>q; z0TX|dCobc83O~C+GU0+aw$lnjHmAV#j~*=N1wmdE;VtL5&wZ0In9mvq2Qsy@7Ql)Z znd2cP7!cMf>{6eIP)7|c4G)0F2Ef?H9;I&!pzGm|#9`rZXQxcX3VjsLDTTwT6pS{i zz{5v!c>naZc$BS;z%SldA}^5iiR{$c0t`DqMsQvTpNlC7B@f^7MKS*TpeJyD87}uy zV*f#VNHiy*`Mfn$GK`@7-Vu}TWnuOt2W0o9r*A?GZgsK1q!;v6u1LXa=IyU_?u7Kb zJEA$9zMq&dc+Ak`Gsqo3FR%}09ev;X{Sk4K%n)r?bZJ+Cep>X7&Mt!XX*n{K8%1S; zHd2T9<23cE>J)R_q~2IsO08Gk6IOgS)vZ;-lhuV7E*FQc)Kg{sy7M0F(sRH}1%DFz%M7sQ zK68Al_3_Ne37;<&09T#y)g}^MmnOjenh8Q4&PB5yb%J%&0liv9<=$E`j+~RN&EZI| z(IDHI{SEYWfAhD(oI?RPy)6#oOsue$&oMLOeDi55Yr{h?>|oCJ601$)cdmv z^Rdgx6}8O!JdsbQ|B`(^o2#&^E(dw9=VAKBemL6cxi~dQhup3i7-FD>f(mlCMx{gb zl{r4y+rlLx0w4Yga$-v1ua<$0L)lj-g7>?mNHF;&hNN8-2lAK?2%U{Llk{OwVFiz$xv1ki`t@}` z%zu;wNBU-db}L5B*(7usD8=o@M$w6%TSIp<1l#0b=@)ezS8;;L3Gz*u-%OPcV_q-; zmGhX%RGtT|>UUO>tO0*a*SgSOsK?Q zdFHDYSEBWPcWB*uD%83f;tn;|`&nwZ^mqoGR?wHod}Eu#Q?b;6UXdz0{7tCA;@t&! 
zBqaxV-EefO_$XGMS4B(CEN0X+(W>Wy7c66F^0p=`maK+PaR~Woj^U3h(NdCw*w^GU z$avnI#cgr+&<*jHdHS7UQF!;)0JBzEktalk_9<5wfb&XPI{QzVmpM?1U%iuYHcmNIB^HpHBVJbYoB_sX3J8p3fbBbUW3v>9thAzg-+ISRsERy-> z(l}KS?-d>i$pu~5EDwf*-5C6Jv4WyB9F@zt&w9&TW&l6$s*|y8ei?rM3&Kw~@=9d< zxvZhDt0L};&^e@nt_m(V$oa8+mNQ0=%0_6u9h_JDqSMkia_yK=_A17@u@dOo@_zT} zvq-MIBu+iiLL)t<3C#7Z$+AInZ}!7JcEq_|vjOVbi7tk~JSiIJaeDZr9BY#0G4Nom zSe(i|v?upVsw3D%$7t~d7~^lH4Ro&sNo5Zru7AE15#TrCr^U_>GI9qNE}gZ~KsZko6}lKB8b zBRo%YK)WC84<`>Ib!8wP+>V8&rUkNnrD$`6d{*vl0*l{@r8=!*!csLz_fCW7Qf;hD zvSw#_2D&;p;eE9aDwU&{lPABZnjF>ANOqn|Wj?RTp#u>AM{EpGL)b`9{JUm^733g& z^vdNdMV?@Pf8$E47^+Z_tUoiX?RcsxH%pN+f`J}n>zlDhr7 zr_A5dQ_*e9YB_v>Lhchn?IEd} zijs?QaAR*m`XTaPLX&YST8gF9J7QIr24Oo>9ld`}!Oq*Jyq8;H+rvC4ujl9C$SmkN z#nY={jAOn9@S2f|9HU}rjO~ET#Vtctqx9ySWr>G5yDs#QQy0~a>10yB=9#%6CHpUw371&?=#ssE= z%3=5{12+uIa5(yjs0(iuA5LgPv&jpJr_|ABfF14*$wF-#TXrW0VcMuU+%uEU+eM1i zucn@9dv-AExYHn$l#>5dU1If0+!ROJHC?d z&xJxcsZM0?_Cv<9JXo4eWKM@Y!f(lWX|%;n37NG|Q!yuYA}$VIj2r9IP^+{==JVwC z@$(-0S}dmz`crcV=1tIr6?JLz@;vNYBt9_ zhK3lb#6A>$-VeR8#icK^Vd)!kn?=9=o4!~ytx4pLHHN8sD9$-(W7GDj z>?e>Qv zh`45tKSr+T{F1DLgY3iFTM6BT*-*bC#ei$y#YlRe*5+uC?L-ajk|DNJ7n8Tm$N9}J zaGo5F5bkN0&Nrf#$ese`EZcV`&r`;)i@yC&JnhybzBa03TLb+Qb98aD$PqFB*vCMw z?ZRX1SfsCT_Dd6dXqBR=ioCxmW#p!|!!iZ>Cmv{`^r$yfb0yTP`RjC=7P`bec^PH zp5*r0IKPB`8U198I3)Xdk)jJ)uiq9L%sZ_voQ7H4KV4C`gq2(_w*9t2mk~3u>vJ4T zIFG&e$Va0=I$Xb#y)WbUCu^?Y>5g{n>3$~~mgtblI0w(qX<^uFa>1@4>!bw9_6qs$w0dt@}3{|hEm~eQMo}A+PSl^H(eK@J(&kOor5o}ws_()4X53C z_w{22d$knD`bki5kvx_eJz!COSR@&l@;RJ{mV4tdw1#YZ%~WW63(UJ=itewYalf9N zh<;LZ-j#%^&n4JL@5JT7&&Az-TIlXH6IS}<1}HmX_2mMxCoQq!HT8t*DBSY*fV3m$ z6e&Gx3iQzzcEr{(m&EWmQ*3pegG{Ky=eRXI`3^CEZGv*Y<(gwsFcBM zMK>6FbD!m>f{czc;U24lxe?5F49G!KzCFwiyJHn~!@MeT=0eNSiNE((EoG?w*9ox; zYek-gKCGU_WBM27)iUhZ@k%~Ssy(c4x-uJ*gzn5KO0_C*BRCB{`4t%6>z`1_J}26$ z>0_-@5?*KV>z4}*EX_y$UK>8I{jt$B8ak@F$l-2c=#fku(;{bIkACla&xJ*V4mNL( z#})cN_fa>B9Ke3`LN{{od~h}+9TMscy6%m3#(X!yrExw^CQvq*E_K|TLa@QePMjc2s#t# zfep^WAo4B);$zU}cr-K)kriM`o&S6qy4~b^cWg(@ZQUc9Z>ht#DHNJ%M)239PPvEg 
zhXm$-Gkw?<+~aOz-2;vlXT-Tl2KY%W>Hz<~^-8SaEuYOE zUx89ZXIx(rhRae@^xe(8=P>$PG)iU8oox;1fG%VH3IA6kQUBNy54y6GByS47QtzKu z;sT$;k=T7J8A|)8)p?iV-6{zj_9wAu>nc{$#Cmr!6 zo<0Fi$LbQmyDZ>6NxP%IPQ%wUP$tB=JRCr!4IwfC4TpOB$noBaLxn9t+2Y5|o&>fmXp4(et4;l2d|kO`Ckw<95(Hn2XZ=HVEAj$iJ6F^iDNL znm2p#*o$#Bw-~#92jb|mTS8t*A62^S1$3cqb%S?fw{)D?WR2`dVC02(H10J<)X+uv z)jkK_4(wrxX@fBnpNSm-TDZe~R_bX3422zr9pZj|Fz?InL-C2aXnT4k7j2ZnM3M)C z^QF+!>&_U^Z87G$9@!Yt7-ldTFHZoMYN^BeGrzgf1Od8p;5Ka%7I$T?V?YFwezON& zo?Q%&Yeny9c7omsLGBP8eD|=x*gUcgZdzb1`vYozMw0_#hKuKmar;~zQu~&mzOf?` zAGCUZX3yULokJQMj$pqh*mO=Sx1`bN3GV|+RJm`nw zojq`(S37(R9S#4d&*_Xv67@ z59dTZEZPlBSIk94p$*yPp6JUxds`d&;G&AqFd_xhKk~gRV{fjk=Y_lc6Je?ypzc9V zd=JL5Omv}EMBTwY3k6p8)P%V&?=uJPKaH_iz6de&d-b3HsfAA|6-07LD8ow>>!p&ke1R z-;dpr!}CxpWxuvvbiW7I4Sg>rJtWUD!3}$k8sSAJCn&h(pd_4o ziY8C^#ij!P%rG{Td|I_MNZm?Nyh#yj=5a;}(L&P{9|Se)aDQ(D>3j*E4RU~Wo*UpC z1C`D?SnN!H!GjdE`>!1T4wCC$_Eu~+W4@T#v^O`&Nj&aFu241{?8(RU2!SLri%i*R zIPp)4BHon)*gdc>>bqzy`YCcc=>qJ~Tiu(Ss8BmZ_R2@=I#+DkJ<;`Gca;JdB**R;fv&faH4L% zZ$KdWni?bBg4|vDkY8W0#*1MNsCgIxvu%cmqVN9JwPXk@DS~?|VaT9%xOaXs5(2ES z%SQ*}yOT#eF$0?=oE^N)q4hchReX-yxD;dc1F{oT$V=#2FN}Qd3VZsYJb#2?%z9Pw z2>5dxmxE5vm^03wh4F?dSo=jAUlvIbWtDAGmGs7d<118BufChEPIc?Z;+$`GeB5x*TChD0(?I*QFD)%8B z>+GO@&L2Oo#9-rP=9w!>a9uka10G2sXW1TMYS+YN9r{KT6A`_D@0WC2bl=T+Y_%J@ zPxrw(<4o9RXko^sG8l8G(s$EbXwFtdr@AIFsw2PeSoXMW*2mFQS3Dla?#K)$-Y0ya zL`Hg_nYI{ysuVLnkWw+n{9RvPw~Xk)=f z?)9_Nu-;-GoO=Ecu_s@Pjn~*)BJsnbrR;NCY>Th*66~da^l|$TG~OT={FNEX9+ev2@BXK_RUr}fTg=*8&c`W<$2nWo_R zbt#O`u(NY*gLoCAg-?IkMRJ|GaCh?7cIK0_=LDnG;aF&tfWTZsR1PhIVp0nJaL#Q% zyc_xsY!*gORN+k~Mn8LP^onrAjZW-L?Zwa2elm;LH{`5s24n6`r8hERJeJRS8P8LS zYKNV64PxUFKL5;v@HUeA?>swnkmlhh=fHm51F*B4JF3ZMNQo$g_hNR6JdmPWHw9Qs z{VRs?d8YMShWOGeR)&kiiqQUO5ys6Mi5Cs4#ne4! 
z$dE6^(p-HsZ}G%{R~gX$>WZnYHaIk=FT{i2+Q5s~~7h(PjzE@UC*+s-TAg@UzG+3e!`!j%dFq-jcbQrUbn=XQ-<*AAi+A$Ej>RuVtWdE zaJUyvGM$O31EWxU%mmZvH%z5UI zTyC4=?>6>V+|$CSGS<@fn_bL(a_u|Omt2nFZT)e_SPN}92gGV-BUslGJGxHA zY1=4lE4IQ4>Q#?dk>@B%pmw`AZiIXl!@g@^l$sZ|{bvHDZ2AsQ@xA)j5nCpBGJhBg zojvRg>RQT)%>|wPj2TO)I zBi3{QEVCr!w=tJj+8^;}p9+^MJ?16i;L=wQqk7oj#)J!hF<|1N8hg@LvAL4nOB*W8(uW1R8l@Y(o?l`CH)H&=OcuFSU0h zFIdL&rvBFp3;I0~?G~!x*t)5RP}V`fE^DN#Wg>dD4ZPGG(eNPwJLuopv$+VDMG~Z+ ziqK!uA{=I47Ngr5V3g$?T#7Nkhe8Xqsmq7967|$*a@y$OGdwkp9{3`>uZ@Iq=L(s5 zsV2bImS;!hsT4%*;W^lWVJJ{$h;^&#EL zIh7vg8uc=n?>(~mSdI^I6W~D|E#T$TwRIF zx|ujg9kwy0BQ!hT5C?zj!Z<9R%xLyCU1Xl5E(bw#rXcIUG(2$5g=>gDmbz53JBXYp zX5cekw?l`rM&Yzc6Vs^sPJc{JU%oZ|2IL@>pZgnS-q`st5+D4GFf5KdBqw$tuV=1A zu`LSKK8S+Oy7*$^i}4F|vFbDZC@~UD;(s3+_~G22Se(CZin*st5UWDp+U*jV^&|6l z-^9j)rQ^r>5j5-s66QI?(C6 zKg4E!ULPli;QMv@Ie*VYCjIq&8eA~jcqS~LtHZ?)3?9%t^+Q$$;Ib7N0^TFfmDy378?z4<8V11((iR%r9x)@ zysXdN(wq@EF85LxI2hyHyhzk`(qM0_5AO8Nz<;ge&o^6>N1KV0^cmk$VL!cjAwIpY zkoh@3BEAzD9gju!b@oF1b;B1$ef;)t20mq>gDv}rZ~EeXI`vI;JM^n#c7L1%SErL* zwC1{FEWysR^A zm;q@Pv12r#_1zm6as{TAJE8DvAsS5_@#;!EQnw^x!WDbWqW;iIomqJj`#5C${$%B! 
zby$5>+&^$$glQQgm07Aq7qk&+)q|ItK~z8&`M$-#eN&UnF@N0*)92iIyMfO+&67n87hKow*?&uVcy zeD`e>xFkUu@ULyepNv8I(3p`Eh5rh6YqXR zq3pIL4*GlH)wp83pdSD3LMm2rHuVX0LC~%F*d#5&sH@fVYAE2`jMpMDWHQ{C@ictN zyxKQwXi~?kH)O6{&kNsf#Bv5T!nQsoNT=pJ<3>h`m?24MT9$?Sk|BM&kO zZL}Sr{Ansy=*Oe0t_c#-$}q-}+1C#xGM^W3+#b78CoTqSV*jBK?BbkrDuaILzxnw6 z$$>ffKvZst#nDDXEUGO*X>c|!=aQK9it+R0U3|b;s}{Z$m;3bt*1?)W(YbZV>5t$S-w=+4dRu;Y)97lMdue zYv4%+>_nFZGM^_a=c{;STXLu#iQUxG_E7iT+g}Tgf9=t3lLV9f9C2WTH+rR{;m9CE z)TNTcBbS94yj{ z@ka`gl~F_fjcn)1>N{LhKLDQLyG7EbbY+4F75?KhqRzd7`@e>cPE9T|kM2u-}aLH%e_HoiP?#=uA9Yv_}$7h^(ye>t9L(chCt zzJ6PB_Trw2cjI(Wde|Sea}4ogp1{s?`EXgy9_l;(IQu#bRpcwV3?aXh_v?e}*b{hf zAR-&?iR~v0(TCo^aX+GmY!;d66P>BW}ziXikiyK=rXGvMy5?< zPq!OBX_&KH)C1vt^3mt34fgL$!J(Pan4dorx7t^d1zQG>5hYl?_n1h0@k*px3DnD_ zqGW^#x_U8>XO@S5L)?+smObJ|5?FoJ!)V+2(7l%kPnU%_d8j`cSAQ24>s0`9+9c&> zDEMT7UZuG>nQDW}gFVo8NGN+MOrRZ5j0<&X&>bCyEgY4nqx1Jvyn#RRs`PUS>G zzFGs5A68)1S9VWKr%yn}^X&8ov%~kUnCqqi=k~Fvqh1`WVugRN$)|s0hm9`WrS*)% zRr(+O>dN6gI}yK`Z}+i3{5N&07pCt`z@q3$ zWPIksdOLgTro+Hp~Ofu7K;$zv829M;xMm`u8PIjAjqzLGlXg$SeD$F$MJ=SSY5HMxF;uIRaR4s%z=aJp5-Ud|MJ+))NO zcRBbz{3~MqsIqU?0s9+_5JRo-8t2W3g%0@X$exLTbU@r6O~@Sk3#!eM{6V@Pq$@IoP(q2*3FZ7*Lvm=K4ID&yzLJ zCF{NQ|9CsguPoE{Z7T-0V|R{SbPIBBbf=&Q7@z_I0#ec--F9P)I*#M0V|RCVD|UB} z`5w>jPq;t4pPsdzwVX?d`#R6#*!OL_BRriNDNoe8<-!dK4x*LxAU51~!$$Z%IX7j# z@oG4q_9mm9J%qQy3w3Us#SD30Ynm(3^ygF4=V?d2R;PTcmE|pc!u@>ZZf-)Vp#$%` z<#6`B@L~hUqt$y73k!xYsCEIii_g>^eRkDIB`%Ig*&Fu>YA(gfPz_2 zGnM4S;_d#gjFI27$hlWe>myBB+w6|o(awm+r^0D*Qs(95F02?Se&)r(FSPK(<$e-< z&d8tZU%|*_k|&ZrS58_->iZt~zkJ=ThY`Y}G^ECQoB!(yY7Mi;Smwp6=f>b0A)evF z5=L%`q4wxf<^|WI-=ULgkhQMt3np-Krpf>9DH{kc_US4&rbmqw&w}jmx7cxdhVU_Z z8@!MYsU+ z_l>ZsN$$(n>i9cDKBWeeddq}nA03&eFS+hf;sNg$%;?VvI6gGNu4M^(ic+~aPCgGq z>tcQDg>pDQh-mjf@%vfwLeGwkZRP9P+Km#kAi^8RFuj8@|4MG6gJgCV%iQ$a@h$32 z`6AUUZ9Hub3D+!2N4O)7BrGf7NM0S)uh(_&Y80NuTIWSkVNcbK?1M3C3?P2UA(YDfuX;fiLr0aM>Ag`?UtP?lZ_4r8D>Y2| zpjFW!EN*5_LMLa6dgfsq}n7{#`6S_y_7_ z=>=sa-tBwEk*tya#PR?;$^MDoSbF8%+lJ66JAnkbS9oie&{8;;wS`+4+@%KINA|0^ 
zlBfMTQz&mWC2xA%ML1vCOspaKf`cPuKbel7^!NXLUrdlz24h>7X#6})^WN*Mglp8} zo$`MnJk1+H*aTaW@XMB)|I0lca;C~=IM;F_X%=iofO9d~l5dDyUaIkVaGUEd)zbW} z+7PG1>i1qmoHe7ifr~hrq(9x#i@RsVsoF7x=kobD?^z*xBa@gBEB?)u_35wsKzVq` zUbM|fN*`GgALGDi{XBA#UHSL)XaZU!5>nfW2mOoDk^HBr@J4q}uEn#lPt=U%dgvb+ zDWCPi={w~~l$>e1;=zPjfp~9CB|>`$kDp2&wrLh`R4F=>n-N!iMWqOL{`9vbwx*a8 zn&C&ie{yKH!-b)efy-MiyspOXOj$L9L1p<&l)bp7&r4gQMbmxgV~3nGca?h6Lr*e0 zSG+KmPUW3_li4I$%hLH3!U2_B;Tw5w_vWlRa!&Eih`>K4$lTV11z)WtizWU|;XQpQ za%a$nkoL-li{ zL=wIJGa>h%3eG!c5q)f`#<}!$C(f#z)P2fbTXLTrBnJ|vPtQj749U%AMXV!Ne8YLR zD3Oi#j46;F`ch}%!52#2O~buKw78a58=h9yn+>SFJ>vg-OU+B9cfEWHRkrQ~%nl{+ zYzj}r3vu>OIeX@$%e+!NLFYPCRQ6o?trSj#WGnXnW=`L98(|5G=b)zp{e(00CN`3} zf0@z9vrxX@2~__ry}FByv2I(fKK2rx)$B-NiOXI`vaEk3OrhH&d+xRHW8}C9MoMmL zoqI8@w1sohp^QM!#@zb5F7^BLC1tiPt23>n_ayv}wK)vl?83|ZF&uS>ls|7ruj@q| z94dd0vqcmJd{nQhZ>xi!^|>eg@p@t6yWVEYA*)7k zSA%5_sQ*JfN`0mt6$xi;OgNu+2|u#G6IPOI{%4W&)%?Y`bSavsG;4NDkY2yMH^O5| zSyJbRvYP%-oeGye^X?GZ`WdlU@<>f)3(tR@WO{B*VBM*Bwr=uZe6w=#j24j)U8wQ% zH04&*Gx!a^6%Dm=UBxOLGlG^E@G2*f7d?v5 zbe^nfj(THNHxeElQ(rp|!pC{Pfw2f_)7jdm5%KUSTDSoj;msrDCdHAGE3H8 zUI*VUy)oWuLjzk#o#iCXoGdaNof-8mft%AJ(SBoxXb2f{JZ$(VjZiasKZq>njw{u5)5L*gj{e9QobbzJ_KoSxacjuEBlpxB;k!LzJWj2}Gb-n->($u= zd=k#yX&+8W_sq83l$*6mIUJtK!d>EN)^Ja<%%T-ZBR8u9o5eqUJb~my1CH-;qUG3Z zV*T8B7v#h`GwB9poAT^ax#Y_xQ)ybM@$=eW`>7Hw?y0bY2DA?xN#;g5?}oZDcv}|c zSDmTzcqE&9iU+5UHBIC^_t{iHP&L-(dTNh__I%@4#TyZskjlGFsl|nvy-!Y&WsWVU;-(7gQiHZJBvNd+3n=TSIJh*spG);nobR3FF_Zq9LhUk{$IKheg@c)l+$Q{J&r({- z-R$s|a=LB(jfh7ziLUI=jxHk@Dx8|q6S8x7HyOWq(i;u+XU*kUhG{v|Jaj4+6>^^x z|Jd1zy7=^at>%6(=cWAw{jvmdpc2RN{bf>Fx5rKztDNf4M z_}`b@=Y|T~_gc-_WWpWc%+LB^OqKjRBm0LVKb=|U7e%UdEStSe*`_s(*3!R?X)*&B zt1jX+eV~3OSmS#s2J5w^yq)Mtd~80c^Ifpp=EJ?ZsYKNh$J>PIjHyvTitOcfzHday zfTzlR`at?xyYs2b5ZaG*5H^R**S3mxH8P0)u@forLi$~K#A>DDJ2_wD=dEj`nb)gD zyl{Ju{ikMqHsMvqc>1m|W7|LCqe;mnppOH-r^eDEDva)Dt@xu(DJ#_!YT1crWL{^v z$L>?!D@?iYXCxldn{0^#8IjrI7qdl2T%5E|CO$9nT@sqCSD2uE__NV@Ed<-iFS+Mg=t)T4Z?gd?Kq^E5p#T^}uT 
zzLa~)f2SU;?}V{*kPc^;d6U&w7>C)yeSKz!SGIf)N7;}qz5arU;)fNk^wqNsa0;(S z^>7{WrAVGxS@80zBeTzCbAF#IkE({T#c2Y)dx;n1Y7u4O$$SkHuFj}NJb1rN{d3Zs zBc{>~wj6?XOMfnJDIjUB1B2Jc^J}ooGFAqn7eAfUtV+&nmvM4SZ6XdoR%=ff3(HUP zA?Zf!sxQ8!vRvt5dNHBT2qN~&EOD(dvlh+dYMm?&4VZ<-95W63hNEuPruv9s&4@C>5V_8*zGQbF1+{{OkF0 zOU|JWSGkjRI$eHtTsbqzlQ$nDY12&+UnKL=?PQ(3n zPau>I!h_tm#aH}U=~UKqqx)(PB3x3r;NU`yzEe4ukWItoWoXJrIdb=rN;kf*9$K2P z<7+Te<$3>|u|uzwZ~?}9vuoiMedw1SKr_v*~i4YaM|5Zx;W?_%!j`mf=&UKKCxaS1EgSX!}7}yV85oIv}2& zdpX2QpX5#4C_=Z1doo_!M!hRop({KKoeElB?!b!npViJfI+>KmjQ=)Uvfk95KZJk1d4+Ie z10skz8Hb+n5E^$D@6lv&w8-9R&6>vKFMg$tNgppbBSxGV;+MVcP0Y(2nT2?f`j;PH z{!AsenI#80&A@eq^s>CCvw3H2;auEPPkWj(Z)gP0{wCB;wC7TGuJ{++sMTqhoS!Di zXP!PET}rw6C7H!TC3ka5Tkh8HRqvQyxSSMj&-7uu+2KTYed!8GMlQ@UpVen3aJ%7X zENd6hOYYK*-xg@h$?w;{4S(Ibq_%7`W7FFxRtjgsqpt9_YY5l3qbDO}Cs)HchgPG9 zFtFYQH2VF0~J!XASA==Qs=ZBay{r zatB*u&(n#CyxXk9pB;pU+a;V#ncMB0{Y%+qe^ePZdVGlWVfi^z`b60Auc72Phl(dO zC7A9#Bk`2I=*AGaYraaRQF4*S^H#qZHR($Y3WTq>-XfH)cg177*`9Bb$1J|@i0S%~ ztZ)t&Mu&JyE|(GfDvPuenxC&Vecpx|;xu`GL(Toy0A05U4D~SO)4xtMmONiZsVgt) zL~vYo_1hw3e%-EA+`yS!2rENV?wzJQM@{c>Ih{IjP57-fxA!M@>2N$d2|p#lmm_^s z@Hyc?eLo+@SI4pBsVA-fF5|1@q`mtTYMj$qB=^)2!_KP(JH!|ADVW4IrZfn4U|Dz; z{=&0172fvwU$LxdVnO&k$(uSQ6E{=y=Ox#yC7!LfDs9Ce9##6|-@y>SLdkc@es^My z0|#yn=gx&FQhr(zCHKYLPibTyDb)CLqCxFu(CC|LJHeREXM7~%X^xSOJvQwnuOVKc zJOBDJ$vzgH)ux;l{!+%H$vmo|c@9a_{-&n65>5FGiE}#=UF))=Rsq^)C(pdPiVSz-in}_*+VTq*8()j%sri9C%{7_BQk&L%~D((Hnhx3~! 
zy$9tm*F!Rx4t~<@iN~nOmcj?q*mWY0a!28n9+LCf&6{fAVGEu-4&(7b@pNsK{?P0x z48P#S(uqPZ8koVn$2P(u6Pq|noj0E zbFLbdWA&ew3oEmDjw;Ic6yAO;)ail2?t!Ai0v(0l5{(cJPM@y!zixQvXi^FbPa1~ z(_H$Sp+66))gLx1t?E(4_*alNR~#cj_Biz~BVm9Oz3xnA#>fmTCYjUX$sAf2=Zgz| zp2nVfO*yl{i|aG`!W;GX0m=1z^uYbC8M>QoIVCgXY~j@`-Z2~l(`dRCniF?6pG(3~ ziauDVG1pep^ZdN{TJ#Kis=kTOYs>r5SnM&Cd8r*E-KOyRv?GNf!C20Zrn-#J3!g^Nj{)*xGMNFFw} zN_@z1WpaiPFI2tbs#c1gxX^{gzR;YBS;9|F&Ef4G4{=+L;^(jwYRWvvU$>aV2a;Qx zT*`dUX1q@PrN&h0^10NDPWmPs@{r$G$;sL6cf!%ymj=Q}+3Rf0H93b&@z3Dxt72v^ zd#Pp>UrKP&m< zde_ws;Sj8il+T9K!ZZ0#Sbov@?9U&{ch6Bg*)F}amqTzgn8_*W{qNG5Me^5Ld>dW^ z^S@0w>=A;NmN79#o^0Bh%Zl!<^xrZXWA`}LxD4UwB-!Jb9QM>5=|jc}b4b z*J=c@v8Iykc4Os7;aHD#7mnB{#@(HSy|Wcbg{9)Z&ZL8I(=_dO%-EvE`6riEkCR5U zKRba{F@q`iUGDwTr;Pe0dF9A(zKiE)+&1CL3BM(z%@pB&($P>(0WQEON`8xLY-b4PTe-esV#I-?uFG4shWwf7txH_)|ZL@9UU&ca9};=0%0Z`=jf_A1WKI+bXiH6*I>t zW8A=kjgqr*SXV&EN-z1^MB}5IMkC1|@5qq(;Fc`v_L|Ne!=LK(Ms3=z>qpWynX%gp zVSg7l0wlNg=$$8}e~06jB^(evMcBD~dQMLzA+L~}q0LzydskVF(_?<>NYc&4<9ox6 zmWQ%Qo8ZWq=;3Vap2(-?!b1`d%7kFqyGS2eQx9s;i@Pef-9PGWrSv|fpKCtfmyPS7 zb3l>210~PWcM2cVlbBrW#5mbA@9vYzue}u-^ON(R*2hD*{BP3r$Qv6bJ!pNb%I!I= zmB}4>|LH$+N2W2n5_rQ? 
zYyCcN3Q=*^Ot~|S#Y?Af<@z*@-$&15pDOD&N0q}06Jq53-glEJG14D<{9N{!Z|$l1 zdLq|_A9eAwu)@}r(oKFZw@Louwr>-*Pp*xbQ-7ZQ;fQ);h4Etth8!0^iR^EmR0WZ< zCxVt%*3zdFH)CiTf9xt^KXsX6b6>f1HsJH0(cHc&@4tG|uL{YP43RJG>IW15Jd0{W z3+O+UpTZBWwS1b!eCRhCa$GL*oU8&3QqhC~%wFO6ihS6E_-Z@`~%0Nf}l~lPAsb%@!|BwjN*KhT%Ccny!uAIb1S}i$iGhyPPBMCNiN{ z5t=^l=;B|>+DvAyv&6sQDzohs(*Jj{A;~HmRV;bF^Z-V!iC{&4$-_0z<>}B^%JK@( zlq>dTcPn=Vc0~ zYc3`McT_vMZ}xAdPkpVCT$pUd-;-T%X(rsbIN8P2pM=Y*SXxVl=FGcNdWffKmh_P9 z&-P$Zg*LmheyE7}0m1;W<(2SvPu|YpU_*IdF9?+PPy{Fc6`zZ+2SXPm5i~GY80Wxt>G;N0i@(=+4)*IadDv?fFIF{T z;-UA-Cb19Kv~1`VWkqP2qqvpi^Ey&~54()!itPL6+_A-ZWgaG-GTHMi4^5xfcGqiV zeef|s+IJRYJXLmGk>ZD?+q9?NP3u< z($dkDkK(^wZtTUqrXeK%Hi;k7+j@MYf-AGr8FpQBPFrZJW)$~6t312tU_38^=06R2 zC;irwhtlX@VMoL{PaJY$@Gl<1?ZKtQeoE(wyr=EYY7;%`gSvQ4S7!ZgTwZNX-4@P_ z9y*0jOWavk7{sNx$)tx_Qu?isQ?1g(;aJSK`t2xpex|zm*mLo$Fxcjs(5SXAwIo0J z-(5HEwg{kZcnY7wrDOhlHtYW^=48j28s{!F-N!jKl>P0En`-GIW8OarBz}?PeplG= zqh}sdA4(qLR|saNFe)y{*OsO$}smq7}d9IP%jekG%)H z7!?r8m~U|mFgL-pi@2c0FLPRYfSP(3TU@=U;*V`stz66qJ)I$+qdKgA>OxD`bXQNqc+Q%iXMH7lulo=N{S>9K3bpV#zxU$<#7=*0<@*~6Ht6Cyb=%$zIpohZx9 zrS5J|9{mau&UPyKD@_O)P(s&blNs2noI!bYD^)$m?V>Wt%P2-uhok$w^Z9q@fX*YoOnGW$wk=xpVw>DKqq=+ zg)&k;{}1#q#pH|Z16pL_9wB`j4LQ~WpMFypd)DB`Qxod{7Q*_^R@}bjj9pAVM;CaY zC)vor&`At>Fob;BtF3&KjbQ`v3h&Zp(ZOrVd6O~KSHkJ>#eiFV9kHFC!|UxXXwUG) zV^I=40*%;eDxQ&u6rvZD6Sugr^p)z11Ghg#!kE{oX~9QVNA8zpF;g-x-GBEdw{y6B z9*TGO?*eB3Ok#DNBIb0GpU*=lRHgXSN>aw+f7Og&naf{IpMu?eZ;q@T&V%t8G_o3m z_%r!@3e|gTh;3F$=l9Z! 
zSSG%n4zeGsc&5JE4sL-I4aAM$%|V0cH@LZw!?MpPTAWW|)E70a?3Ehz zQTAdZMzHa?3Dw8M>+F@o^?fdU_6y;?V=Nb6SYUCoNEp=VbYEYLrW_DW`7fHzp*5W+ zYx+D*=aDV$eN&yMJWyrV^yuy|nnwE!d2+;+i3g>pvCNaj`-5;3w$Si>Cg{bKGRHE7 zxh*75)}kZlFZ@Pna2>+h7}6@oni<_)cqs3&B_%!>w~_as<0NutS;?`dh#oF-|LZ6D zWSvGt96F*_x3*)Su`-s#;`qUpVCh9RA1|HnYj*fQn8~^I1$5jnRpWmD zgZZs6Nw}``ADS}#{R9p~>dP71nTeOOr4Q)9vk+Hi>L$@#_=2_PRL@)mJin=s_=&D zdCr(`Rg*DIH{fOq53b$I;@k;$_B0*N0O@-Aj5DL(@k-)LWL`eIlDc*4GgGT3*&7G( z^PVls4b1qo)`{A(PcE8dFLUZ3hIoXND%s(h*7DwS%A()XJjq`7q@Q68?o{+=z+QWL ztgxjx$4l~$xqMzIUedh*GZEO=Gnbsd`~CRM1G$%)Qh_G8IjX2hO> zE=<@z1?@9_Ec6cGys%jwHR(qKzhd@vh+;&C3XO9R>qa%e*=L{f7as!#!L%AF`Q#hY zHw;Om#0GTN2J$LDo}1<}Z_O{D^P*go{A^dXtwm}RnJX?39!?(*YDSxp{mzXY!XAHM zYcD&_@oY$p;@}b&HeD&?XT5xGFVDkm-D}mw^pVQ+(&dC}Fc&wQl6KXece``=6d=6V zUgF}J980@}CWH!?$n@-F7Dbm(uSOI8Sou|T$QeM%I`LLN?M=vK4`xXJb^aj9^DXFv zVY4{amJQ)Ww`p|l<am3(K5c_&z15JGGH1B-IFt8xoWy}SjOWu52yAW1&5|O{ zPfTNBc@eYI+7Wl{Kh@f-4}CfdBSu+^3*3X7NjdD@?Im}u0Q5a46PIYirMi{uka=73!-ek~rxJXWVt#5379fU!U2d|vJ=4uEW`JiO^vV>IjAh>L8t z4fC2yt}-NxqdiOMGpixHmcCP^k9%RGZI5@VwXo`(**aWUnh!h}Q4!4XWpN~W4`H4| z9v9_%ay(b&A{JWo-ddNxPU+yj!ksk(EE)8DD2eNG*f7_b*ln>qD4a-L@$NJXE@StW zB7!CRTh+D&pYB#EPswWUnNY+&dqa}uc`~PNE@|WJW#8_{)mZrq-s#BL9@B}cQ^3__ z<>Gm(%kmCC)R-m)ocKL}cftv|pXy5O&vNgmawSD}tk*8YVIcF{EXm#1+#z}3xKcFb zSZkUe9PsOn@{M__UIZD@_CX-y;^cqdivRnN%m=TE7s_l5e_f0wqm~&?zZG%muQXO4 zD%Lp9t|>RhtLj0^YqEIT+e_XDV|ceo`o&w|Nz;7s`$-omt3czu zp{6`QO+AdQKGz`kZncV&xvp+nBG=4}3HNs)&M}uwqdoDv6~x}M6f&g8SaP6(tHP}x z_iU=hp3sM5KB%WdUn}*&fM1fa(48l|^iwX9v&hBmoP1tSj-X9?qHz8!x%5Lk*BdkW zM|^&odPyCM|5Guwek!j-6PXoE;!8^t!i|UWO1|&0r9P5z8c+V%R2*wq)9c!F%(~|i zVKiOixyF-64Y^i*M}2oUX1-pM8?*^O`0{n)iTg^iu8Xmq2JKXkHj zKU_g=o92@Hy`(w`Z@J#1fd4yh?OQ4Q+~esK?pNs7@ZAy~W?7{j)yX-eofU_fc`yUhqL@*r&qaUfLVORIu57pT}N&&bHmP3&eEgZ2s24%uk_pwkMN{T{WAWu%4cN9GL7eFzC+vN z`0}kX?P9>NMM0czIG7NbzqF8_vvx0CXuZ^f9+DB(8e+ng4<+=%|ydmPDYTjCl z_R)GgjSHaB@*!xwcA!;!9?@-_`PnOsEZJes$%J!*O1Le4oRhsu`M9)$%tBwPY0_8D z?i+&c5livWIndFifcG2RaUbG?>(V6tX=}>w)n&}sm(IaQm3UM)p}p-lwLM4Pw?~HZ 
zNPN+)FWZR+ESod49r)4RpR>ggc-FBbJhO-oVQJJf5O2?}#!Q^_K`mdZi&Nz&mN&Pc z?N?#EzYu<3fOvXMhGHT6XWf~m6m==0)G&#!XN#GY-kgDTe<@eFr~3B^d+|Sl-k+X1n)xOeaL<=c!Xr>+U63| z!U;piK&DNU4AURRJYH9VUtS8KJ;a+NETkueZ&i~mgGpB6<0~$ zrqzO_>vFknl!R&He8$vo%y{)m?MWR#Z*kn#ZDG#lHV)X&$YPh|ThoNsd)+mfiFQ_G z73MSeVKVDGi$h1l=Q)OU;qa=*>c2QWGEDs$-&yWpmNWg?JGQU`I zmiITtbYDFAIVN0KDf}PFpLO40&q28#9iJ^+ciHnC`6j#|dH>~qDnZw< zG0*z!RGSuA5+QjSbK&9FyXU|R@dI7Zcc3s(@?o80nc-_rnehL|+GaB}eHxEb=ct2y z3e=I^QM52wK~BCA!A)E+9wDFQ$%xBu3gxmRtuj*56wXzGBYa4tLRi+{X_Fp;}WWW^72UNYeCGOiF-OEU})-cvj6bg&8@ zPu6V{y4SRo9cdV0uEmCp-Jzn;GOb(tFdMCqRGE$;+Z z+NBJ^SF(G*C+2XwwFgaf#^7-@o}p!yXf?{GzCjWJBMPzbugjnV@6^(7Mi_=iv-mIR z5#z&WnXlcg=Yei;Fq7^iGxv8e3YWaoW$}&XRif$h&TXzu_j4~)lZ68)A2F0N$!TfX zIB=xp6nuX;aX)wr`we0^^4Su5aqynqoBf$;^Cgn2p zmE;-ZGw|cr6s9HE(^aya{hMcD;UHer>vd`8_EMekmwVVd*#|U|T$pfU7@5Q#eOtPA z@MR=XlJm2rO-pfMUCw0kpgf+uY{ZcCMH@z5Z2iDmST$=l~X~e40*{{n$Rqz4XcFZmtck*Cw;OgUrP8 zASY}N8+#S7`sqy48Z^arlz71xAK(oy>3j7bjG$zAGWjbbGsj!B77%z^81fufqg0;EG(%bKE2l)u97YEBvPh4q9f+>lF4d&=3t zn)anujK3y6M5k;TxOh@HcN_~k#o|_KFLU)0zV*sxrS#p-#Qs!GIy_b#%5=%_490bz zCELci(?GJ?X}+FxH;<%)PbzzVx8dZPQszcvvv!*JY4qwzXZePzOz)5XBzJW3Oc}St zn&L@m9FqOmU%iB_V>5xCvX@Ab-`RUL^IC_VPy#tK)Ze_L`VN>{_%n4#`L9A9tFJK5Vh%f0I4 zWOvC4`(xick(}?g40Se&wU-^Ws<6;mNQUs`U0Cae9u!S9&TdWZv5SWDZ?2#1GUU zh{#q+*zGl;Si78zeJSElsnGcESy>9fcvZ1iKGnIpL(dqBb^ebcW z9vIE&b+$PEZ||@s1M8&)tQp*y){lQ<<-vNim7nQn;`X`6>ad)8?TJ|CSs_8ce;;bx;K zJX%_^*`}Nw-nrBeuGKQR=PbKdt)g2=&bvkg%UYRn-`<&pTH+azxto31D2_Fb<?1j6RY2W`UKoZ((fhy<#xyR)@qu{y z9E!O(st%z|nzH0uKgp|G^1Fuv_u@RLpCR9?6|!?$8cyl>c<%jf$!L=ze(Rb{_0%#J z+-XRY-rB5dCilB;e*9c!Aq;6>OoiX~_fH#^G>qcTqw!>n68~}E3I<7bew%Bd#?QOG zr4IThv~amRK=?+!*d@Z)CSDA*%;i>PFNnuRO3U(#jP(f~& z)s4$?KRT7dGnq*&8tBNO3&L5D``BNfvT(1Tf@Os;*DI&9H$Mg61~Zru)KdCZbr_d9 zNV-*$ryFU?v9oR%OFy8@--|X|Jop$ILv*GE-wfsRs62_vspTw{zvuUmkIHF<4$bGd z;2SIcxX$+cEuOSu$z^(&jivR}Xm;(jp|Vy!K2cdz$o*kf$a7`g_O8+|&|&8#KbBVO z^W&5)M)QRof5wR!2ZPykHinP!hM2s~C%96wl8uU_e_w|m2{+X~PXm%hjApf-_!^`? 
z_UATvp8Vclmb_c-^mtA>+3@|pBAU$0W|dPh46R4u@|Vizrw$Ef__1<@CEh3HUc6WQ zYm#lPSs94m?KoB(Hzi%KkQm8|q%p#_Mj?N)<`Ck}Htj+0wfX zw0@BxoUcT_M|-lnZaE#Ivq`F1hG)}TDxuST74U~CW1o&=t@x(SUl4C#SGlivxiVZk zhUE0gJl$kTWOX^ZlP1%<(NtC!zgAm*|EaD%=tIF(Z{`&Bqx(7+Ig`Z^x5$IX2V7}v z9#3fhei$z)X6Nql7}%APc=(IjGW5GzFMR8eQA2q$(}=nk9f`M;{=-*$wAzJI)Nlg9 zi-(}wS8}TBr3ai?$SOJ4`3Kfv%w2u?dG_SOa1(aVbmyx05%zv^rT&5-V%Em6Q}!e~ zR+aKcgKQk$3wQKKD_+!UC_VrkIRgoYXT2%y4*GIf_N|%iT$uKC7$cG<@?MxC1&_<+ zcQlvP`-BH?`d*#yU7NwZbUAX}gMXHpkfdA~GA@U!Zf-Q~K8{B3qB;D#IlcOpaOXq@ z$9j}f^|lsw&OTM&Hz+QTh$LA2n}@bLVs2E#i*kGF{TIf*@O3%_Hqxs@xqvi9V%J;L*^)QE=&mcPt`Cx zqc)hBuv7L~p~Cz3^mfO%M;=M-yh-j7O3Bv@Y(|-}q@Hj?r)Dwa?`c#&_^djQ|D>ir zHzdO>f?5XFeBJI#l|w#JoqgqwFo`;{XI|Rgo&|=}@b8;T-_FzcWW8R+?b@Qct{jP4 zk;C@haxReBYSZrpw9fM($T9>aJMKW=jyP>E=ZtnFFN0@m{Ja*$O(xCEZ(cs=|+y-Q?fVfy5@K@hR7lcXcW-Tq3^G_N7F=Zp^N)HR$w*5ix@TXc{kl zr&S&p2T87Ew#+r;v%BnJH0ypU%vVcZBPE9n>DQIcY)+f?&s6z2JsNii=1Y!zAHIN2 zgxpJZ$^CJN8Am5IUh{uw0#7ZWzN2RmJ3djfAMW)LrIfJwl5jSb1fIR zDY-o70(654G=AO$^Iyt2?3-HXXh26xPud5W(>PwG?2|s^Su=hCfwd|D98HWxrK5QTlB2hOoMuYwT ze_jzz?V|%tsl0eqS?(~FZqfv7OoUTD#-7ynG9Ue5%j_y|DiR_oGPmSTU@5ByW#Bwr zyly&enCAFSmF(6d;Ig0On{;{qN&Fp066qOdi(Uw{@}5BdFBZb9DPfE_;ra+4+^x3U z0Xn@^-A*9GvDlu)B3Q2kl~rm-m$Y<`R~aX7j|Mgr&nfGkDi8 z6}rlRQP%>n+Gr%d*OJRSExA0oZ!Vu>PibHr-t(x5CYWkw;tq+6tS*PKVaCz3Ep=ILwabEkG8an0vp|FyQ<>6)RtM~781 zOAg#@&AbFxV*1EMSU6aN?Z%6fB#N$Qt!N)A_kh?;HeM>lR<{9eA8)FE3-y>?6pFR* z9eUh$=1_}VOanbQ6B$U=ziG&#osRFtyHlLOqa)?S|7=2D%}1(qv-E4$g%T#ef3+q5 z(%`=2(wDko@FNnpUtoe>5P0*7e+l$U+WrBQ|@ttFIqBovl|XG zWX?Xxf$xXn@!lK5$@6Yp?NCbco_yBKD$@9QN4*-+Z^1Lw-cO&<3qEY|GNv%ko=3*g z+s$<3)@o0>j*H>hpJu%HQNWSFME==WNJ7YOGC#bgHoD0B=Ef8@WaOC|BlOsaObE^dPnxIm2A84Ile9Q zrF=#tBRAU9$}^wi-&0v!H;?7d9;v^do>ki#nBekYBrWWXWj_Kh-%P72h6g#e^DQ&o}Q9Pk@=z%*=UgdcS7-BWeJPgKuT@yD z$I4@v4y7MLc=O1J>L)HZt`zQ8vq9CXo|e;G&RF{MAE}`M zx-eo4eeW4l_S%J3LD@7L<4Rb_NG3ZaNJq_*>90z-E1#bk9W{SmmP2cDhu>H8-Sn`! 
zHIctfWgZqMd=AV1^`?iCB{Tj)Im324Y{Ikf8Td>~W5M;AbeZ={wf=ZrWiAzdO|o?M z>Y5QKeURFn^DtlF#+Nz4EVGf>td9Z1I|}!_=42cfNlv6+9U8BEqK4l#6#vmgj$Sfl z%pKwPq~)+?gBRbI1jC&)s;60EY$rLl5m~HXT1l5z^|+AxR#^{}Igs>g9DO9OZzJdW z8yWb<316{wAb-l;?PYCy!Z(xCQ|7cD$D4Ba#uHU+(1$1ce6au9jCGA2Ik+%9e zY6jiQr{2hPGJ=Hnytf7ce>_yvPwJzk?@vSvFP_R?W&Q8s9KPwoRpI&{>=VPvGhT$+ z7Bb*L9ksuRp5EpRo$?2tck(vT(N1EF|{4`==ZSz?NzRzfD={q>FAqCAMFzI z^7^JhPKwX@d?~a9JrD-W!<4@ig3(J%FyN>Sh72ykcx7j_dxoIj{bU?oZ2*&0)>?O{ zx0yih$g=^^&AA~CQwI@yA`a=&y0E3@ZCg+wy08anCU1+t!!d}o)y8-BO|P`4!!M%- zKHufhocB`rYck)d+y~M0v+bX2ff{;O&yxF-F(VjHbD|(Ml5?;VCGfM(Lh-*cm`!EQ zfnU4$%DF{#k|%onGlNUqB=}t9@36xImr|zFzfR9^A4`n+TZ!eJ^6@?|Dhl@c9d3yh@^jp$l1;jhe$b3M)O-s^V`LNli@PV*g+3Bzxc~Z) z9|gr_6QRXi*^wpmU+p1J_gpBJdn95f`_tV_YM_8C{YSKJ(Lw3Ja7Yi< z!Q*jmSh*~p+7L$!x#)|3N;6=3Prx#w0bRzE!`-zGGPXmoH1m#dNz_Ex;ULVlGDbVu zw_gwOcfW6i3$l~YmKG0t)<-F<`wz}zpJYS@79Hw>e~n*7q=gYY^U3kvt%u5q^k>Ny z!}rG|bPV;yV*e}@Zs9EcGwW}yd=&O=!mml}gT=fPTc7J;(6dBz@YE#Z%?$&{WdE7w zfL=UTuKQ9@de0obW(`<%g?v``S~Saa!p@>cVu+>&iVC98NImUmaw}Y9i_rJiBrI9! 
zk1JW(u#hsqJLOtfJkCbY*E$$?mqvx>H(|&5<^12=f1EXdj-3rImKX4tEZ}k~999p) z@xYze_>U4OY%Rp_S;e^3t0S)NeI^cun?dm;S&_rWVCr}mYGyNWtHmA>8P>?i%*LY< zZOr;v2b^wYdNq?``Bt~>K zLCNxREKbYCsHh5wzo(?@sp)sest(V@*J~P>nI4Q);|&qp+YZ+Uvp>kW@!x6w@TrWZ zf8Pc>FR;#BR>;0SxnyI1iuyw@#dulHDIZM5OGo-}*$0W@+&*NGBaTNVl2@95t~^lRC1Xd`Q#N(RAGtQ>aliy$<}A zu1D6wBK+bU$*@l+B(O$O`ltaXPhadDZU`?+H)z%83C3~djAD|vt((e@ZWyBhVZ z(-NOnKz;0B)PA`o9#Ch~A%yduWIe1dv_fNa0h;GnV+iNA`h8+i6{L^l@wKqNoq+-S zY9(H;mwgA6DSs7bi*&I3bs&6l46tIYEk^VGZamWo{bmP1^FkuR$g6BqtfIy{8#V)L z$kzQVj`z7NR%aVw_23z}(UJE>%Oso~Q%D^Va|7wOH!H|OEIHPZN9)k0Q3S_N?7K>M ze3E`n(&PJD{!I+^dM>UW)vP6XRSDZU7u+dWV+ARKDiX}EMt2Ro-$P{W#t zE$l0QRFXqU(+lzM=tSg{dSdMvBRscvVBeYg9#aqeDfhJhmDjH0x5I9E( z8vbX6^EfTc8_s!Ftu})6ZEM4{@0a2Yr%b+Vd=2qwas|v*W#CIohW3vBZ#b>?7CGKb2Aj^WlB5O!u|0u`L6X=Uy+Ihoa4o>tYNUgVS|$5e8>ja zq5XltR>K55XT3bHq5(Q>sW3Kf#7%7_`W)!PEYL#Lnn?1>hx?C^pAQGGO-IMPd6fHUmrKFyB9dZQFv zVqfbZeF58&3!t#E0uw@eBk%q*F)LRULGZz=YwCzJaYE?tWJK?@Lf89zF0GBjP!)5y z)5kvQQyM}Rl_IUa7iu0$VaFCVSXbCHdnN>ZXEA?;+UJ-11a1wlL-vhOC@#!FI`vs^ zVw-X10MD6(>jnEOA<5yHSQMg*nRXHA?Wc-&)%L`yr_&VcgwLbQm_3(*tg&j)-c*B& zono*hwN~QqDd~C&9lJq${4DWwMl61RU4YLP?}1hGVBIc_$jY0-xvLS*akf`FlzFygj;JasCNqnkqkU81esTt!c5t70w;rw^ z^I=%q07=)&{VR*0$_`k$bu5kqyP?YrBRtJ=!^N5T_^0TJ$)Quxtj4)0^{e*#YEW#E z3A-!g=;q0w*Ns==X_OWgYzxLP+NFld+oAqSDeMj~*EA{tt8L@4^1L+$DO6%e2{n?O zs|Bfc!?-PP#7Qk}T$?iu{&Ng)_p24_&>}b-w$nKUL3X3p%>p5-?h!q+h`K*WEG>Z%o>3+sVOvxM|PPf9+SJ(YFLRJ&Vk&jpNZHJ zuZ8k7Eogd2!NElvn|`>$FPZyYJ?cD8PRIDSsd&l0_e%LXM32eGy+1V=TmDo$@pvnu zU34JFeseu}oVv$dF@*D@-*JvGl}kY9At`8l0c;AZgKta`RQA+Jyq=`z<@A6~a6R~4 zJl@Y7@NGV@zNd>l6YO!5oV}*~jH&1qL=I#ame<;H?^}Tu&te?SlKi|#y58WM|HP}M zhs91yLku2}0N-d`*idt%W5o=RVrHq*4>al1babSy^zip$2xfcVJ5r8QUftn3^qu(l zeIjgMd2)Wu(%aAufA1AyV6ZdJJP5!qn*^A*nbYrD4p+l0bfcfS;e#TgFFhAh7nyH< zJq8!9>0_U-9W%;`psY;p*F9f+c@U424~#JKYYn!OQ`W17IR~n}QSR_ubZ^l_)bQyz zLf>^HpSyXl^048s4eV20F~2bh_J5e)G`AL`X7c?YSA)%$KZ;ndd*Z#SF1&KWaVDQ! 
zBhGWqu@APPiGJ9T;b>Y$pD^>e{wh?l-%hVwa}7cdG3VgT1!4I~8>_5h5fr0^YmF{w zl*`A|fevuqK#g9v9Hi_r$JxVmm_S{xHscPBG3= z5BB^%eLd}2csf}FY1^xjI3yFxdey_~)(=rIcCY9tZG`2Sx#(%Dg9O$@wjYbAC33=F z%RmespNR=N+E|!dh5e_paMQLH7el(hwDOfW5kCR`y8bXvR>$sdcG%yRfWt~I*vt3O z^2>iFuCr1L*eXLg{3jK#ZiUhqUi$j_!=XzVU8(!bp`G;=0j?rE!GWY_DV|% zMp=>1KA-|)UdEw2`RtOe=Rc|go>^WJc5Bq3Gc**N>-Djg+O|ct`7pg^jZu63aePk< zu2~rH`%v#aH3QE_mr1moGBV^+8;I`ip(|o64?%NcU z$C={Y*D?%FqCShfJW0Q&r0b3T(gpeNp9}Bz^bH)EibYo1kRC^^XvZ8RdRWjy>x&}q z80;@6o0|KO#ePNj@~sq=y&j3*r=E)*Nd`DxACGZ1%ylzxL2Lg4bYji-Q9S~k$7i9- z5atJ*u7}*WT-ZOYXSTQ;=H7WLdQR1bOmZNc1NG4Oh&)F60~c92VWWpP^D<+hy+e~T zu1aXXptetqy!6G-#NF+8#p%N;xb=_ydGch(4z)u^XYR`*o$;|If_{n=oP-`!A}e9T zz2Uyw)De!9g^bR3ab>76ijF40HcJa{U%TP7M7%Qs9gJG@AbZRSEARPG50i)yyY#Vo6ON+(Ne-&&19g4Ma~2 zM)!B*ZmBpy`ey+~GcWSTOg~Ipo{HYLEYa-}`IgjIhv!rwcw%R?+DKtk^#pY1zH_$} zxvE?3u%6ltq3Z<4dG2^MD-OL&sWa}S8iE7y#Ts+0+%&XpL81+DGQPRP~HHi>>PRhnOqGS*I zPtztLevKn$Ik8^he(&MSdc391dFA~^iPyXDIsna|WKdQrjnck)`1wlU$3zd@-I9l0 z{oOIrClr6Cr_%3WiKkYjFdUqYyM4>B-CqGWpI;VlbJP)bJBXTCZS0P)!CkXFtnNww z*%@j+bW@-}f2vJVInL?QSKi3}ORzFl=)4tayFTszaku~xi6T*>+^>eQhwVa+$$eiUJi&D9)Zw1$vFC258>2n zbS4LD?fF_nk+b_-b~h1O(|_cg_2ha!wt%0N(UvBc}GTq%b! 
ztye^(haQG?@rSa|L7EC<9tpA zw5dC}(n1dWZqCp~xxip`6ka!_;qwp+^jTYv!}RmUp07t+YB$!iTf{?0Lp=VU)~i+> z;r@15@s7H@!`8T}ZxvdM?$qz;NmkEfOGZ|km7{g++6~+m& zHu#=D5#Wt86XWr_Kp#VUm7%RuGUh#?4%fan9`61orX^`(s0iE1j2qcg8KXR|i`=>T$Q>Vvb#F9a9cYbhq3cwHPJMuOzVCcJr{T$C9^c_o zbjc~fnCIk_uKXviMn4yO>C?Bo8iCl^I#BQCg3$&A)O@;PrD_28V;OL0HbV5w8cZ6M zgG}jK)ZO?jZohjfD#mJ{PyJ+gPu9hoQ_OMWzT92f9`p80!|ulkxINPx5f8|(R>{Mc z@#Wavdkl0mIwEGLG<0KCp+C+5o=xN!TP9;kt|K%PoZwv?k7ew0O=Xtguh)qJrgom9Izpk`}v{n__rtsO)2#5_918K8FMMgU~c|h4Mp!^Xnofa zSI7;TbYdcE_}pDJcoK{p)A3o}8jULhFu!R!F7cYJImX)EjP-Kwd_Jqlx!lGzFZ0-@ zdE&N_DYalO=*s!Z;XpU^YK(wV$qd*9nj!Nj_w(m+QD;zty1#v(G4i~4b6E!(y%TV^ zvkuHQGpnqSI`RZ-q*6;^pp%Ld5zIO7QiB2P`K>xvjd9;QU|IGDv2ubwrtIQ9TB(Vo zR`ORT=HuFbPB7a&8E$52=+n^@AzJj=cP>C3c~O$C7aGpoh%Mj6pB*|l?=l4`)QMbN z$sX&lJPgujU!ybtb?=iQ)e(56S&b}f>TClld5z0q@s!8H#Fd=3qyC7AHbQ~79eg>r zkF#)w&)9-m4~i( z1&Z5a@N!%%PS9WXHns*6WXiF06LrRST7_Zvu5e@iPZF=;n(f+9USyB{8sv$4x9~fV={Gq}|DHy=aXIz4A~jWryJptg&Rv zbSTZ$#r*SC(03cGo z7a1ZzvmQxTsY~bnN7D7Iw|B#g1q#$7j)n3PL*#w3!0Mf3_x~dYB$xU%`*18BGaVJZ z*b4x;d?ERmra(TpgvWR6;UA%T^Sa0^(L_P9FNU{J_uJ^eJm~@q=wy$rCqr<1aT->b z0{=~>?(9My&L1sB_zHRa-2GB`oL0lZ>EsG)o4{+l6}l`cgxq#}^c^0A&1$hIU1N;T z%y)W3?)R=08lHh4`f+}5LxI5>YAJlRKxx?+Sr z#ick}o`oj*1iZih7H^N=7o!3-@Pd8)Um>jLrrX1b`vUv%cF1@+4G2oYBNGdZqkhHE zminHDrI7UaB>mi>7zH@9uQ+_7Iu>U;z3x2jb!9SnQ$Be{ow0 zhJVjM(t|RIzh_}?M=aC2FNRZR{A2DkYTNWsqR0J9w<7#b1f+ga_jHyVXB&a)oN}0V zWezC!$&wzQq`#k}=Vj2oE+~5ZL;Siv5=HMVaDA>JMx1j*cjgr}@VzVZ%Ny&zCShBs z33*ZFxIQWcxh>_8^t?!Vd}YQ8@L2y^tW(y)$bvw`z0$*>^DapLmk<434(RF^fVPxW zvaPHz`f4>AyU_D}g#LFQCH55foG{mhtA8}qGnkXA?gafE+3-GWiQ=EkUR{`oticu- zu2F*_)AHd{PytETbMxzkUf(Z^Mf21k_dFC6o%Imdi#!3ZJRIR1!SMPdyuKWV+g;6I z&{P5Ek6BROLyp0uZs?f&MO>~{!HXWg*#FZ2GArq0(5L^A$G2B{3hbkku=SB87JaFL z!-srqjj1FbNV4@LJui~3*DS99bL;Oyk6h(E?$2^p>LKW|J(7xYJ7yzb$G)h^SZvLXpb$_??wn;cJa z)8;uduh&HyeQjTfDe?M9{2mPpvM#Fb+hR2}YIloWaMUaUGGQrLo@0n2JNkTssngM@ z!^V<+*nYoNxYtw1c`OiN^jB{Ub%n7;KI)~omv?eQ;FT<-#9Lt%{R>C(3-Q^l5*PbY zC-?8E$lRxg*9EJBBscY&yem27xtfCtan1zP#4%%=D^Ry2%FN$M~unB_p7z2KHLQbO_#;l 
zSq8Whoe1M{E$(xur|if3^PL^6XHd)bARXuL8R3~$Egnu|-~PW^iPw`m(jq!_J|(Wj z7~>UthEmK)|LM>EP);69XWL=st1$Q_%|KASG1lL%Atx;l!^5j3{+^PqS7yX~pt9Fu z<0380c8o(@e*+xcItekIn3pxv8OePD=s`%w#zjECTlEmUPZmbjVSmVI9NzFm1XAbL zDW2Cl`}Cpwt^bV4!!3JvTsR_-bu|~GkFpM|ZbtmOEO==(BQ&@_3d)y>QO8VCS6GFk z>vW+smUEIntOK;Harr&<`u#J|_bTT#D;hDhdl80CuE$^vMf|O45tZ6{nA)3E+ zLRN0H8T2$0aL|g^R!2vC-9{f!OEuQhSN^q137gk#5d$<0v0XhMPd2Gx7}@b}E@tD> zH}+r3JTRHe=iJlmRkYV5eqjlQ(r4pu*Bv!(zeKMfZQP?q?Y@iwJve-x`4?hDq67B5 z@P_)|B=o#!ifr=g2QJLR)9@MueHn;h=Q-;d+6mi-n`2N1SA>$6r!u%4^PaoHWnKpU zo1RF0iWByPSHQ}-5bD&GNV=YEy8@>DCyQP>DkwVXf*UslO2^qFT%j0OjydDrsxZiw z#bT+KKy6C}4pZM^aF%mi3BRYL>y5o7&z|l(5kEl(rdmPp4$?)tktOn7n6vWC7P}{S zLUnO84xZA+9py^w&`HA{`b_JFbb#`?U*cKzL@0N3z(Mw#$M>^>()tn%NU%rhlQ`x^ zL}J(&OPCv#VB&~;l-HLbXJmK0pYl{xP2!w6H4=*pRq*qG8-8y~f|57$x$`XWZEpsy z(g)phT|HI@q+xzL^|V){F`&;o(Y!(pJDb?^8f3tX0Oo+rEyRxYNx10|gw@8`h#g~q z$c!qOwr5aZO?~XA5s)foo?^5fnvJK@%VP>34?*ThK6<-Yz$DBLLm!h(b&?*tm*oiJ z`>zvqK5ML$5pnCfSlMm}PkJ{(l?>7CB>69AnNvmG%?KlBY)MOiMWa4wc;d4;3vSs> z60cWaa87hje<0LVHBsgnja?$~o8>oA-%tGfU#_02>jv2dYa20%x zO87k`UGLqh&X{=afmlynUf_=?#0FER>R<tGC#OPdoJY;_&d_)!? 
zsEggIgvN{S#eCL{bAJ27bdv!+%;YN`ETlHb9uwUC&=#A7!_Uoe>V6fTQdbf4q6*hX zeit7e92dW3busV`XJYSl;qisKB~S9UcaW1|8w4-yOspiw*|(_%Ddc;5h1X+x>{l_% z_M&)nj(x?3WTZ3y(TALwfPUmIr#R#8^M-bNhHp`1UM$j*yNmkbec zArqaMk1&_>j$WDUSuJJG4ELn7-=|_u4`6OYExatqZP;BS@p>ElgdZMWz z7+k8$3};)&b3ZYc943uyAFN!Ph{8bTvg%eMC7rBAy()>nr=;siy_7{<@iWm{s11#t z{&+CZf~*;9G-Z~+GmxC2Jz@C#JPFz#Id6MjiiPx#J&dnJ;_$P=d(M8bI64IT#-&1A zNei7{+G4@K01CPH!y3$F^K#(TkDtTOME9Wqt~bSg_SqwBd?9^Q8{I3caJMuMft~GO z*Vhe}k5Z6$Mi1vAN^yI5Ds@C<5`XUUwjS85a7tv$8g3k=)~yu}>}4Xvw@yC(N2u1l2uONPfBiOD`5f-fbRAp7g+F z(-yH)UJZ*)eb8yAJ`}C3A#a__tZ!@VSni6oC*$#Ulqoz9mg3(ao|nDkzeu>=q5)D^ zCHqv!lxd;sT0e9jriYjA)|i-4fP1fPu;@q-raXv+?rL%+H73-dlDTu=LYyEy-B zyGUj~zZ<nfq6yhHS?J|z4; zO-8**0k$9Idw?3#&YGpTOfTmylN4+nGy_*y_cqRNgx;_cGB_I0Np1q-vpU1*Q%77l zZH9TT$on|r0`KDma31D_!f8`*mwR62H4gZZSBZP3`IxU?0qy$U=(6dj_*SllKMSl; zW^2G2)CD`}9U8)Z$iZk|C{B%newq`OJS~ICLiW`Xi(uaRTWqj?FYGy&>5&?UnVe^J zTH=5nFZ1CRNA9FXICYK8>P@jkb5R9K%?dHop`2P%^1=RI7Mi2zx04OVrdi{4452<%dUiWFwrUM$59t3l}W-y89Ghbnx?cl&Wg3$5RcA#*edr%wP* zimXHHLXkgD1GCafarjLrnzof=M}C{oe{)-8bANR@KNEp2nut+x#_&^xn0M73{VIbH zpqB%gdJUYW&&@qM6X7O}(4f9R<-IgQ*Q??3dOytK|6kE<5)K~C!){M|dd56)J}4Fw zBdxF@xen7@^Ra+_1WDIZX^=sp&R5YPL=XNs(Wv#KPvDRfGZ+hSeXA?>-u1I!A;-@->p1<#j|=b%Tw&1hR3qz+_7cPGRS2*AQI@dzGn3UP=2 zHZtkHaxOYZ@2yyB*(%Jxso|@P7n)3skrH5y|EPZna+f5_ePJFnKhfts#U1;9HJ~)Kob2lw zyo->=!%4&>POof(8t;Fk@ z9+E?((q|#AsABaBzC-MQ0|#uGqs;qumj`rH{9$dE0zn;l#@rH^M8;xx75&UTdn4cv zH85?eP>2aYciy)%URz;`aXz{=+Q6ULyAvYA$&aHyK&A?bVe|?Vv;L59y%87WkW>Fb zXq&2F=@w6{3^vCb<^W7qAz!1b3p|3u(b1860qQEhwwGfH=dd!}m}?XKPbB0$5~{QG zPyX=9`$HkDA_Cmwr2t;1YgJCD?f$f`cll!Rl%S}-2`a__RGvnUm3s0E} z!?&hb_1+GHs52hSd1YF+>1cC_gVI+MoYbvA_i@?ynO%j+Ydd1rimxIfSp&WMO~WV7 z-wnG{w{(Ggn*)=`dkTP6?hHKLYyfBSF&mC%VqX^Z_bX*_qx6w*HRPN`ClnfLdU*TK z5veNlTTuUUV}~DfKV{(C5`9eQUybFFnaJWkdGxiu_}=4}NFOr>SyOy)zDX0YQIlYk z6N=t49vJ&V6K$TjY=KH|#)2*nLI_n4jAMw~s zzK{H}3N-PaxSF^xOw)INPeqE5E(f^t<=?0ulQqP5qfU2M<>g0oSPPhTtjoH{4K@A=v*A+ z+^~yLZzyW75}KWiIj6|MkG=*_jJAY_Rtc8Awxh1g12)Wj=w(NaO-U8b_MlJwcnwaf 
zDPTT**O%q>(YI$5GGmQ#|Chjx#|4<{#OK-_YIsK_V=B4DFY8OObYT>#`8!Cu-nFnU zC|&nmiJIE;qys<%vcczRR;@fZm57Q`;8rjS76254)`p4 zUd&g~CATaJ9n&pgZEFpMIi+yu>42?wW3f;-5!cBEsmLv-4zd8BmseuupbqpG+!ud& z{oQPtijHOU)y=SlZ(2TLU9FIt9t8EMc&swBM6`7|W{%>1uXCBi>kX;u2fK_bLVJY{ zt{e`)ICm4sY#`IEnsdA^R%Cij#m@9Jj2r2Rv1R1Cec3l8F=iY2cJH*z^!b=kE};vunf|SrJ(9G0rfi6pq~?XR^^0{tMrGD zn}pRKG0EPOf{)BE&h*Bj zIQA}g+vE3B@>_=#;$%WCM#%I;sLE6EK9ag#pFsTOzE;Vb^Rb8`xO{fPPAMbtuhB(8z*rUA|Z+J|z%BDhTE%&YejL`c6 zeNy?%TH{=9!|nkX5co!H+%z8dtbMSCbD)*%2m4ab?iJ?@*?n#Z(NBlVf9AMlScUel zX*d~J4K+OlG^BMyVxc-bsx{C?Us~c|XL3pkkQVL)gPrv5hsIz*lr1KVE=4@GJ-Rna z;PK;!=w>gC-Gx%==UJh6N@Xdtzxs7`DR-`ddoSl93C85haLQ`CQZuIW8tO znc}b_HK=>2pSf<2`>cOE6!_uRg#he1nU9l(BXQsneT^TZapqDZ=4*Zw&&hGgpk6z` zKMa}Ywa~lR4qiX=;6VLu+~cKRqkrXPZ^Y{#6qoNaFW530*&VcT zlXXtN<$P`h+CxW+K4$A|{9Z_1#SrRcvh!dZSchXbl<@H=pK04hLHDvTHAlv%ndJfn z$2{!3?Euvafv6i8hpZ9g-hE;|b2s*V-I*=WYy^95W`+qW94UgyjA-lu32m2f!;Lqm`Ro#_br>L%ikK|f;VjeC_R`4 zjq}E6eOQYHqnVYyu}7b3HwotszZj;m&xB zwAF>$_Htkd~?h=cUrP@|Nro?nj)DO~Nkf=gkhd!~YFutTBn$Zc9eh zL-PEcGtlK~IleCcBP_K(2+eGDJd6v3%v}SVF>yoJX$7dA>4-fwQJ7PlhfK%2yGbaH;r4$Vb!W`( zx#;*m7NlgKdt~%Q7_9s*HngiCvp@54OazwJIzwYyA)KF2f}46g##g7owAcbejLEg7 zPb%|V6`ow{kI>Qo=f(b#OLxM@|Hpoyu8I>=BC*oZBz9SVFg`!*$b}oM zOds!Sv6+3I*6X1dZqD~n4!tTzv!Ht14)@<#V|GIm%@&Rbw_uJm>xhhVE||wPeS-|Kse2u!jAT~$m0F3{d%L#-KCU<` zN*s)k+%pt2+^H9!2JcdQ0Xj6(68p>@Xa7axqMrti+$h5S-pRN)jQwG9@1)0`5?b7s zTF7MMG1GB9?a9?mkC^r%KDB9&dq z>57KQ#xxB6#ky-=1E!A3$M^0tu`2YNxIg2rNZGBAe}a6U|H%72%l>?J5o8mc@oarK z_7}`xf6xSrYHFFELLRI$b2!Un;NA0s_)8sM7v(^Bb~S~?Eo-#yD#Sqp_O+%5V{l^} zWXQg}dW-pZ52=tU9ot87t2-`P@f9v?=S zqrugfUCa6EfufUWDTc`f8)A~{S0GN-_U{p3NZKy%RbeA>;p1D9q(tZLK@D&(g=DVD=-)uU@O?id|j&uw**fh+|EV zOkKGwGSM@-5=BX`#hrhT*$Y$0(FaqpnR7$y*|vx;FMwjY6VG7`XP(x@cGeFi$5U}`eg(RH z?1$f4Z9;jg3RJ4-&(^ep(_Aa0(4T6#(GDsKf!I1e0jH(;_thonVw{VCy3Ey(aJ|pE zUC?Q1yXd)W0)m}vv6A~%jj>krTohuIy)Ck*1mG(<-6e`2?}UXS8~b_*T+3~Q?!$cao8N%mmVd={;}=5z zlsdVlA3gaen$ko+Wr`mRcJ8}=Xi7rzq2Z)syg 
zb|k9xG;#E#JM*s6Fw(>oI(^9*bIQb{jb>OK*o5;fX>f{e!07ohxD#?wO!d^ki1wyF)lk;(RL@5+1Iw9oc7jb$Z`9y-fh&<}qWo_^# zD<5C)lW#lB6WimW@$sz=RJ56I(aIip_y6^!x*^#0l@LQU5irXi`il&p8R>-b75UUQ zS&%mpgq2gHaZcL_4QnbPyQ%~c4$RTC{YUoh9nnnx^X;P{2)v_@d#YBLeYFtFZ!%w- z^Of#9lktii_Vd@t0UVmjY^f^dW%NMK&R1fG$0)2daf1DH6Ua*2A^Lm{GK#1>{m&CW z?Gmxs)d=yQN}xP237hwqq4w7+G06C#c-=z>^LJ6-+Fct{zga_7HXn2N{(Q`RU-#YqrcHrtytpUL4DglaXRd|Sb9?(UZ?#LVrPPX2ORNi9d%^i9kKUP6q1(Azyv8P zto*?o2Q~I>##SODY5=_B--vPX>Ik_p8U2UpW2w6{obvMU?XfMwYB=LbOvKnO)@0*X z<7r|(wmsx?Lc;5F{4`mVB}_Yz zeL}r(#wPYd^n0?7c`8=${iabDjjnvY>J78R&)FsT>uQ4)W>c`YDhdPln_(>L)Gf0M z5Ic~%w@PVv9(W{j8+CBYG?>f=@=N#II?1UbtYfRDjv1e<2!sPNAb&C{Cimm z<)k)|H0F)i{zn`A&!$jUZv<=d`6~2_(B7T78q7y74v&VxDq}R1)FC`B2Oskqu>2?A znGKJ{L2__^eosYpCigc-tzqp_h-aUzaqCJX+B!zUZmBWWIg&@^LqGlOTKov^h;a9J zqA&NmotI6<$TmGF{j$K3CHdHY$QGTalZUi18XXVmqGewhe3mgkZchc2^?!*B{T31Y zR1bIdMWdLUlk|?3_{2Hva0h2hX!OB%uLRgknurlE%3v=NP^-zl#^c_ww7n%XnG@N) zLpUmV|D9V$E#0;v9BQ${&4Zpu@0pCrzs)!stj0uB&eT20RgLWguN9BQ3V#jM6b0h^ zOZEp#op5VHKE{9Id~Ja@G$&=_TUSF28Bhhq*Qq#2eb(ojJOC z67*iN?jOcH0Q&b9aYovntBcT04e*_oh7J{t=wBp-=}Il)ubKh2wMXHF=P2?lnM=Bs z@4O){ka99Yh6(jcD@UQpq8f9bhho;-8c4ccP;m!bZM!a>&(}iV=TX?di#$YUCuI8+ zp)8vF!^cxGU@JNK^s!xZsK$wH)FE}IK1jmjlXSiOfG!w1paZJzjY7?R3mmPPfREbj zw`Y4JPsrRR&&Icif12 zLFO9%Dq|eHAfup- zjJst}luyO%%T*Gux4PS3aewSXVc24TL(8VaAWjSKAJ}0a_e&2s;HDIj>qW%sl%&2>}xeU~|X@$uGl^wl@x*wq8&yF2|^< zQfT%o#^A2q!bp6WuY(m)&Nx0JpSc@$SXRPpn+<7L^cm2bUk9t}JfGC>o=sN5 z<5SR$Ph_FaQ&~#ILyu+M=zT^q4siCjBBQDp!6ArrL z$>DHEwzUBqRX8tn$irS+_Vy0@GoL33*J4dE^EUM%lQJ>ZwVdz8E^zWbC#JmB#<3sM z;iasHrZfxKj3tlLiuoAJT+x`FhT}an$Y*5DPd)d{3DsCt&Sw&NC3gm>V?MQ3OH=f4 zufHP(Ude-d8U1e7e(2eY497xKe6Fs*?bTV>y^Fq}C!O&i>#>+}P7CWUPlNU_9zXZ0 z&I@wzzbn;(n7D)RR~6~jN4+OUCTc>C>}c;WV2m<%6}2Y&X*^*6xDBxhV(pND-T zU2t#eG#Kg3z}X5jsKoQR>ywF(=gZ+GC5vwN-iR%mwK0B=59a3T;efd}e5CWqQ*y+W z3uFh6OGiW(S2CQd@l3xI8eV0Hy75-HTzVr$wN1pk05=4D*T;Kv2WXF>&-;=C-fKqU z=#m6TO*iNFD91y`Z1g@@jxWRB3*~)}#3pw=a_-2SGhikLHCe;ir*$l42177&j1{8c 
zXG|Z1YbCU_iuhYq4d?oQ#D^!J#X<#jcz<<+`X*znQgDQfc@bt!wa1VPF$nQXM7PVCSHek*&fA_B6J!sOO^|>izdBc#-cNHMJV= zPY;It&Nk7^>qzElA{LS>XTyEhl)E3{eQ=uGeh%V0>@X;g}<9d*9BjM*{ z!@A+-#&6=x)rr6oUlbP^z+$a67T?Z?{2k8kgT1knT!SCffgJIzfI&qT!jvi@Pj2Cb zhu=j}FIAK)+2e(QA#xP$n4MUFyty{;T@{MD^yzrLmAQbWB~UjlK-Jq~NV?u_WjUzc zc_pqd)xh5U{#eI6&<>T%Rp&gl^gmau*O`LYE-8p~Aty?z978tEz_6iJaPKIEmmgn? zzvSH|7*EDO9aG#nV}ohbpH+FeAUG<7nKa~U{4j*nw=x_xNJsA2DoDCs?8i=66D9}K zZ7N9Q{3e0@a({C5bz1Y#`qK^JdnTj7kGfwsW8BQIM#l7XOr2c=K+vOE2iLW zLK(`boBEG+)gk#3$Skr$i$*X8{6|*bXcbsUH)A1vgQr!SA?bROo|h$ca+u)xTs*(7 zfeY<{@HaDuJU!%U-;3a@VUJyP^y%kDV~wT}9&j$beGju~>5G;0d#>0mhvnn{2*t7F z%+8<8XQMuj**SB+OHW*wJ#LK*!jFhJ&VQI$XjFwkYQsFt{xY zL%JAYg0B_69xX&-Aiw9qV0^w439nBEcuaj(==f~%BKUh$binuU-$L%!Xnbz6z^=jc zEXnXaw2yhNveY>~jEC`=ICQIH9>D7o__Lq&wxtxS`n(n$WS)zchnU0Q6v+%#BPjlJ z!~T!tGmxWn*8l%FJIjVFx3&obHevyGU?)=2y@u{aK~$s!q+3C{yX?a5?(S}MV_~79 zVz*+w^SnRd`nuh^;ErpZF>_2Jayq2p>~9JL7cp1+Q9iEjF2}i0CCKjlA&}$0Z;6O*!Zx--bHdx8kA~bFZjd+kajg<1(qQ zqMq2jA9Z*tj8HBErn#6}In#`UH!_mzKSXE??=&Wo^ z3gte)lk8CYION_lhh|{~dM;*81^u71J>)P(`JGt(c|11tb|p*42=A3#Fob{ZUSY#r z(oi_Fo}a~M=_9{F1k>l0WkM~ZS*!T4_klQ8ua3C;(~&q#AJ#jq(MdH6ujT0L>FtN) z-EkByP(pw$1tEmM!Bw*6p7B6ISu(HJoWBbM6e5VBX zS(;*n8uK3eC!=o@Id2kw&albtadH1av2rAH^}6!;x{N!gCjwm>IP0|GJ$NkuvW^M# z>1$&Qxl+zs5^bXk%o@?cu!yNR%jcjWHTpB-vN2G}2?6)*ppl!vx=>*2 zib{CPW>^v_xtQ{Vm@vr1~WTn>rFDdI>f-aRf}xvYP3vF#POTe z@YLyyJwXk^!b2ZDXNO|eL2WFl7nn3D8%|{IjsN0-X3ht7Lk+QHNHGSdrbEHK7}hfF zpb&mV93Gu zL(JV}gZF$^xx8`5QDMzAM)>}7M7UHl$F6_vEoqN3!(@xK2W>N~pqiUZ-Q*f(LP8N9i zFAhuS=Tp<*{BunKS_>>;)n+D+JrBc%JTol$ME#gw0X|IQ9>Glp=j`qazhPSFxq@?z zAX60c+?)})WR-a#c85RAnGw~qQX7AAm>;(?9@*UEPQKX_R$hO^$r$FX4yCT&Ko@({ znVZ^$dhgzLc*`1FV+!9$>YXpM9+=uN2XP;Z$%*KOqw3Tmkzd`I7KDaA2AFr$9ANs|3yVeaXKEaHFDPd-Kc=7nbsM^1dBSVaTR{iU1B20VmjN}W?1z75;n567tPk{{ z=7yX^KRwKBE`gyRpYz9A4=x>o5bw_DGN%n%a@EL*vLJ7exuR{T7aQdW?IQtD<=ke% zc6%H;orh~PGB9jX9O}ZP4-}G0nGQso*@Zo5$+_K%8#X5(16o+*J+Z9I~eRpIohj?}b06(8Sg z!#jTxvfhxl#=c04pR-S_mqs^F#CH2c+|x9t&z5sKi!^N7Q-p}s+)Zz76=~Nr(b&fh 
zdky$K`pG@s49@D=5ALejT5`e)7Hc$Uwav~*l%-=7`#Ll|79rq){r8Jn5sW=amZ zF>{PgWFw7ysm4Ftk9~-Or;j0;YKpKqI~5P_mtbeUB2Et`C!?NR{yWp4)Nr&3ic^io@F;e~ufe(a z6Xb$^exX=U#@Z^=9CybQV_p>J1u-Q^=splZAxA`v4!J3DnYcS!2L_yb7aYu@KgJTr zbnS4`G8PU;O!3c;9LR&2P`Fiw$2aBBPv(R;Odf<{l*8O zXS0!&McprX;+OevZwRS?{hoI4zWYR&(>GS^7ziKM6P|zVFoyn|3op1Y?(YqcJ@j>+ z(8H%*rSSPqErkZ>`^T<6vG#l0T>9c)Dzx*-g zH@2&ieBJ}4Hqbx*Rj6@pwv~F(-pzVg-i`HKRu+!#u)|Ne0PMUH$NJ6^jdKg}GbJ0d z$5Fe5?_$A^x8kbMq4wP$Bj*~TZ4dS^zj;2cZ}db&uP_8mnS%s>>PBBwVCPHDWmKwBxU4hM zCsK3MZ!8vE)Wcvk3lu7H|7uKs{5blAZwJGDd=#c{w}kC0-eX(SVZO3Ja?WJ#It8pZ zltqq(8muST!cxH;8-F+<)-(72xj5d`VkB*h!s=Nru;BMR|9Kvsd?-Z2GzCl~PdxDQ z1XvC5hN7_<_V)#TG~~gnm8?mxX}DU>{M*&s7rW)-)44e?*i?k(rJdn$;i4F}*9<>j zbFONqgNRN*zd`w^9|5dRoJ@~ZER6Yj?iCI07@9!;9oiqcj zA8j!3>o^Qu9)s*d>QEd~j$XO`aN^wcmVY}0b?t@~zs5o}-2}r}6EwxSpi?^EtF=B* zT^U9%HS<@<^O&wzgzke=@TjUxa<1&deeI}yl!HTqDo&sDgjEu4L0#Q2-Xs?fmQ6%a zRxm^ib2yZYam%_CADD3&pj?4dQHq%JLUr3i<%wspaDK0o@agGs!LmP~+yGEdAc@Fg~);Qih1ad<#DPzR?<#!+zY4)N-kbF|Z7_lN=5RMh_^`K4nKO%=ArnMAVh&jRER0g9 z!lczNh3_%;;qsbrcJYAKa`Hx5*F`WlL%x?aFeZ|$*>JQhvP5szbH7{28*3=Q=HmBa z->|>p&a=@dxWjzcqOlmd-GMb{9QD>NctO2(7sCWHe8)m}MKKN^oq-8f%*p=uR9x@3 zMtmuCMHBfYmaK1P&oV*oVrHm6HNz&SSSa%SJF&(XcgO{6>y?WZ-%82n?cUV|q1(QS z{=UphQuTmw2lmrCmIx=Wq`sc}85u7O+cg6X21eM>tq|%FDR|O@oZzlqu+d)z%S$zx z_3eg*)V}2Rc7fIF0=#`>j}BX7@X{(CXZO3}7@vRcOAApjfOUw(`r5Tz9_i&@#r=Nj zQ2ytEtcymd@bt#W(ezW6vH$23hTQRqc)8yL$(p5zA#-{8)qM7Bt)eOPjL4m5ilyvn z2kDX9d*24j*5~8F9(%^FN9ak9E-IX&k!M>7!_i6OM1l z!HRy&m3{7uHOv?+LA|hTq{oVi=x_$G>!tXckH~81;d z;Tso%wa=2^dD{qkZ!%vwD+AYzDrfe)Ek)EiQ7 z^*tJcY^dv6$2w$s0k)q{#?B!{lFzG;ltskX@1oZiLnypUfFpfN!>EtFUzUwGg-(!{ zoeY2OA6{`!x1g$uGyM!y|EiLF-aO~d2-Im3PxfijzcCR>~^-QcZ;Cy1g8J1U7!mDpO z<{u#UP-5RPUsVpKwy(szf!zBR1|X%H+%XT%C4S@};EfBqItL;pEtO0TQ|ecrMogqqpc{&;#CcPdF!-NB&@wGqowxF}yMwZ`a$yk~;6ld+E~~ zQ3Tn`Z^b{Af8xaU@%U~*e_JPAtWc$YdMfv+^X>6|05!J%BH=^7Y-tPc6NeO7Tq}~y z+m?FX&F}Km4Zaj#rjQR3=Z=-+Z0Ihx#AEV~H(7K3w%C*PP83q&3}M4Pr4hX)o@)yv z|9(-CalZ1RHG&!HV22Ot-W^H2b0V^nCWha?B|8J5uJuF!9`eI 
z|3}0LdE8Oa!oN}vtg$x1uGjQ64$njL8}b<+gdyf+I9{8XqH#a{zAnsJ1fQ?hQkCmEbEtvPLH{(e?<@Qi4>%3n4?G6ay-~gZpML9$>+6KkwvOw zdyGC~h{G%0AseTS-CIDGDZlU3o9>JCL}gJBOw%;*E|q-Yor!q(i~1gkzn|Y%d1NLv z2p=+H7PE)Bw^kda)dKC_=i^0x8)$xaN798zJXx%X)`^wa)r=8tuRZwLBX-%%swZ;khP^p!W~Vy*{$Jrk~pKZ#exTILB_^vZztP)ocD z;Qzxv_grp^6qh;p$2x1Xz9n8dR59ml4mRJAes6ByCWCrve^*^sM?s&GaO#p5yCgE(JDK-?8;%K)FdgjQhlz2~jDYipMf;>#8X=4Og$%V|l4kH^e zj{3>(e5NQ?PsGiae7%n7<4yNcTuCKwhW*hTzMct}e+f9NVmS8>A^x04uJc0eqS>%d zvqwSL6v!DRVH)>%r-H~Yu*|{v!>rjQ@(x}5HHk4HP2%iF1Ngj{fw-daxR>CGkCAh* zYb165O9C*K{N1x#P1yohV7@=+w-%*{y4W294C{sL6kYVm3`fb(|LLL~v2JA+Uaq#m z_S>Eioo6F(t$?|GIhrSCV`@+-q@E|0!z1-PsXU#3XV;1D`)`RY^C#o{;w&80AlKI2 z3N1(Su&WpdJ)O$EF82@2Wz~xO8(acVjY;eIZNsjV5lLU@k1@&CiEZccz$+ zy>Fdh9UFz)%5mtv&XQR>%!U1(#i2*BaIWMT2He98U0)bqB4?GSs7E(o)s)6q(%P4pu@`jx52oX`Hr z(;P}?<1uzb5>9Fx;FaxMXb#OMtF;zV&y&h=v47AT1>D=$`RkzSP8b{;^-w2l(2JfL z<(}3EiEzbZ>qzoJOrW*89D$jc*gcqgKwVihD=@DviGCN&c&M_jaO-N1s`FV0IBW}@ zT@!INHX5GXmoDDLePMPwI?Aygv~3kS`yPv^!3HS#FBN~u9dqB!TpI3==FneNWbKEu zrfGQ1pX2)IYCLqIzi2;wA@hd9H~6t=)MDM)-XF>X1s)YKcjp*$M%vh*#MBE*$ELt5 zo6OS{rDPH1prV+0e#%`@>T*w9;@o=>c`V!QH1XkqBc}OfA>_L~3OBgohtV9=(`R~` z9IW$kCkG8NF6hpghBiN|CvThLX=yp+{wCx0 zrAo>9aVzPcHa_xC1Z|uE|GU00-K~Qa9bJ%jHJw~|N32<3jZ#0(O&6OIqg#QK7gC`V zT8@^Nvdm??C?c0@VS1H6Qui2R+Ye?IvwoR&h|lXWQ*i#Wpd>4 zNIXv}XIAQY&xXoDapX;Lwvn8t3xVkT!yEjHIM%%PAv=`hF`X>!w{a_}($T5HAy9KifbIFvk#m;pT(QjBHu98z0TTzPL2AL>x zE{Fc`*P`)6v(U)WLfdHpD09%nALaseAIy0H`EW^3gE9JCEM9ImL)T<-%H^}ME36pl zYyXNtQ$LF0a`wH-O2yg?D_n=$GUc(HhR8y!AtjStSP20y?zRUN@t;8lmR*pslu+68PIcMKP~aRsP^xL z-1MI!#z~uc6<-wHU=EzA9X1T2&$KuBX-gx}PhlqF_L-ySnqrg=;9aLeE^Z*}f@OEb zZU;+L-p|3V40TM+vP0?ST>Mpc#Oo+)yvgRi#z_Olk83fKJzdhZxsr44mL2VjSLJU- z|J|doPSq9<=p!rKX^Z3A(sA?^Srtn?QC=H?X}hWAbIl;&sn;}`JlnvPSD-{ zRxGI=1^Lm_aIc#Ul6%;r{zf4z|8vInvojzQ7!R-4mRPi^3}4@7;_TcC9QKt(*JE<% z-I;!ZlVP~dcNJX!NN(_zefyh}e z#O9Y82)1=X1X-c8xj#R5A`6=}ED%vQ71>WB5Hr*X#u522{G5%|&!{C%?t%3U>0;sr zGi(`C1l*u)=?)=OyBO02pLR$NnfMI z6tq0i$Hi^VP}-l1e=Q!^`rV7!IH@Q$(uXVk_PytEZ&+JJ?T55^f$gO);_={D;yU#^ 
z>1!upRK6}Mj&d%zA`^D>30KH7WUSv|C~&2Xegd2nnCMKE)40P?m8_Wd)mn0?Slv6tg$JA=Ox2fRU4Zp z(pSK`bvU0dSL3I^l{?g6K9{tq%eutox|oJ=&C)&E0*EU8~T!3izJ^H^{@k~wVFicstMRWc{;jSn`6v3YxHW#L+Ds1 zY%>qW6uSiE%A292xCm2~I0w=ym3*F5y-oQF1$>WT-P2nO8}3fSkBj7ar~`7p>GRIE zMyru0PIDi;xmph^xJQj6>#fV~V$?Y)Vey2|B8z$k_07Iedrco%xf3q`NXHEN+08yW zqI5_+%&FV@^`Q*igP2>mq6|~?6mYoDZ&Bg!#Cmj^sR8}t`A!>p@v&)V|1Q9_Uz4q8ubZ*!-DXR%#s~d z0>L?%RjDnuIh#yE!$`VfuNXtdwBy-3=;aq=4 zE__GRpEFFFZf)!^MYvR+5y#TB@Y6RGU&Azzxx|KE1ZH^7u|u1;7BDZFh4>ogw11+{ zX=W@=mX>0fZ)ap~JS!?+P!GH%3#+;BTF~2x`T+8iSGnNuR(E`==_+oa=8&iGtfPO^jIW33>Jpk!!g3crgvz&t&1y8B1hr ztAw6Y9$s85Ls82!5weE4dbS!E-_sNm%e7%;=7fhv8PuOR;8xKLsM|+l?0Hkv%qoKW z^f@SdT8IVNa!}n{DvEU7kxG9;jlU5F4A({7*F1b?jqdH`i?V6a$o*u5F5HW1dS|2A zwHy^Wzr>@M`@+sj3kER(&|_}yh$?#oJkBMf$_BR%M?uv;77121h?`f8Ciy%ZA*a1Y zw?XvVa#-}w)L^L0^T+ z*f*l@5Hl3LnuBkRT4WX5pf7deXBC_=C7r&(k%?&cMFU!gs&JV5fHh&YDD&(@es+uK zGe{)pdq-I;EQ`hZ;rmnRan|972#K^nOnL~3{5&O zhVPz=yiNtMCs*a~CM)c}UWA_=>ET-$hui(r;s08b|7|W*dsE~4Zh_=`MfY__vS&Yw zYvDlUgCN+NYa@827kb_*Krr{z4eLT7Pj<`CRnEwLSA&8WX8XEUNIp+0FHh>dd1tgd z#^rq!+jpyB`5;F`XzHS9gcDkyq~Sn!7euXb!`jSQSi(Hov7hKq?3Ta`XXd(n-X(s^ z?G;_~LQvF~dgUwQP-jlAqER7iCRm|1Dj4sO2^ID6NW8re?L6~vTxTKnFKUm4i{z;P z9}E9+_GtCgz^)@sQ2i5+@UE^<(6S@XG6DI;nvf|eM&sj|P^8{X>Ul$=+T+QE_d@rX zHf%mkgHkJhKY4HTg&`u7ocPBHfYoFq4*o7ii!@C zF)o$N>k1p{rwZA#+heU`5qSq#_<0e%p|xT!sevWbLWjkgDgCdfr^MuCOS0C^imP!@}XbA7Txu^J1x=)XN9BtAzfU}a%NT! 
z>-|GcXnExdzv5&pRW>32uL@iEd1=4qO{HysU!Yz#8l@*J<`t+Fr9+o3D9=3(35X?+FOvJWk$PfqH2>sPfy%ASuxVU8w#45F{+g&vxp+T+o$OibSAgf16- zuys=$R*W&i&Ko7zMBeX@kEPhwqbEkVz7aAv)bT4gn7Px2ctOs>&1;z$d&`QR4rdH5 z2xWGsA*^1Ki%HHx`P4FKZ5oNaD{hOyX4IR?#Nfjm3v9b+hZnI0sQKs$Ekh4PeV+}} zGv=5i{>S~vgnm;M>(6H*=yI#L6;HkCHE*;F)yK@m*4Viq8=B;b`*-8aDJC4lx|kq; zbrI@PGB9FhF+O!`hy2fNH6fh#tTMhwr5Vo9{OHPJ@KcAJe~csu>X)2`Z^aP zR4xje+A;s=KsQ8weIV8(>0#6QXf!%%VIQ9_uem3ZwX#L+EiX*Dngr8Zz;D?qq%F(E zueoKAnlsY9ixLL>cq(RxkWYWe9~&o`;OSOAKjU+;aFacD+xVi~Ar9Ul=6JcV1T*Y1 zpvXNQOk`kS{7anbqK+Eg=cg~}GJnSr1tnyeYdc|hsUIw-#Y4ZE^O+jvI&YqZN%soy z`kEZ}A8CW)U934xgYYTen6H;NE?49t<(>;VYe&L;sovlsc|FUe%lKrCOs2keji zmbj7c$4sXj8_o~yv4f1tt2MT0^eRF(`q+)gh>SbkA`FH;6fvh%;Z`^mhwkv1yTAz} zm_wC_P9F05U%7Q?e3d} zCu?kR;A0q?7329?cE+5?g^1dphy4Cz{z=pUN_{=0o+njbck2E_;cazRT(dDl_N_E( zV)St`g!~<^LfCb7K*pX3jJ!=B?Felg;`1y{J`=@Om6AELO{(jJ^Vp>#oO6hN)K&YI zTVvG+0Y~o~3>q;N1I+zk$nQYuR3&V!BtL7yB)qu405{#_Ay@lFR2QgWZyRT%r5aOH zZ4E>ES`WTrzp3C0onuj$O~0~YSU#e9q`)$+5K_;ZXVnw7z3&KJ87(xuio(D5T8P;O zG*Vx05Nd_k>sHJGh{Vo^TA0$d4DSXc!nR#G{a!x<*y;QXHG9eKO6E>3ow0M4x-PqmO8Xcv?{C@H)l+PPwy&pW8SXwZ9Dv1MdqK% zMAYbqU|f%Utj`;U!ZY)*=5HeRLrdVgtwy}lJs}*cLXfnaxxIIF&{Dztn*+H>&alAg zOOU7sgL)1N7NX-p-u&xUv*Yln;Ixq+3o_JWGh2edt;Pz(|T&C{OUm*v2jpSW) z^Mf~ahwV@6V|HRb&W}&R`rXWtmgs+wdR~4&XDo95BEBRUV!-YQB>ZG<H2Q~8^Yeqe&Wz#_R7gX%4&OhdkgSzkr!Te z3xOARAHjN{cY8A|%!onD z8s>}kc4l~8CGs8?;a?$l*UZy!uWb@TLv=BBc{nn4$)RJNJh3$sbxZ8Xmhi;Xlq7r` zVvHY`=->N8R;If2eX-Q@Hc#jY>sjwbXRmSObbDbv{j0BfFb9Co^NM?RP+ad#c5yVm z`Eu9E*Q3TV3tyHMp<$B@W{~UdQ>~5z=lyYWqY>U2TVZ%6Gt7=VV6CD*=BY;`pi&>x z))ZnQv+F`<@O{`Kjf*O^4^Y=1j?p7KBLDX=+{kdl#t+u8FLZ~YLo71aPK47T=B9m~ zgjg%|xtaHkumqUV8Q| zz|c<~(CinEqo-4#>P5fL@glVPbMI7M2C00oFZSJFTp^1~)HDA(DxSvysUFKH0YU5s!kG7|;lV=kk{rZhK?d?aTKKQ#USe^w{z z_CDUPg6pR&=H*pO_D^S!r{iDJ25WbY#)$xTbWSqDvov>HotuS!3SRIHB9lp-{-bjy zus>9eJd0G=ky|g-kKp;QMa($!QIyj!+531X9?sFB2gr&u)m)U&|Ki#%6s{j5P<-A9 z<>yNg_B;zQ9m*kfA0YKSnX$5PF1;(1YMB3*Ihmh1<|gqTIzJ@`X=GH5csUbZi)O+9 
z2{rDQi?QB0hkEcLT+ftau4#*SHgf`&?h3#VEnskaJ5=2$fZY)X)N@a<-ZlnnFI!`P zd(lPJME zm2@m1uQXzp3}!oj5GSp4sDG!2jyWXCMsB#5mWP}|YfPJ)j1ML;m{aJ5rpG0CXjp>u z`~q}4*$&$;eHRK_$D@0mJ4CZS+E2HI{t|v)x;rvk+=ugqNY10UPwZNV0Ts-v*}xq~?P1`!l0SFZAy~~CYQ@@O-0{jIGq(WRC11rSw;y6Q|Gj0? zd~v`+2mk)r;Odu51hwOx-7f^Ys1d0vwMJSeK9{(=+O;ZQa-S-7UwqEHJDJb##V0j3-nRXSo#TI29jQB4B z)tkusI$;l=)wxiZ?2hVm_Ngak;qPEwME5Jl$s4m_ZB~UBZF?Zspe;6bQ^V;@2V@R1 z#Kq3cx6sW-t*jk}@AbxJt1yh*Y=T>-nWOkM4Jz$R5PJ8wh**DD95pn+t!%QpLkyrg zn%b`&c_{H>j_s-d3>=<>(ABz#I7Qybh;$^?SE2eNGwQE33mI~gH{W6&4CkhYeC&{S zBo}@9I6$V+2VWn>;OrwK4F6V+-TyM7R9+!D*K7OrPa?{@QG8I-g?IW4T*xv&>pTZs z8kC1EwQg`6&RTv|Dt=@d;IDis?qtltm<7`HL~Xae6k8TN6$_T@VqZ}bCO^=IYPBom z>v$hoc){~~FbvygqAFMi2MepQy)F&*<+YHims)w~rTDr0KM_^R+&JSn{4(a8!qgTj zm>$OUW*%^K1}tQ( z5kU52%e@2`9JaxddnIILW?{pMLa4}h!oi+0@KzfQrEGWTuQkAvQTA9kKNDZ-oG{^x zFZR1c|(W|2FxH%}oOeRCHo4&!R}?Mx^GVawYdH%|Wa~ld-S|v2dZ} zb6b#4u)iFw!`nhm=ZCm4*ciDdV_};%0dHTk5BJPr9psFF4O4KkJL`E_a!6;?;5+BO zo9FQJo!AXEiN{3FX9JvI54!TG8nhofpubxtE>SP_{D&oebS3*@k~%uRtHPhVacI6? zgWXHILTSk(@x6^6F6f10M>{S2M?LMcPZ{*=+h7EH^KS=IP_70%*<68Jd$Y+zDwWJ% zlFG%gIQT&*T(~QS^frLGh8|bVeQEjg!mbum3l=FvZjg$m1A9% zB6<|RVBWPU#*~Glx|<%%cDcggD0%+Xc6k1G5^QS{;nl$gX3HyKd^-=1s_7&8*$z+r zzKOP9)lo)nc3f9|bm1Pyh#bhar<}01aT5L8aTvBwA6o|$<4k=rw!bfd)O>A|+sfE! 
zdQAA=SEn{SklY`ABxwVI)TKuy*f2-h8Fq;YC>45;-BN&)DoK#rSp-vgSriAo6F2&r z!ujoN1S_c_@g?xtDjWA6GGE);4f3x-kQSu{tJRgbKQavu3MwJ>yn_X8k@D)PNKw*8 zZJH;tHtS;XLMw!oXToy6C7x+8Pa?0x>i$5x=Kf(wk9;`-(ZJoLL~5v)=u@5}x=rig*W8Y%xeh zqp}uy1v=r;)f~(_?2Pudwpe^C5s%huVeZQsC_SIe`DHDn_V4d{^}sNlO~O~v6vIX) zW7TOj+K3Xf$$fA@RI#>fP{l+bgkkzX5vJ&cwe~ zZ45|vf?|9Q4k|HAsD|3@hE$B$sfUCUl_)!thQWKPQ2(q4+{tU+o34q7@d4=m-T)4& zPLOS79xZc|f6kqZ;ML^b-L^#6Yh~z4&ee=`?$s8^p;xa*BL2QME~is-W@v&N++Eo2 z&qv1*E-+X>6R&Gi@I}P}JvUb2xNR}t3!%E$+IJbNhr%V46@_B~v44=ge219f; zwLrqZd_1I1AtZJt4n##F)YcHb?EgIZ{=CXBm)yTw2K2-rRNiR+2cOk2c*p^Lu*MECb}^9bC5K@c1YijXj1+t45p}|R?86=k`1Xv zCZG0pCj1tXchTsF=Utc`IE{V-?|eLWN~7ODpEIXU@N51nZYF7=Qf?Za(sy8}W`~uv zIneIH{2bFMFkTb^+2`D|ZYsr*_&G2gS&pR_TE*tRzeL|n6R`iJ2h_$H;^ufeSX5=B zekJ*58>lZ&jl}ND%(x^^^ZnOM+-WSt7T@-;V7}-r3w0dPA?N9h9@b7}PQ8C7A|0F% zFx?vlvdO49tA}0g`Do=c(U|-+spm=M*GbhSOsnpMjxFCslY%BaIZj9%DbPN{29GA@ z!o1QMr8}mf4f$hay zZ0Edvn2|Gfq{PCaMjtfV;52j2-RIOoDmQIpd~%IPa+sMbvy9XmoYKNCUn`<6K$uO-1v_ z*>IqaEMhMA0PMq4Wh*4t6AmWIn35$2dDl_&dAq@9v?2R^2XwZ{zyM_j$oBQb^^{1I zQg5_rU>U|OosEm!J4)r~c0D79Ij&E|`zIQh`M?tkHw)BHVa}RwK1%nyz~RIU7+WUe zg_;RszZ9TlIJve8B^YHUi?x04322SShvuo|IGSN_G<|X8>`pDCPiJBvywVb(L9WkZ zwL)x|k%VRHCD2mthSQuEXvgb9Ut=Z`{^+9fRtKbC%EZr`)<|&hhU4ZK3|eRbYb)kA zcx53-r37Q$KZ`q2r^M;s)LqvzH^bEc1F{ zQnT05oIbv-XT-8(W7Kh%?VY5Fa}HB*)R8%i^IdV)HUI|$a>(GfM_%Pz^kFSAQlnCG zp0L8iA$a({18(@p!R^d=ay(72Zjc*{9B1K6duuXy$d<1S!?hw8l%L`(l6p2(GP$JY z7gzr63C-b`#hdFo&{U4a{Z8b@Y`3QFhFq>#Ya|bGhJj8p&V(8xq;om?aaXqPQW>Ov z-=&@>_5G{LyDdgF+z_|lnPAG}1U#-Bi}Cd?h>=Z$d?+w_^9+n#7{k0RE36(+iEk70 z;9pmaAAdUF-;m2f=06SGu=2->GGiPFutK?94i8=r?~{PBQuqHYHbq3KwG zg?{OY2GH)=(+yp9@?Vb+(1Nb|zYtY%zbP7w7LW7?nm|Zhz|aug%8#SIqH{ zxVJc|-3^v29*X(gX9rygL$bvL$USgCM>FO)opt6;$N__H&cauH9eVG};ZEkBNj~{Y zj~d0-4o8IF3_WUg)3A*UIa)U9!_0#=^+cVclc2AafON7?EPpW1W@Z99ovdX3^Aiz% zjtRl0)SHaRhJJ76x14uFO|M+2S-Ydtxe$aYWTU)B4{yKLz`lDH{0(a%mGhOY*b!+V zpT$kj2{@cO5nrfZ+gd0IhXJ1@bNWmTYpq|jT@~a^OTUei`q$y*}jUQsw 
zJT2&r<@}%X`RHzTD7#FZC%N}2QQio4jX_R#OT;)7;dBT3xLJo@-_--#<8u}b4!ZMHcFmbylzI8Xmy5HfreQ*M< zUIyYmXW>7-F4xsP;1L*u?!TE!S6qx*?O9M*%=vJZG_JH%|EAREyi~uM)bpAX+F{3t zGs5UAbKKHWP}j)-!oUj0Z3?hpBe`qgfpFZBjG$N(Sez|IReBB{Y$=CqFD0x>ZWRv) zsA1v~N9HFR<8+-3=H_NH+ti-SEe|vvi-hhz6I5##!e2WXS4+4z^q0rf$4x?g1@*gk znSpUu7w(#t1ahQf=4~4k&vL_P?{K70$69Sr1aqZC99vh6vofvX&juN6Uu=x@Sa;;M z=wJYI2Rc%#>96dF6D%#$|pg0*!f4BngJNh|kT3E9XwRf$D;JrR_4QzV$1!QCR6{uFH( zxLacieeBg~kXyRoo(RU&!6DV4i?aMwFwrTAAY+B^dP^|F91eSQ|g z1!@_yvERTA-BrTy{(e02B7wXyh1j?!hWw2pgyek{acUbxT6t)Lku~SV0&Gh-%T%Ul!JDf3!+_4GVt65Sf z7af^`%@5dD`IjMJBkQ({#gfmvYNdcnwKv5V&U2=pi$KMGUA(ul!%@}+Lz{se>-=DB z7=_V2m|3~99Ol#W@UF20@7Hz0_)Cw(W+z=t`#BR)0rXoq*rB~S=R}3{;r5w??aItg z&CthV_A4`nr2(Q6>t}uv=C>b<(21H*mY=~~OM&wP>>-zshpybS$lQ#_<+%yC{?7s< z8cLBPvY;Sej$G?rxb^LkxUy0kXD@}IBlpzp?^t36=SyQp*rD!&1CSnvedN&9DogKDl9_Wb$)59@3&lEVEj@Hz)0m-hAOv5U z^_W3EyiBr>R%)J<)Zg#ZIXT#^{V2w+F+j!A$#^E1iU1I{@l~6Uh(O z!|QoP=&>seXXZ)g7D}BbOXVa=-M^pxbY2`CyImBO&PA+$4NN{zrv-O3oGr&ZZG9ZA z3_^TNHnwX|!sF>HP=9*~6vxby>_3uvo>aYCtc?sB?S6?tI+{2cZwuQnW6XA9K9gcD zTng9+ghU|xPb}(P?eHa(`VRKXk>&;Tx+-HpuNLvx(+nC`kqGFlgAJQ)v93NF``Mdr z8RLR1uTYGvG(%KwIY#%(Ms{K;q@LHb=btblm+>LziKDDpOF99tbc6$Y-^&}^abijg z)-eyuhWxMH*Gi#Ee$Aeh75I|X8z+4(35~hhNQ()>mAyJ>D0PL2Tn73twZxYn9#}g% z1?vtt(l=5DRnFOmuPub<*jCXz{ER>w1F|B6@hnvz*R#y=!Lk6U9c`Iy9|+6$scnbsEfEQ&ddqVfvk%o))oZ9Ksf<= zug#HoyaM-`5AczCOsW1j+rVBJtNueIjvNK+xfU2d!vxt6ZQa567}%hPuKxDee3d-QDrYRnn}*Eo@iK_j=+)=Ki$0s6-*r&S ztbp^2W%!%k0R?%FMafg-m$`-fNY%HV2kN5CnI5aG)CJPF?+HY`*hQ= zgn0l`=aECkcEp0I%>0~UggasB$Q`GSf(x9>PbHV1ytWCaoY8PU5f|2QuN%W$!%J!C z_O%SXZ?;8byC*{ai5~XV1mR9!LtH3kp6t6kyxDJ$d470am&>a@5mP$&0S!kHaYY%-sD?Fai8Oke|81*dHIsh`&7{n z69PX7kv|RwlbC51Y>bk1^c^-a4|B3T28?EgK_0n&?JRNTcp>uD=iskzA*9aP6$U6U zBa^w~kBT_A4LUdX<8Xcgzvdw;Ij0%nG_#iEH~`h>PTZ z{vU5=*_P!RZEXuI?C$PDy5%~hTR=om6a_I5L6Gk561!tr%dTbDvb%ek*xjw~-243p z&xifl1Lsrs6?2Ypj%|{Yxuer?T6Q&ORC{N-8;Y*W#hInAC6n|>Earo4NJuR}=Wr_f zHfx?2X?^FXT4(oGImx`SQOqc2q?=G{i8GC!Np7di*ZL0(5&ywt)=aYzHh4ZcgEE*Q 
zeMwFA^P1|AY|8(r+r#guqnk~*eoZ*+OO0u=*`1C$nRM(kl*LN|I5I6czmZK?0BIt7+;brLXwNr2QJl7*(s~PHn17Y?Fti0jGNy%~v@E=cJk|jYOGV!Q4 zm0J6bt7kasH$H#n%|3E6?0|U0xcvN-+D5qdjWy#u$L`g}=lnah`(}Uq?u?+W_&!5VN$y`e$x+!Oe1`2~aB-i; zqhZz@SzCaeStUSW4G z#0lH1{sE`*ZC~06UMSan!6Ud+!(0 z{(UGX&P_+t??F=@lBVYZO+Qaf>uIXz)6|EeY5(kK=6-d#Z4FMgGsEPn%q9Ek^5TFi zGY=(DSjUB7nUGcx0$Ix7OU7GeI56r8_{28az{0g`y+WtAC`pdrUP3;Y$XQ=5y zTjwEM{F=nb(Za=>IfFAtGpW<2oUh%4P4w)#`rSz8rPag96o2IQ4Ngq1OlMyoR|XCV zru*esI!rKS)Z`Mh+Dg9t+EO&F*LF%B$w~dHp8egMK0TFn;Pt z&+L%sduEr2t}BHvCB+)^nyW-t=fIJB>eyWa#$}I{%td2%T!e6kbezg;Ss)sys{c)7 z=}}W2p2=m`_5|L4mE5Gcn(?f>MQ7!(@tmqBeVg39VQg(G8859RKU{u?7tXnG-Nqk( z(IkJUG!;f&A$8?k7jRSjKpO5xn$|mbP>WiNYp_brEwxsSCt`s)TEd%r9+^#1h9eo( z#$q;c9CcrtVc=Lo&!OViDl6sraBVu*+N91%rtis5rHnaYMxQgj1pF+faZ7h5b&jU# zz)XfbbR+roTs%_Ch<-JbA6?EV-*2bYfKL4rT^DY?tv3u*T>}04K=LT2&3#!Jge(byOv~1?@ncEO$X*IcBaGZNOD96xBIi? zt`^6l^oz;g(3m*qCn|lf8Ro*pSr=nX@-*pp%Wr?1%pVqeidNllD#6|DunH-rK3P*;(AU0Ic(LTJDr_uzS*s~bmbviKgKqR3C;7h3e0cLpJh^vM**HL-#WEim zzchxUyUKX}PczJJ-BZo>|Nr^QYSHy+?{#9|o^-OFIbpV6SUxI-MkSW)UQMt#SYOJW z`c;YS{ZyT6Xo6+xSbi9pa7gN{GRfq!*${kxjif9wj&@It@OLbvNjte8)G6WatC~Ez z@j`K$;I~(3{eCQHJ-Q+w$b{H@QXxx&DxXTehG_pA`sOSEtu%jN_T_#g@0zUEk0FK1%oG-jk!aC_@d z<<{)F(h0Jl@485;{u+pvk2g=(rZM}F+;8`crTM-@*1fmoRE<(xoU&<=ExAJ9rB?X^ zwa?g!?_XmmN-(10kmRblXYlBw9oH^QW_+tpXUy{clY zKNkwJ(64FA712CAyOzypVJ-iBH;vD?LK!>6k|ih0S+zEssWr-&(WfC-DjuuT*`_oJ zjG^u}UCfXBqdPr;!`co+{u+w@rC2N@-SPV}gSn$}IlfqOBQ^A$U5{)?U89R?Tg!o5 zSr*BALqp<@xzPPvDkG12(5z5AQ>C#Sa5th@d@?1Wv5bEqxjh=rWi<7tesA(yZU1pk z9atk={+Q9MSS+8smbtS3#ZkX?uEsoZP0z`Vj(k?} zlKb%`x-Sz?y3wn*J=d*l>2)oWE21Mb(u<^3*hJxVN|vWlHdgXGsxQ55P5WRgIvrEP z4_;J<-UqPc>=fLM3}|jVSael6B>!uP=kAHrn-axQb8`$tA9u4yHgg^o<1)Vn<2|3M z-m`S66fQvH6_!{WkzAUnOhz>GqP}ww_DdvdPDl7|f8{Z!c@jn`1$aztOjOoG^=*28 z>4%S|@|OuN`yAQPFAd`q2adM%Va?U4oDM!V79-&Lq*R8I&d9EE<>D8C4~x`?GrbT%Uk- zK0JG7jA=I)b{2~s)53-Cx`FhyjwJBqAju0W;6Yw0UHTWW^>71*ls{CKhxDmeRk$Nw z2KW@XQE)kl9`%JMvBQ(LFQ)N({b0#nE~0zmRE$M0Ro_L6iL-vG`+ZGtYcQObx~4dG 
zv}f&o;nSNtv&Ybnqa!BZ*w>VgYx8k(jK?%Uay9;s>%DE?g|8XU)PNHM<-JM1y@|Q7 z-#yW47te_54&({LwX^v&%$~Z_>sEoi>oJL5Re4dE`6kZ=sw?2b0Pt0RX|3vyc&Ld~;Q}tB)vx>s4>xiNm>#=o(j>M#A1Th#f?{pEJ{r zr4c;anZP{Z z*7xG*e{l@8mO0Pm614j!v7~M(%R=k1c-=Que}OJ@8xG_34Kt2sIWzT^=v${bP~BuC zM<#|*IoFog_63yNr}O-C9>L8UQ{Lvj%AU|q{0v^)IAe-=rT7iyci;4^1A~ja={sa9 z@wLErM=m?_6QE13#($^tuqFZbAE<8*l2_Mp6h14>`R}(g;c|cX(;7mG`v}51Bw|$E zoa3_#2ndg(Y?bg9&Nt>w$qm(`x#(W{2lK#5@=zu?GeUCL{a@M=RwEFPO<^1m4_eo` z1!Qf@;_9G0ZvAYC!=Gw6HR#7}x52^(HKnKI;|_*GC#12 z=Y^-}JU;8tG~}|nP|=TgHJaG>#?%}>m;?7k|5+efwX;Lm-9Cb|Z48+1mdo!BQ4Bkk zkNy%ZEFJz+Hg@`q%9z0X!xp6dZNv5Q430K+!}7GB@JS=Z+pWj4pLvprJdH)KB_HZo zO)y*=`=e7t(kJJUlmS_eI#`cD=GtvnBOv$Gi{1F98z2O2j|tz zQhj0$hLKxee9(o$*Snd{64CJeYBvH)Ig>r@Fi3I=Vg+R%l#rR z(Pb^lz}eUppKhah9~zB|=nhKH%lWKh5~aKHHLh2FusVHf>{UmenJ~t3GF`vxQ8mUx zwCKr<+=MW71JUgj#fiVg%X=uFgN<@X`I5=j%tz|?6ydl8i>~~25E5LYCTdv!M@< z|BYnvLrZdMNZziU%-5F|Xk72NQ7!xqzfi-=jOZ`V`%NEXPKy?DwLG`z+u+;Hk7^G? znJ>M#U6*s&G)@m(!_|5V*MqL1Eq$%^x_WS;a)UY^P1`lrDdoC)Jh+YqdS z?YKESmpI={;UMJD_H|w9A757C{zmNZjOC?p0cMF-bg5oCC&r0B@`3P)L_@3h)t-Y1 zW&Ej<#b@a&YFh8BU2R@|Ija_gS}?xXWPbJ#Y55*Y`bu_zb}tWNd-!sDRyePm`_RlQ zo6KKfylhdd@$1yIo<^jG z&o5+cw&dS6D#qqp4Sd>ZVg5{yc`KY)V{C%mTo0;DmppUHyZ(|eQtp6J?0O@d;F1DT zjZ+8}ot&oiP7f15(CYK*h2#kaw4A`lB6I$h_4;*N;a^M7?Pusn+J?yYyEA+HWDZE)dV}LdEEJy1gV%+0G_6Z(2_rgX zJCKIX0bCtw$R|H14EClnPyA}z-uvLVEt38ptmr9yUfoSL62=Cl*Lk7*i<ur^s}HwL*u132vccT5%_LLUA6+Zl zxZ5CBI2cyUXjMqjKH*hvEnt3DU4~D(q`G%E#@$462kM#d(N26bE*W^uvM0!U7%fLd z&~k+t>*nQgtte4Eq@rKbu-?GJ+v@zT4Jz`1Kk346uDQmLWZ{^Z9na;*cxzV4yd!5p z0@vqAzx-M`Us?%+!mXS$ha@w8R8@-Yb@3K2|CP5yB*} zre6>7W928ata~Av*3lrD!tS zJD@E*t1dN#d9X-wJRV4&$oP{Ad7{VW*Z#cqG@!`^M;Z*0oUJ5Ba^#FyWAzkt?9I5@ zsE}Eb>ptsx0T~BdGb7-HvKCEon;Nmg3NXaKzb)r#q{}nlgwFKA!nKSdR`#2n{}odz zy2?il%D5cU0LS0YRapZ=PIU>Pfrap?uDfIAo+)z?7tUM^ASpVYHBmMU6b{dIVG*2v zUMzFrY8={gQ5onr?xSWR<{a5NFVzA(SOB<52- z^sTbmRR!J10bJTGpGSfrF&2`~+gEs9(r^9SaV)K$PhjUt3$!J_wnb?I^*ZEGbyHoA zg*;b{#~Ks-K$rtPC5LR5+^2<)qW#g43*m0Kg+=o%%T&C4MYL-mS%=k%nB1&3>+jrE 
z*-4VWSZ5rO6Ac+_=+3YY(${b4Lio0kr1~ZDdF5ali2vwQsQ8)3$h~nwLt*TS-n60z zKi!S#A~`@wmiFA2fA4JN!aKjQ+;IzK$;`o2Z;-{rCW*vr<#53Ep*p60Q;nBg#J{p9 z@pyqHFB07d4v_ptOGoCuk0v@g3bPa^7B>`KbB#>2g%|wFt}7x$R@*J@2oeuvW4Q-B z)Kx66nnB-Ed)A9T>SX;W`q)_sx1@~h;y6l$TXJOAG1X`H2W3CZikMBYEERpos=Y4E zt&xHL6c^SHnoO9*G_G_oBx#`Vo`kcr!e=_ho4%;1MDcIBn&a0nny-tD**I3_nsX)p z;+``)mE(yWktA8X7QBlu;=b?>KYfF^ zzlh#cdQ%zA?Ija1hZ~arH)nb_SH4uiX7VMqL-Ki(>x^giT_gJBd2oG@_#gYY@Uc%2 zwS+S_snS+F$wj1mN@ti=0j3=q@b{zJYJ6fpRx9yx$nV1BiW67DGw9k`_Oagnc>Rpy z=}rq~-Yn#^tbbv8Nx#fa;N#(Ob#!RdG&68HdP5_+%^X`wJD+B!7OTs z?*8fR+H=G!gOf94j`!DG!j?&%)aBXeSl6M} zpDiljXbA6wQ`X3{C+&L-;cSBVh=liBBJXubcowmq4(usdLjS|E54Tvv$Q#d871O86 z>xeNOpZoD{!63|sgVxb3ieh9gdTla}c~dA5okNG6@|?Yl!^xlkms{G#|-}c8` z{PHcXS#bNXGlqlWn7YJ{>p9LWygr$|*23yrnTy?d$-^(lp+x_fM!+pAYHZKv%P-*?O)p^K)&?x;RE3Gh2gr^Z|7_{MJ|7Q@G+d|V@H)C0RS#9FCiV5$YcP_iU|a03J7E1Ghpxlj zsB<%%qM~TZcMN8IZXRt%rm^==0oH43aD2`iwQ^4%9!q~{zyUK_yp?q~Fio<59Vql2 z#~sn#FBc}LmRBwtnq^Q&`UdkY8e*sOSj9+R$?DH2+%iP3IzTv%mRYQleIj4-l6wyn z2HEPtRB0*P@`vdJ*N{0r?w)|% z(&y{nH3OGc`5OP7rr%%6{notya7~5wFvW9aESE(Gv9!MQYg%P+dXJmrn>b;;OY|?F zg%@z9jEc<(lx!?#XHPjV)&HYH&4ou?G7Oi4#@NaHy{-6*wc5(Qnd{Be9g}GBS-AZ5 ziYTj>!u|_IMD=aR)5KEsOmytcSEtiplmSDx*pc}=orS5At<}YcRw{{v2V*kuv4DiTIa<;oaK9j;AUTqc`c)s4!*v*I5+l2%j&0 z77n}rQ-^aZRo}CESU(!ijv>O08|;cr_jH!whRuL!v@w@nzV2Y2b}pq}y##{ZmI@>N zrmCFyRK;o=Q=t{gGgAZVi@s{$Y|$aza;CxaP*Nj=ud&FQrbkMda3PiLMJ2R1uEV3& zAJmbm#w5ty>-^e*+&$?;y3A2pU9{(3doNxFM(}Kv1r=6BJYSwdrudTYczjmV3aYbI ztKa|fX_^+B5ZcEM`&iL|h%R%-u|T?Z3}MkjV;;Q8X7Sn>=H}!wrn9iwy)USXvrU=( zA`E*6C~+6g(7as9lk%ZY>ky_3Gx^M+-t2bG!*8Yj>B`v7`S zl6#9)7*l5CkSY4eQ)lLg?y?xm8x(jO|k?@48PP#^EEli0h^n2XcgX#FvRjZPktIUbCc z>=mX>tOyccl}`OslD8MJ@@6NJf<>>?n#LkyL)PtnGYWF!aLWZSEpU09ux9k}sd*rnJ9vGC4$1Wz3?>CGw8&SmH zvd+KiTgJ>6H&wxb!|L)j;kSyW*Xyk0#Mn49N4V&xTe$OLwPbl$2)BL?-0>>qk?4X_ z9Lj0Xq7Lm_+*HT^>rea5!9+jML;IK;^<3lFCH|hjj9i(eoyaaLV~(CF#H!a+I<_pL z{qZ^+`l&^UTQ8=|oXMrDK9>`vzrH<|Q@>q>!QzJTyEDLu4;T^O+|neO79ZKyL0&)&ib{%9##53e%RIP#uLw^`&DR 
zb>HS{ojDnsRrl0^3tlB*&2`~dxU(SSJ6BzXq=24pb)ZYjO5{#3~kcU13-2I#E{WUPe= zbyrEQq4;(UQr#K1a1>D|Bm-r!Ax&%K^QyjN&AchZ(We&Q9RDbTIs?h6J(RFuIcJH! zXa8Bz>+cY5{Ze0Snoc5olQ}+5^5|ikNL0M!i~iSwe?~o3=O-F7L0COy0S2rb;6j2$ zI!A=tyI7~t-=VZ1{%-}=p^@!R5@#R zm)fs}PoVUP0h8rUyxBaOv+W8=M2D$A?y51d(r>I0OuNqpI5f8B#P?Lv!|gFzC;6$H zr?R!5@VvH360+Ro8(uEpn{92@cD|=NFYd?se&T^_X2z!q$>trGf~D~FbwBzsLiz=1 zF4oL>md|+GI12U`NY0K9S9(2C-rr4xD-*-i-Thc-HL)6S$)X}{m|o+8epr>nSaOA>5w7yQe1emReY}(oyD8uNr|j|o&Pl>O7fZP_X~e; zcnOClRgrAxO4VSM=<&>h_-ms+PH!Ao{xFR?k_X-8%}DV_3b)C_ir1p!8(^NsK+%`n zZ~R!zHu|B4_!x;EUl`<<8g)0>66D=1T}B{xO(Yi{3Q6oEl+Cc-OAbxL$s< zYOJ#Vs$xrJZnShX58jF1ZKmYP%6|0PPUZn~{HS+7oTf9R54NX}Lthg3^SAWh9@gQm z{Y|C&uMv~}n#7R#dXkf6D>{QTp1hFE`fH-0=p8|gd?Pj|OK!ld1lFG@6y|ONd^g`v zb%z*|zh*2UNBiInw9!1@XR9;tek^JKdxjW~R+M!JPzl~^aQiT>xbP4F|P21Ym z9NO%}`e?~tIpD#e+rmr#CyLp7EpUh`piWjIS6)cJ)wC817XDBz9}nc2yYOXim=hb} zz&Xcs?#VjS<(K@{M@hzEGYf{T&*h@@y+dsBdFRl8NwsdO_P@;Jj5nO*mpx=3_TlNi z2;$~DaVbF7RmmFtDQnH`KPBupO2#`ipDJ1Px&Pt5I&Evp(Bb}s{u)3H$r&;6Oy$FK z$tWE!o~LTjbQ@vGzRrdGQ$L01d79UI{pgK)w&00cDC^OpcB9y6Y{1e@!uJl5d5*Kp zo41U`Cuka9+L%e6X+C$ZrEqCk0Txf%V0_?{a*7zpDamCD?rKQn6DMkJPDWeS&T79M zD2txT{HfN|F)QS9q~sCFdOfbn3Y9(Ol&ZDNPkxI@OrC3nzIgejN-tLDy(zckt~}g6 zl-kqn_?k7Hm3CRI$tc(O?=<~9TaK&B(ejHbLr0J8--4JS^A3;8_5_HJy;xiNXP5mX z_cfN^c_x@tmFFif9zFL$YK(iNPRSid88qxz&eSnA`SJ0!x6+HsAVa)|3}M5dRL(hzrn=W?=6gl+sFn+9 zBPAztP8NsTOP|oC3XRsiR53DVY_-Ub_ihH5Z45Q;MDy{Y16!*Uk^U)* z@Hu&egg0c}o6G9tD}Ca&h@T`>mx6mXq&7>!CensEwH+8Pv)2FJo3TiZsmfWx+nLmhj zaLqH-zm6$Z*MkX{{@`i*!F+HRUt|~YO?;93%@~1bc^(p8v9?~D!U zc`rcp(nh33N}kvL4B?Qw5-)e%`#UA6B5{#@+^*Eobk)DF9#M(j_#G$-saBBlTGkL2)$ zr1C_^MZWg{cFDSXHs6XZw)u2;kwl#a`2@Yvmha)c^3E7UZEt^qs+h6!gEjHBvuV{- zcpkNfGvR|gmoo4EI!yGm`z1@RMIpOp*P-r;XXNwSuxdvP(aEBZ{rX+4ANEmoJ=z<+c~+cQ-G_ZYoW=Vvg^zh|%$VxH zSnC)%O%%?+h%6f44rbh_Y~Homr(8zNQb}irv-ha5$$Q(9ds!I8Eu=4C+MRi|L~nLs zI5p$E2y)5ceT-_8A+JTSiHq27IoB#(AueFwv~F#JW1Y@h=ZRo z)rS|*aHp&pM{EqE->{65YjjNM1zX#ECjd9U+lKXv>a9!*fU)2+bPLtSbD=e6x3~Uca zqSsln9hQzDW;ckKkip}68+4S 
z=ueKKt4YN_&z0tm8YXxFWfNq1{;{rGFOYpfv+dj?__WJ>kva!(1) zpifot29NU@0!+Qtq#^bw9vhvPhj{6S~N7~$4@V+j!9>6U705h z^cVeP3=fYgY;DRZ7k$~wj8cv3{b^U7dhb7|RPlG@*@kdW=2gW{T=0+_RL5@~oarzE z&z6ZS79F@_SP5UmkLGPw#)$oYRO+4@Ed3~)1y|8H&9Z-!z7iT?{$b(+-1{$t?Lx?k(lEJiZgg z!!mv9E-s}0!l`(CEMi$i4Z4p1pgd*YAAQi9vofFA^3aA+chlj#J;k?2l%6Qf`llRhh)ibGu z)6VZIQ2cS$i+$*7ZA`#jN8TI}MuLGeqtE-YN!Z7B)66*}`tqKV3sEUPx3#^sFfo6p z^uHTQ&m@qHdWP(s>`XPuN9wZ9nRXe&>D)JptU4CL1u5dr+hk%}NMEv3ZEAXbQ9Jyt zsNHfT-t7%}9Bd6uGU@(Fp}%`5SH1?YGDEmWNqN+Hl8nE;tlKYyPdojs>U+e91A4(! zJuCX2Q#NQtWRN*Xbl+!#xmh`ZhBngY8Ya(ef5|BlUr%t_FV*X|7J*v*8Eq+=nM_Lx zKe*ytl0l4vC-;7aFi&(yO)IT9B0i$cosxL8UGw+nwx|tDm%UeA-k1@6Wh_qPCD-Pm z9c$dP*!oa%xzn6!_19DiUrPS(iW2Vqn?lCvQjI@PP3vj;d20H-hYYR9m&4Cg@zg=| zTQimWavpB(D*Du*EOJ{q($Zu&kK@Ad+h!%Y0 zNPlVWjZ{*u*)jKt2OeExXt%?f^}@ak@soYSqfp~|RaUp8Y5RAot)2n-DFL(n)F?p45su5@k=#BO zLBL~cZq61R%8GcJi(c_(d;`w-{HJtQSdn%ufl$%I|CBz{xugt=9o(@<9YTxfR17A` zz0zg|8%L&7@NzoepSEU*`y;jXp74hnjiZ}zDYy1;W#6_;W}S6m=nZ%FIVICe-i_Na9Q0cRlx1S>-er7Yc88{PHOluczAm*bYuaHJ*0oouOfDx2<|g&p6~k?DIxqp{VF+M5>(XDuJ= zxeNIF=@;eS=e#m(WkAY`DID7=_mxb^!>pb~w+P`M`UKJPM=EQ5^{6;mNcV@~j5u4y z=h&viS4eK|_nwToEjo=+inVPAbMa3a`Ex|i*JLWfHP*KPR*juu4rDw=V<)?G_9A@ zs2(*=|D%@IG{Gi3o&3^)Xit{h&W&j_`mNa6-;=M~BItF>jDBAv_vlS3+aiiI{#<_6 zT%yjN%Tp`luK7IqFU&5P@l588J37lVeQGd;$3^eaGo4ijaHrY6a(Hr89}`C`@Hr`Uip;_c0YC@>gGtQw2xz9j;u9La@m?LXT!hqSycXBy(~DX zUi@u_c4jp3&&{a*-Vy5q*=&3~1bgvGCBKrM*dZelMc?`}R=l{kOX=0*idvgDTP39p z#jITcx7sFPA8AI`fMRslT9Dqq4DDZ2Sbs(IoaQAAle4SV$5LiEH6*j%d3EvoK%9n5 zq%d(H#m~KQijL+$D+kH5a2F3=EDg*Yv3w!<0pbf>Ao`hwyMCz8YJ+Ot*o0N<k5 z{C{&)zIVxDb#)IqPYDp7OCnzz52W&39;ZJ|WrRmDgMxo4qh-g{l7_+=SU8zWp$2?3 zwkPpLDxbXE&~f+SqEP}m7jy~J5CIDdh%HIO^o8eIz_znyMN2`zDhTrn@jGgUWV_L_5nS5^cU^(BslTHl|}VM$Eoju zZh&aZy+nt5Rnh25G0o$Jr97Zi<9d}3t8h;EFAY2lG29Y_hk-F8F5B|ZCzHbF&e-OS z;@y`hcGwzoesw+@a+3&=^L6jYdaS&3S9LvXfMdvLcKZv=g%{h0lRiI&!3WJmCn&xi zheV$3En>`HT6E5=f?ZfYt~IvdrRYPt-*>}#e;Tz5#Y^!^SP0ounbXjo-g|Q>?3_lj zuIS9QtFblchgvHf@?tk1&QB2D@jfRi77Oo5dT}RO1v2kdBz@kP;41y=Lsb)4Q&}jC 
zRUOK`H>vTl)^xj-$?_=iGk5o3MoY;BZY})f`#y9|6bA17!3;Z8&V=t-!V4@RZd4UE zjH%1~_5IVisCtAqIGkK_S&PT{w2k;e6QI{Z?% zO<$|XSEjV|6dsv_A-f8PP+#-_xstsR6F7z!mNEFroaeyVQVw27r(u^;tcUGVo5vhf zW7>`+e`_vNR}W-Ls0RzCNFRExJ?+0vVU@gQyVaKHX3P`*PY!N9XQOdF@hEQheW$8^ zG3C6+Xriy`VJ`Rg1i6n!tg@$JAAf3Ziok4|4WC8Np8Z_bm;HH+*YAeK*9U5o_{zc> zk70?AIVW1U;dwNIYJ;3;{oagYu?cw3cj3a-GEAl46I)S2m&euE6Zl&Wg{zE(B+# zW4+9s`6r|4A${m(zb%>HV>;1tW`6&zLgw6`RoAlTYK*D$1GWU9HAGkZ5pI|-keTgK z2x}9J*8Zu|n|I>w`vRP5$a(!kHV!4#Y2W*{O8!fi%|}P$=VQW6PgiV9L>J`a%*jiE zEOm>fowoFrx8<{6p1*fDWnOhm^5d%PRfYM&!5u6;){cGIFZaw}dC6?<>4Np@LEN@T zAG9hJ7>Zs=~LU?kUn;*qO?+;_jXR~P4*zHbQ=9jbn$nW zd*72tI*T52aBMXqf-kAoxn?YreY5@yOO|$(_a~X{t35|>zmvakNHZm~tRKgE7oxpr z3U*25ObKj^N9Pl&);A-@jTf%PXX$I%dvK>&GXE6XV(`Zclj%|TTR8AnrxM(2W|Chn zXV48z$#g%ZJTDAl=3dbTbkOD4CntusP36H4>0#A&Mtj{fx;j~7;a$R$-)StjD`eBq zn#>J*p~|g{a9Hn0py(4;r`vFQP&zI<>|`zTC3gR0h!b9rMIPr?BvIHkpT@!=|Gna= zIx}7I;CBpe8%$_>R`fCpg$pNlhsUzV`z1v3U3^>5@02lnL;|@ocN#OYCY2+9ssYob zU#t~C@pgSuR(W7ooJwoqr9bR3Qdk>doXodlTzoOD#P>m@oDVwHBX{35b?K3C54wdi zRD5ZUa}+-VvS<}K80`fk*w!M16&FRfR!elTMro`%UqGA6I-I?FQ%w~9NBt^e@TqOa z#3fclRHoB!j&Rr2lMUNAAM=+ns+GPa+of;SRB|fk%39e;`dQ~P zocJ?jJO_47AnTGL)s2dIvnquTcS_O8YR*fqhpMCG0#6+m%9ktRcQSXSI8A(RwMGBZ z!JT{JF@GX`z)m4$tT>#;k^P$g-Q|AOXjm-1!5hXLJ}3F9XN}p@&0bi&!Ya3Sqlx|q zvBOTIs+`}7x)gG1Vmt$NOZYtZle*QdD%BqK;m}k&PS_6SVI9%L1q!dkRrp!*J*)pF zOJ=7%MdFt-IhIOwIj1cg(TK6T9;+Yk#i#MXoei5z7`4%p#FeRBjk3d0jp7B7qE+;w zw@*H4pR+mqCzFuk#$0@RLhaI){!8m9@evy_I@t*=nGw2H74FYqU))5~`B7%MN!3fK ztddFAZQ*}bsmiiT&y>HT5ngjf5wpDmP6vFL*n2$bQ=J&~Tz=0@68Je(c<`G`IBy*( zy3>4(>$zR1Mf8UIs^-algw7et&|?(8APgZ70sSWhRttxSra73;Y1W0M_JQ#K`zdE zl4~3+eQeW?oG+@u-cW70EEyHXaQ}>Oy4=(0U@aV7nGLmFIfc8=Y_PGFJe1Q(lBJr% zq3oJGt$3(vdi3XInK$zvN@k9MBkz8tVxcSk&C217shmdJ)#g0=ox`vvv9y=F%hV^=W=)k?A=c?vl zL(a$eVt-6@MNRCvB^=D}xputuaKKeZyk{dMA6E2ZH#@|$p;JEX?S3kgz_+TQSATRx z+cqXfc$MP&bMq5!jFq$Kg~tlxCY+O#263S*hq;>*dFP_}`_r_ZrteSF@8NM)RmN=o zshZsD!<`IkPV_LsJ;Yh^#U;CMs|#HNe5qe2oF@;ZZ?i6otT9mp+Gk^1@J6}(_f1VQ 
zFd{a16xr@pyp5Hg#mY>cxH(e)_e5MnW#;kKl(3>a&WIPa*Nj3g2^Yu8OEh5<;XWKf24D zx!!9B?B#iwJs=5#s#L5@zolSIi(ErPv^WUrF^XLa9>N0BBM9){zGsm6K(l-d0s^r zXejgkifcnj&5WSjP?%$L#OJUsnXg6v(|c=4a?}g;dZ?cG)&lT-V<`MldvZHLmwLyB@wrFRMuwhSbnb691GD%aXj=S)R)3AV-#e zl{G--x8FVslm1LOBi3i}wx0B1G<=3#ZDSgqEyhvc}vO=6HeycZMeyUhsBkZsFamZ^B6-oAdw#~wPmkFnMdC+a8R%SY5w>t9gJ;@q<7sa)arreYKqNCZd zGE*wn_;qSp&vQ~u;wGO_l@^j0e}57|C(L-i$%fuNGl{g7Tv_em?Cu%QovHfd{+owe z^%z!#$nz9ih1tFSA8%*bQ04YNeN;pgJ5aH^knUbXcS?67f^>Ix*e!M(ySux)z(Pgr z7Mpv1|Cg{I+|Q3^vG=}Wt@+M;rcL1VRQ$Q>gjq&>CVh8AKz0$*-K=5Mn1atX^xlki z#i)(sGT4-&wL=-|MzxEKocH3wOFc+2=iQn5=sms%rNY?f#(vpDzSHsXXb!Z7SYdhD zT=?6Uq0ota1ykhMU)Lla^-`mb8H%N8n)q|v9E}cnSfgSNPrkc1R&p-rqK{)G6=dU( z!OZJLuJxBVS0@GY7h`bjo+573J0DqNiGc%{Ba*Sld7k0QM#Vy*hTR98$&B5??%6IS zoC(OFL(Vbriu*9#jv2TxRSS2M@HNwb zeZ4cfWfoyQc?+H5f-r;~pT%0t!4=fA_p}g4`_=xp{acI)rD#yHhoS2DXVunh(W%4K+-!p{AFqi|ng*Q0Dpl z#(8@j_!5cD@fo?rcgW<|_@v(OIn`Wvv<Id{hc3Hq_wRi~gwE^j7S8u7)j}+^}*|fkmp+Oq^l2XO4aM46a;=)JVfj8EEzF@Ta(2Vh%0%R6>0?-94Pq8%A(A=h zbEo>@!_RwSw5%4qn0Fqnr39&=uT$9j%YR2 zfRs}(di2-AiYeyUCtZY+=T=bQ*?qNiG8E_k@BK~&NS|z6ze|2k+h5W4<&AjrR}oW| zSzxBsNX*;e%?>#qOsa8!O(WOumWvht(*XtZlm!vcI$w+}VFTg%>AlF^p^El-o(Q_5 zj+$Enw{5a<-SNJOf|;ZQu5UmGf?uROyc$aX%9iijdF;w>4=8ilfb$dtS_*D zc$JGK%z4Nk2}IVWFf8YLDlUzC-K-R>-BgIj6@NrU%y-cxPYq)YrXj@82s-3kRK6%g z%1#>`D2hbYgBYZoF^2S?Qk3@3#xZi*T6H^Peu0(X2|k$Z(c>KbP>paAFE z9B@7-5*Nq~Refv@?XlH(^Ccg>=&NjX=mCX+*Twz8z?y_YEI30QhJM2PBlT~!2=zoz{Dobf&) z1{eIO15(fEJ~@l?6!Mz;%0Y3*O_9ky43p0xI8MLVDrEtk=tA_i2bO>H#KB3?m`5G` z^Mo=?<8yrDtTL?rJrF(Swu(AlpOc=2VkmQGLC!XKwT<`DG)07z{p2F8>t^Tre!gw#jz%H%H=WQzB;2cc3goFAe7l z+uUp5DNn96_jR!Yv~Xs2B(9ND8r5!%K97pf{hd9gkMTyulT3`Ar-zT`?Ct2Ahwt}m z5!c)Wzs{2f$Q-cyt0;WlWrCgTUf7^kjxjeJ@%>yZuAk0s<@|dnJey?1301 zgW!Sp#X30+C@M2ocR?3>>#d-CqzL!vGr0EDA8*Ng`5vT)-OapjZ{=Y5&uWR+yL0G+ zs7t;kjx1A$tbQ=sR7~*jm<`4%mSA)YGszdIVNsjspFIQ8jo!VGye4zZPZFR1q@W z0|yFp@QW~s+Sq(#eKO@c$_;BC&VWu|c0}*0!1NIX$U0hvo{u^s=IVJdYNQs-*G54q 
zR}0N2O>u5@0rT?a=tOO0<=Qw*ch9bq}gtgf1gK;o{MSHr-Z^f_DdF}QAeupQ8u)eolrxu#{Q8QR;7r$b3+kdstx1qR%pIO zp7?DGY}#fA6^#UZ$u~x0YBjIDY}7NCURco$TT{P^;9WX+sul|qE%FOD*<$gRBJ59d zfYNL)=ypj()hS(^cB_XS_X|zYvk(^|1;w#FfKy7CmMk!?$q>C^julBon67UP=?hV? zRSd%-GZTb*m*Ve3cJ~Y}hl86XW zO68c%eDTpsoOhk-isU0pgwGajY(9{MgbBKEYcWTIeGx({%&|>205+X-ux~t|mR^pN zUpc1>DM!NNPG~Q=DsIl0isLVQv2=qrIx1VE)9qYj(6_x{gdfUNlW_5!5uDmfP@Izq zLyc0nKkSG-doPMBE40bL359{W4xEfE;5(oBxYe#`H+4YD$t+wjn1qMA?6NivN86q% z48A)6fkv0a1s@$u>Qw;EU%Hqu(-X~I3!rtH=c)s~klvV!q{nU;d2c?lYuVMy`BR6I zqhbbgJ=Rqkm|+u)I2R4vEp35&c=&Yeu+fC-W;>*-Q9lc(ZW?TdCB5U2G2H-PmeAuENq_6EI>=ZJ!Hn4ZV)z~U z3N}P=53a@Y7w5K7d8qDUjbCG(u--NmW9e-vpI1vBTLH8=7k~YvE4*CZiJ><&@NRw- zDk9Wz)Z7yqTk_E78nuv40sohGI&Ddf^2RKS8aM8Qq=tYrqGH6$7PV%6cNR!Md__k(TZ4N3TSO1hrx+<~00J`#)fX`nhR z1Yb)uaF+R=_~UuF*vk^%ID7IX&u~wRz^{olSVj(yt}dU~ZtbGn?ggWZYFO7qf8q-6 zdluWE8+{(Wf63>#!cOWeW*WBuCmKp1?U#qGmr5Y%ddVICh~E=lh~wp|_?zj7tdW{H zwBM520J*jUtnsBH0C)W2QBh+6sh%ZRMxEV}?5WntT`{2Ii5O_3iU4C*GC}pw{jw4smARN$=K~&hXZdq^1S~}JYdiF63%7#K5^Ks4Q2o9reW5pi0 z@8onf<{-nrTH=2vKS2hQ*p;nSF$F`IAIVWOqlds8qo?uta+T+tq0`|PF&)#F2;`BC zlJAy-N7~fvoWF~{n?DG*-O8xz=!DnHwb01C{oAg&XzF1PecM3Xx|M)8N(R_=kN^Ga zRLr?m2D#6&@N0Z8Zp>E3lzSeK&*XK+eO=7ye7x>JUrL4-uJ%p9L_J-+@G3{w=rq`< zR-)fOc{H2<6x~uM;t%;29l`{(f6@CcM^>7@9R@7>sI0IyFFQc`43xOvm}VWk|c$ zEH+j)3afvbNLUpZWlrQRyPtpFN_oH17+7#mk4qabD~@~<$T;~9rxsK>R<1CZD6 zz0iyW-ON=fdAp z7djIH5l5cfogR*`D#}9=nFRB0gup&F8etn;xfiKGd?GtSBgwXvaJ|4MKSf*db1^Gl z8M*XNmsV-xuW-Oy`X_Yq?9gvjC_E#Q;d>Hj3o3(xPA*!umm=^+NA~U<6qhF$<5zV$ zx@T~n*KUsAn@h0P))9Y=e9%yphq;`4dpeO{UYvy>t9nSf-X9rhcntd_c7IcWpR)({ zF$dqX!2G6JL3qQGOxKYFBi;)Jl`K_*t@>C>PH2 ztniK5`~J#ts2gqo?>^<2L(kmnuyX9`r+~cF4I=NJ9=e<_#YFmdpH@5Ll?SuyoBzj? 
zw}HEJF1l%3;ilyrti4_cpU7IY+rAK67d{oyv-MGWE)>6iu`gtaHM(3bz(+khco&2s z`B(~db{$ytsKOQYZ5Fan8{1h6P626rz8cEsFGAuxWb^ z)W{(nDO-V{{h2uVv=WXhr0}`wtw>8!!5<}SXy_W@l%6@vUNetKmdB=fJX38CgHn?< za&wBX$+{4~sH5!JyFm;(a8XR$6Nz9^4fVSdU_O9+8nV37RE*HOZaO--CnAv7nT`5l zj7ccL*fR?xUT^Z!o@4}c!oWcju!~u4<*CMqo@$Scp82S2vO%bIFg}~j#8S>K*PJQA zrN4P(QWxWETxVD$+!U^T^blDaiq?Vb1Mp!th#U3fAPdB`dO>aob@tcf4kuK=+b0jB zBFXcZCkMAJCxm_r^=qG4L@v_Ahn3dw4J$w@`-^LvT(Qb3hx1!)^0%sBx-A`xrq&=n z;+-(De<>!7F@Z*S5%hkuw}-ySH~UL*+t?PxJX6f4r%1O=12o~#%Jt*Z=VjsRqD90r52adaHXF+9ExS6$8JCO_@c9+b$jfT*cUdmXckn%(-J5#jJ7E>4 zj)Cg~(BqjFdC*Q+f;@xw%bQX&7|Z6=rv9(J6{dUB`0hFBwISK_@7k9!aLV0r%L3 zJd<#4~qI;j)V#g#UJntF_C+Zuc z-0Y!|nFp^DJLE7^EfqjNKJ%$+oY(g~OBS|amBinRr0Yd}=>nAnhlJiQb!3Fn+Y+dO zhn>iakfwd(efhXAba87+JqA51pwF%z2h6*pV%s}$eVQU3l-XhRHht8OvPZW|d3fY) zO}=;lJlWs)?1d$^lR@`yKj&YaN+e!ySI|olsDD|E{h^ElNik?(w@jdhEoRIv;LOhs zS9hggXLvehY_q`Z?lsWk-f+m%8f^CIgpE6%inQa(cWq~kR z9gPmmWd+P7H)dBB6qsKsIQByWge=iYpwa@hUq!QXp{DU>7@`HT*MBZ zfjStua3M8U&c`wqLGND&+xnXlX7P5D2Bc5AA#s!*)>JTWeWm~z*X^;t(1SDGOnf&mz=#p# z@;^w&n%fn4z4W_y{FKZKBXX>y1r8k5ML9i#O&{{H$kQ3Jx5CkTbPBTP8N!Tu-QKKB zEZkg)@ttL`NavRLo}!C}bNMLe=Wx1gjqZzzaK(fC5Hof@T}wn(S6!^VS`Wn`xj1uk z7F2f1;9taDVP~n1JA5DCvb>Ztqd~ejUr$f#utLlTv?tpt8urI>kd>nkecgHt%u2(vwplppHxSOF zT7=enWn4G)hS$ES|CdXib}b3bbI8+So?MDLbNO#$bUa&y<*k`;V1KHl>m9i|80z`& z#l)YJaWTafm#BM(G&*Dc%vfe>+|bKHpZn2Fcs?}1EA1*QTNQ!QFI5t+m$Uz)SdjBT zbWh}5?QRfmQpc_2eC6kkj5O)GJgG9_K=j5OW#@PUyw+eMPY)Mv^(RxjG3=at_$Lw~`z@vQ?IqB64&$ zEL!kbY@uH8XOl0s^wmeLnH7@iI45C_%p*Jy_o9>WvxmSG&YQm1ko$N`M>Z56hVC+qXNH=WO~h_q-A7GHwckDmA!%4aYJM z732hR4o<$^RZUBDR`J6ZlUTeMs)a7}LF6z``AxHe466QE{rHvGyOz3qG#TgI-^pJ! 
z#*l(MJX;{}?3WGxRfl1hDtWQ}%h5-h*Y%!qd{^s<%0BED9juBM{*ldLetEc3czqld3_R`1VA)I@=s{bDs=|@Bn`lMOV zN9PWuf_Qk0HN?@FI$YHupN{izgT`*~9&%Ge8L9K^9*ZDvReJMmFpBe~X!=q7L)jsB zHXYYAbkIDa22Fo6@OV!()K2!s^UP+^?;7=!7lG(=R~L`;*th$NIhCdKK!5j!|C)Gg zYBAwnuo7FR(I3B`yvzJ>y_-7XU|audG*QE23|1-IDV!tW;+!j@EL!H zj&(R+{Xgyy`4bYZ*S)+0o*aKHzWQ=M>m7=OLh_CuSz(iTA@01j#lVBXoS!pe!MW0F z>MsM==HOpgfXjR}U#>F20nRD&<#zLSr#&<4Br6~kT18F@EDafM#=?(PPd zoJIbpK{nPZR%6Ac77=XtLyW25zTj{$zCx2d3JzGseb$Zc_9#0P1*O$V=)T+lUH&i+ zOJ9YWH#s%)ev5F!w_;PX8hUHZfII!4+k73NsaTB27FXP|j)cwR4E&hIIl`AZ{3^)8 zXX>>!SAL0sh5tm>mnk?c=f!iqE-u`npYk5FGm7kADGbNsV=>r4UbSg9dG7mik+_e2 zD3^OfL%K;^ILLmWfg!N#s|Wo#B|D?aDVY-y`9A zvX))B|CNIB!KpYp-3^(y4XF({VrgOlHhS2jr*9ZGy^M#7oFz{8X0PRu0&KNpKf)%m z*%e+056+j*DMnzNv?erG*&?Z`7&dWE@NNvi?DjlN>#4&tbq(Iga}H!yheu&wgvHU1 zB6byfdR09ztbv}<-|P$QSAwcIdn7H%M1R{POx9xtl>37I%oBQ^A=k>bKk|n@67RVG zQjdv75cyeWK3d|ZQX#^a1AN^~{zNr% zLNVO|=eFg-YL*#7D?JfG&2w`AbFELRK(aWZR+dTpcS^e67p9qeCO#C~$Le7$HN>N? z+Q^EwM9Fe;hz3~U`J`|()Fk5mGh=vuWqxy6KKi?rq3@=DqE_yjNKnzlZU=8@kX<*> zfvh&>Nmkym#a2Z!#?K{U-#$G!PcDXaQyTt!D23H0Da@0(ErwKT;=-0lyiujDv)zK7 zKb-&h*h0CHopJ^Vh&*G2#uLogaR2$3xu6~OeKEe}xhUA8fkoQ_(S4;6_OInRn*NY2 zVGg+T(i3yo)pWV1KAQO+s|v})#P`(_ulLUJi_kfGN-W(4bWcsgx&ylC%^AgJ{!+8P zxnR#GZpDTUW$QO zd6BH;Fn#FIk6H3G3xl^*VO*XZ=KN|AYGF#KnC^^gD-Ez{vIS0h6hJ548gu9qct4Q+ z%>2H(c`$QzGZPAXOVO6T!Zs~YNLEiykvvFAAB)qbS4fC7SPyR0hLvG zaC||%Z~F^TKjgYt-`5al*Qen48J^2B?a2x$#yVSDERc?a?wnL;Y!_Ucy8g2w^iZgU zhHVGbg+CAqhDvA<%*RdCh1y5*w#oa?`D4$nodC2{CZn^h9@_77zH&Der#@Cl{C6tH z^hUS0&0@T-DwZ^P!R($gw)$G)NZ%BC)2v}zCh#aT4m*CT;df>^^5bK0#8C3}B)wjO z_jSk9mp8@0T`Kr+)*BH=slOTXx*3&$j~~f`JLUr`uLMkzbH+0I>v9$rW9166-z2;~ zXEc#FbM%FHm^77)N6zc%ll-sRoP6~{RIRYZ=Gj4LuVrTVsR4G8IedF{I#hp`NPK-t zdR zow0IIi+HQ1jBC>D8JVFE%@S+G@jP>eT4K)oK)4=>2e*!hdS1-gOAacuOK>Zo6O4R+ ziRc;ZyL{-4m$h05+HQ{${{k4Dx8uDXfajx=@q@bR*9TSDtdS3oV&-X|d=~fP-wRnk zCHC67!LHB{Pv%oUe6yG991vqfzqSn7uls^oL}k=cO_%9?=b(13!ox(fUa3 z6o)Rm=7{KMo&*ABC@k@_ALR6@UgE^18aYp!2W{ z_)oR{gaci_=zScddQL~mKla5Y5g8xv@`TVaVd^JCNp4`unI 
z>Ru$qO)-UjRyl%|vM`1B{cfiY?6-I?YB*;tKJ9~>G3-HlMgH-De5_%v?o*&Ye168z z6KV)eS3aldwYzk=9Hom|MXStrkxEWt-eIyI6!fv;_B7#dl$<%%eMtcqKE?a`6@jhG9@kPj=w-Yx7kokb?-s8qxc zo(hAv`LI09?%7WzaL@0^9;#QO&j1~q*dL5!S6#?$vcVzMVvHPXkJ+z6(EDvVZt?x6 zlU;?0QU$QtQcX^cWa~*f*Oqj>W8K=tz|;4|9s1U`I)@?Df&T2RrU>#X#)6;L>>mh4 zfmb|+Jk-MkK3{sS%0~9kDiloXgTeQk#lZ#YoWX^l@wO&>zuUuvGtDVCY|umBl^Kk5 zbX#Zw=MUAW<-D{kqYC$QyCF2;rIG0jAmS)^LX zC}v}Y9QjldJ_ox$>jM9$&Elt(GVY8FfcrXqJf#09jyZPk&d$hn^hN9lvV}V7^e1gG&!JdGkq*`t>D+m9l8$5Kn0)>T8oM6 zsD~e>?{sA^7^wXfUsN^e*Yic~EIoYh!TmqItPXsib(>5M2wBlF(@e2vSv5*pxlepm z1x@=tsP=pIzrfU!lw15nwMh`8DiGySkCj{Dv9K8E9bRSLUvn&0DmY9HyCH z#LnT-#hF_}Ig?Gnt{vnq{-{Fuc51yzH4?8^(soHa zD0nDT-tk=PABW&0^v5o8!3)!D_z3zV*C%4_s&u4pw?u&ZEF7R8YR8!xH2&!gt9G*8 zdrd`?12dXEbTP5P34`{~$GOb`lV?vOPc#L_@%&zx%h@E~QJ@~?J}$%Cs$`f`H+ImzbA#SUvXr_6kwwDX#DZOg@c70)zGQg}VCt1H z&gT-fj{W}8Son~g@Y~8TqjMgjIDbF&RvLX(o`^oXG|*j|`J#P_#S|9U|q(P=$4{D!*_&fB6kZH4W zGg+b^k}x;d09oW8KV3m?Q%f}zy=3q)Lz@t78Mdp8lva zpNXlO9%wexDZa$Py1EUrEi}0GhkmYiU`7P%Cq6`b$5UB01Kv zwm5ulE-rc0;$XyV7-V-u%YZvViTSp?J?ZGrK2%F9FKFlSzP`mdEWIxu7H6Sf3$@9) zvoJiH-m{SWDz`8M24aZoSqvXDxaL8AM93MUjX0NQ4ot7pGvUAKjcDM0bw}@L^yR$Yzccv*4F!ljXoob1DBNvl zAJ%RIY@J$%ahLKD_rE;tj2?Jg+A6&7YM^a(5aLd-H)@Cj)OHpi=8h$jode-w8&6Jv z4HVnRS^HOnIkpuNuP13PQqs@obpN-QyYRNq3DLvBujKnIpuRNIik(KqD0**?0l`5q z;EZbTBW*0 z(%RNc!?8QlF>;R{2K1^#csPCAO;y^)n{RG;*|JWWPpFU?8H@7VVAl&hPf4>=&c1VPV>d} zBQw#Pyzucm*c;%O1K-3NiPyU}u@iz?pNfD*YFIwmA1BCN)A(+V<5vsW*KLdZAtBg& zC7#?a1DHK6!@#Tb^MzGPd@g=xF&4PALF_TKK&U1AiN5LLnTo*b4uw#xv&0rvWBS9A zFvY?M+tcb%Qj?Cl%-P6MAa|Mf`wl-H>^>2RIppbhdzj*oDLb;<*xOz?7kGR<7bQBI}JHHMMy~I{8yH}ya!Tnqn|hCRgkOy zmW-P#WfHIV;G7KeH*bW=a{9CUoe_4D@1=WId_J-N*2xaG-~C~1kqk@DyTf@_N#R^* z?7}j%f04z9V~@pKTTQI^;SG6BZMX(`B21*VUy!0|G2*cOpL z*kJ&2n1NWhNDHr(Los5HHhM4*TOMABnEN)!-slFa2ju>=(Wkq*7K0b$BkyPp2JGv? 
zx`M~zT7*6vW=zLj^0vow{*%|g7!PMV;I0z8terE^*F~M(NY$u9CS>*M(C*SG9)|r8 z+4D3IecuyB@+!<2nWNxxHWqg=hKf=odanwGlc6!%sT_IZv0KL zz`+PH+o}6AuWjHotPlQi7mYo3Mx`bpP$F@H&q*Op23coN(V+hTqS&s6q= z{)ooVI31iGNnNCly^o>vTix?PYDgl+?;+=74(E5&?bUAALeicsN!Pn!+zSDD_eIyI znz)=Cg^f4VFjH2*gnic&7jq`Igr1@k^!ZOwLrs@Tn3*OcExZbm&!rKJE#k6O6b#4D zA*+);?xki}w3E-CN9K6E&%OypmVQikef%9A09ALQdD7AHl z^oE(}PTt>xK2?~{KEC<)DkWa;(Wef$uKibZQdUK-n*$F1GQk-KORP^XLMr|9dhHQd zQ5^$?E6k%mCA(lD=*9kOD!p%CBC znhnJ~KH-y$rFhyb8N2sdGg!k1iqieCT^QYry$BwW90jsM10BE6M;)}yNN!=fwRwp|fRXBZ%IQVjk~0Qs1PWZUxnKZ%~F z37%-0ABT6gQ!v1_1Vw*i@pfPh7WSWrjq>-!Xmk3dMth-RkQpY7q7S!YK2EYjxYElN zEvfXHvqx@!NF@%W6=C_IQfPE-6E-?u#SB9&6i{#7L%sd#BRk~U7D4(Q`?<+=Uuctt zO~3VVZ%GxskJLB&NX|=lbsJ2^^&fE}K^>hBlGFRh5TW;g!#@kjN#opu3>w9Wp{RVW zjRJC9WM`*hG5x>IEdy{T<$_pqmOjt@8CcUt8yCi!A%xsXm06ZhH(<6zGZChZI`~;q zgCk4xu)di*0hO+pZgX3_BftBfNfcK0)WxpXj(Ctj-!^?rXXS!%sy+h>Qr3|3uZBYp z@({+*Z?i};-r54`%fjX2J<+>J53)hAxN}_zXFAYd&bj{i$yV&@<~xJ^97@cu1jW}t zDKH%$nWMGW>4=q{O`>~)5;Bs4vE;G=o=zuUaYF&-890!e9F1!=nb_sTF639$n9KVz z-?a+yS39C{{993VeIi!8b%ueWF{UiG!dBTrL{(bBV*7O1u8PKzqvr67FU7FV`Ru1D zMPvU?s7v@Lj&V)vMxy0)= zA0CM9>tyktv-N95KFf8D9DoqqD*S zEZs7azK=H|&qog>E6JB~H$;x`z((c@4ls9bINgtWS^?Cqdti=HJ(L_OVcES3lCCEy zuSC-IB=yfLis87^)(I-LvasB1g7*h4vA~c1v*aSE?RCJvU16|$5s#}=n0cjM?stVg z9nK$YrVRr=oDv*gA$MFQ^G3SpzQvXAxMK9yV9&WU_vj}Jk(O_Yg9eMBGM{r0%Y~41 zy(zsr!{@*aF>j?deE#RBT}t1@Dl1HGEr!ll_M=Q92RS_h?@t&&?o9-nSFzE ze+!dcAH|dLO4!Ft)ID+}bY?js{!jsK1h`<1LkRrXsXeNleQ&#%*N)7_?l-lNRDbCf z(*gl_a=e)4f(Q9UfI}v<&iG*1j|#ZaxApgJB_v%>Qhm_)aJ#7g z-6DJ@8{=fp1SB#Ktmf-u!M>8~Heo-c@Febrkf&cZ zJTD&yH>=}Pj4OgKP%mG}p3ud?2=vWE%PfJEA#)HaTa3AD*gG$wPGmo_BdiPGh;G}o zkQopHt>3zc8qWOQH*#v2o2i`Yi$m*@;KY5HY6E$IZK+USQw{f{9bl03OpKQ%_iTt4 z2F_yNb*&B7-p!(3q=V;j;V|&x{`r6_q#a5yhPmX=N3yZ&pbYY^UJ#MH4B&KXChR(} z*IC03p(*)DA8LfuF@boS8w>OK%zDuOP?J%PfvZa387YO=ccgK%qY93z+hBqZb2fQ4 zSZ_uh>=HR7IrQAEjKknOJycYcv9B}@IL-UvofM9RJP|4zw9x!AoLZeBihi2mcb_6y zpJOiB%pbpx$794UH9UG&hCtaQoKGO1&s+xG7TptHnsl+JBfSxRS}1ijW2bB(&TS!w 
zJdK@|g{c_uK@~n#?EOvK>o40`p#>isvx+#%y&w$lR%rY2}O$b70QIath>$F29n z=UZ`D-E4w4H%p-8l#Uwu2)}jtEgHuC5$3Cu(ct8Pt0#2em*RxmuAKKvyFf~WLjHX^ zqCOkpX;u|vnctTl#vIYR-e746#s@260@+N8)&j>Ptl_-05Et*6Vd@-mO}en-@T3Dq zC6?f8hhq5F6yZTYt4MB^AsbEuvXjkF(pM8U)X%ynbAPtW0@*#naPWQvS*N;K!ntVq z^-T2cP!3h?KCsUFEF3&Fao;u!4smL@{L2nUMsRMyJ#|}!1AZwb!z7VClQMO97RCAO z0`8M<%Anxy8=*svpxsLHe)BXjgnPefp3I5ew#HsN5BMKS#D6pOF(|$QlbqO*ai#)) z&84sjH$>4%ZRS^E(3|t=$x0| z#iUzONHd;(*oNH{BZME4B8y^nDHsc3B?@9=T>6>aA|DXazV_Z z4*h_B)Wa)v@!7*1I%P$eX=4Zr5ee7oU<@g;f$V2;uu94>xvUhIe1C{7f7qY%Q3Z5; zqn+HQwels; z(2iPQJ#&QyZ!BPFYYnM0GvOsi4rzyK_NGzudo6hn>YhY-?2mdQ+O8;Jr)v;gj7l`&%3Q$V?P-|$vR>l#3S+vWm)bqCcCaos7|g^ha!!YCu|(*1 z_CcR8!?})bxLOj2Qwt5r#ji%hDDt~~s<2YF0}juABJOq9!;AjY@ob1b3^`A2iKzRhjR(gnv7P?8J%?*BP;v3Oa%wdCeB-- zjQYR7dImjoR_K>A7e|!I5Pe)HvCcO*qfv~S{#5+VrvFMM4h?hFVC-RwbNqcP7BGMG zY&zC;h(pv%Lzt@6kgZmLNyjDE6(yZFN~&YZH4Vgb*H1z%Uj?J5x}euAZN&cv45c6S zu8tLccQA!nZX{Gf)bX9qu?<(E@q^cA;c*4b^*=96nDZJUod!33P53C8-$c$=w-R0B(RD(2#OgeChuL!j_F z3j5a?K&E3U2G(Vw$B0si&zq92*Hf(vUjKM4E|M$!L@|eCn6G!mVejxblOZM5opGnKTg>I#O4p4sn1QT*^NK zjOJd|p6{wopO(R)9W=Gx$ibrG7LT43*&aOn4p zgVb;4lGx*(9#D+O`^(YrLls_q`(lN~Z}H1(BC2<$pz}=!&eZGBvfKmrPE_NsLJADb zXQ0)cxp%oz9RE`$@p_W>(rj{BDHg2VDt7epLi59N$hm7l&C41aWeU+IZ-EX^Q?YkL z9>TRar+l#xYTU2uv@gKOT?24yUaN4g(ZaHaA@J#=i`B(;IKHtMzmM6#LD8GtpJ{Np z3HXN8;@7ewd?>9!mIgCfvKNHRZ9Vj}&%p@x=v-2F$7k-#gL9{$E8j(%5{i(MXM)AG zb8v&X+=Q$-kjZ)^bnc!JpR<&ZlM{ttBQ-o=kMP$e)9k0usS4%v zThpI9EcKJZy8ZXWEY3q8pNhx(YF(t#k2if#33hLHfLC%DymqHTr$8SS)Mrn7Cm4so8gIVlCN4as-V z?u*p44mkTl3q7~`KxvE~($4{R9~a@7j~Na!xBFlld8r%qQ8u^=ab|hgf3gZ=KXruJ z*?&TLloFP8cEmXLsJx6b$7erwK%TIm=Qs!-H-|xLt1){TpkHac=&KA7`4jAb8Df*R%>seqqe9MVS9r)ttJtf-w{shEoW zDP-CU9n|x;8j3tzZ)BduFo0blGq6g=0ArQN>H40I{GR1-j(IP125b=hWCO50hHTQ; z&Uk&8EQYe#7`2_V>e^DoIwr8=+7}yQYtT?V8*Ni0UoW>t9*i-+EUx-B!iUU;a35sYC9jIsP`~ z<7PkRkaE~F;t+$4BaQJ|s*F5f=G@MfO56t~scu@OG7`tXe-`m?CgGNDx{Fpfd20duq?G?A@38~w7`HOl@DN#E~7e|;2V*6$T2-L-MLDIUZAY9Qi=#b0)ocf<4`RyBV*@)+>H5(jIk zcqmz2CdjYi&|BTt6b0u(#0=<}tSgfCmiKE$NHD>`1 
zaYlCDb2e6v>WVI|ZDJq$;kpj;gWGUTw1k;}kYf5-1k?{s!|fmbSXZcnd)eeUyd>wD z_q(LNna>XgVlTCnYtzZ+9ub9le+{ftwnL@?eI+L?aCEIRc1Wi}iL*fOIO-w43;FIK z$5ld~PIafADA?F4&U_{x|0%ooto4}j=lL(N5G_Y+$)op$N<#`PhnQloR29ysiv<`@UZTSq9~^9(fjiR-@vp87|HWnF-sf`WwB(@T+%9^! zO+lQJ6%_yK;ypPR+2Og==&Yd@<%?F%_q;Edp_YE{K+Qb*PD&+SPtwolv}d!hnY>FV z`bFX-`FkTqn<0I-4stXrp-8{gm>&_ic_9NvDZ}wGjJ!MP1bl3m10B&7D+)gf#R$GP zcYDK`+-7SpThy84B7wf&ye=Ln9!o#|dwujZ;J!dU175zB=rpevCM-NJG)`-vvvnMD zse8D6XXAbl6m+TYu1iD2#R`nA>cMl+Yw@M0Ci4{DSUF4`1_!APNoV8x z5);%!xZ!Kx8E{x+j`Z~v>`SJ9{(OnVa|^4355=j{=R)y{8cbe9VxO}K^PToE8_)d& z*xRF=j3N5WMyXIc99@SVWFv)stdn@X2V;MV_kCN$=;X<0UdgT%YDR!aLchn9koS$9Tkq@O46%w!4!d%F?-c7>cum;k!qR@f)rG@2|IIz1AgLs`q z+z3FwsuXmS*TJ^#l}PxMg$jN0VkMjd>D=oA-_oz5%U~r8ueU@uGDpKsIKlN2&q8wa zms}{u+0aDxD~F*Q_bt0a***P^ImMlSM1AuIfk+KZDGz~4H2Yx{Y%s=vzU}~5M16Hf z41J>-dP+FdrxIH(#Nugl4cvyw;Pb_U!okH9MeZ4RW-D;7irlSzHK^7wgZua_%zR0H zDfywx9@pUTOtP=pyDC4vQ7n}CBvyB$Pyd}04s_SS@^LnJNPRxG%@${lv7_*JGW;8j zAibvoZGY3zaW>}y??&PE$Y0{0%@k~U;E5g)Mv$v#&go1lHVv{yQMxJqEsa7*pg@sA z6=Dy?Bb~mB6Is&etn^(h575Cy*ARM9$qW2{yq)D=)!FyH6&1S`+p$}u8)RQ}hXG2e zScn1A-5sJBW7jz9IQFPxw~pN{b}M$H{?_OFC!7brM?Ueq_TilK-g~dL?)$ob48>2* zv8=jskBOSVvvvt|l-}P);n($(Y`oo>qG!->o-_4W1JU_^mCzAsTmRd9dVNS@WnG1b{rc; zk1oQn6CUUN*SRcR7svd(d@PNhsXOiVsaJz6=2QmhGViIgx6mZ z`4HBd_{Ift4u%tGFoV$2#+><76En+xTsUCCo@-`&{L`BrHIsP0(}|h-fo!sz%72sH zxTKxO?IxlhH_T#P{rZws|5W|zY=T~}@HLv7ap4&((#_&)hBN1`55w%YDQrA$OjRez zO$tiDZH(xlpLD=<|3~E-sn5y(J$pTjnf}C;x|_wn*~JMh@dTB}MzJu;mIfBZT$-Lj zU-73tjjPG(<&RYD0DT_k1_@(KbV@Q;TWX!f*VN#ND@0 zW50!_>zz3u`G50XsoOca*tQ?WuShEn|Kmp1u1tD-^&;z0AQqX4lEEhWa#zWXSeuAy zS}d%-FY1oxB~^Qu0RdCOm_AE%FtglQT#-pgKVO=U8O5I$rA981`}*8M-WJ5MSNu5{ zlmrpurK%KGf1Pafxm$wVe8u{VdFNW$5lDjK86F&pf*wv$mC0D+w zwbSmZflZBAIXHv^x#nb^v?IYWgH118DLxU1ec)uaUzdCU$-pks7QXkQ0*&W?ny#nm z^Zu@^Hi4sGsO|ceEKQ0cYLo2S&O`WDWusl!3zr8WSWJ`*lpewbZ#;`;r?P2fC%NAJ zTalN0L`|rTyL5|@wUd;IEYs8lLQS$ zwb`-wk@EeiOY~`(f4?`E9Asxq3NyGSzM$XRk0RpiGmuE;)qtVJ%p+Sd>k z$`j>u&4_azWANQ!jQ2QKwtvo~!&hO?pABN;*f?T#n=-GehyzQ-M{!mBsT%Sb-uG{T 
z+pb?qXPpV-n~da;gALm*+tKxUE=k!gIB%8yQ2&Wk-LS%AzT~kVO6QXJ_%zKkjaChr zpMFm@O&Ub#nGhNkSu$Kcmlg{%SSOr}&Mto3xERYJJu|HR3UNLV!>ymvm-?SxBlx=- zZD_>Ciz9ImK2dC%3%6%yaU;xw+ucTS{&u8jC@jd5oPzjaDeQbBURn*;(^NmG>3W*x zerVeFUYv7UT??sFJ<=?xh@Q&5Lk3taag(zulhPZ`OdKv5KmSB>!%4Kc(w_~Myrp>I z0&1$8(KI(u)Ac^=7QWP%yXwZjlAk>xoQfwlk{#%Rdk2&$tMd)zU*kOoXj2StSJ} z9K2l*=fc}+pzyL+jtwIAlpz%nE^MtKIm?S(86Q6kovcJEE?bKKvyg=PDWb(Jf4+HYR<fYX3%)ezHkz43QurmjwI(0L7nzd8AHsFe^zX~n*J0k1 z=jxmEzc&9liAVZIyzVxHDw(st*y$|uCm-5fi00rbC*Cm=<5yWE@0Yyfti~7_9#gyj zvX%_wM3P%c4p+G3;&sXA_7rDsoekz>sPsk4gg@_9!cEatHi<1`?U@a#U80Djp<}d@3Nu4iRk?+(3f+gW1rcSE&r~j4LG694;V7E$0YVF zHswN$C+$CEvC-9=!wttWBwD=Ak;cv%p`xo0H?QU++lSuwp9r=g?jHyPB@oU}r1UbyPZw5STd6k}iuONt!4$d=vX^|a5^hNUWp|-)wAyx{U(LHJYq$a5 z>WyMskU3Ag59dgqWS-x)r^ZS@-rbV=*2td&2TD0*nNPE!c^dmW?v4JUBFb;8jPODD zX9kF7+J-?n4g}oGLN{1+WIrO9(|!uh|G>rhdD8z)fHn zaL#V9WWoqXOr3JMcv$=-{|v`7JAv3==7f}%(5PH`%SJOa-d{9bPgDJwrgev=&!47! zT20qmSl*cNt%Whw+K6FY!ch$*N9>I&-(RGO&dG)4US8pZ=iUXc^++~RjF{=>NpP(Ua>GR*GGRQ&3KQ9Rd$4#pi^xq)Az`@a>+XG1 z%Z#g3mgqIg^+!-(CAz%r&a@RjP{MtwJ6DFuYoZx=PIOc&3YZv^#_{AL!tQFb`2Aj$ zd)*Y>$|?N*MGx0}FZS+|90Wf%J`@iZPk$<_y=^(XvzRNg2fK+*T+{Uy_o>6h{$G`Q zjs@>;1hP>4q${WRGG}29Z5B)3ZvSbbnVwATQNq~$Swf=hWt$rnXuRIIf@*bQ>1*XI z{m7>Q;nax~e{N4NKCjLZZK}Vpufo_mDV1xNBzHBsOzu(%l*kckb0<#N{fL{F@20K7}!< zz8U!w99ia&g_h*D5Hbp_F_X~!$A(7fMbvAZ$uRj>)4EF2+$&AF9GdzcIzDKQ-n*6R z<69@JI?v>Uw&d%H?{@RW0@3k_zIpUWVag>6i&!z|$Q<OanSBF>P_iI~E~2YAD!EW?H>Yp|(HJ>sru)^@A5i;!eICxd zqNYEWx>k)x`=anX?2<)CMTaNaRsjR;ohLL3%e*SsX`7i@b*V{U`Edf#YRJ!o& zi&h4+FkE!j;xDc(JkpNa9C6v~&syV14n~T`;dLRpb+Y((VWGzB8O~})pM#<&YGH%U zscG28S~50I)+xzlgu4elEWBu|7mxdU6SjU7UqEm&2e-_iUUXeH9ekmp4Gb7%6((~~ z@sA&IVyWafId73VpjQAps$~YCpx5 zp2Bl}J;R&ZQNeWY8YQeX8-o7M<4kBQJ4_0>aPX$Oa%Y_?jvd2FshdWHS>R+cn4Voq zB-7NM$z#W{_mO0^Y}Mtb^GsH+%*K6j1xvHFFgmhWwQ`wEh1MJf{jlKD_hD?T%*FO! 
z7Y=n9if^+#Ew=DzxAGJrP5LwO%;LfJkys$_<<&Bck%?VJhg zFqO7iF#C1B=mFx$yQq1e-L>MUdi}Xt#Y=tQbaf;LhYY47#feWc8@BZG=0cmXObVJt z)eKV>-zp&aTLQh;mM}5@mP)^UK?N+g;_irOcBfge_lGYBj*F&Fa%N)}#jvw;2AkhI za(L-XERwUx_9)lbUlqLgoqC#oU72kjNc+bA%&4|ti1@0y%ACPB%Y(`qksLR2tCagW;K!XD&1UWcUHz|Xk5zc6vc(Sp_c=4=Sv9P>=t6qt$ zm|MVO@uQF4T&-H(?N4QvE&ug4!E3x^rEW-~*H(9W1pCvZ!8FMOvycqt9CAiSGfM8X zwOzGXRicGXm@zlI1u;SX?Kr_%axpU4^QQ}qwMO!|%S3K|H|6V&VtThqX1#6+$y#kB zA9|m9{!#n}qw=Yi{D5OMBuDB=9=ma*UAJL$N{FGyIhe0g!Be|@>ZF(B`mhFpai7(_ z4TktW^W*1XW6~}ei}abHT) z{qyD3CT!?%Ru#mUvOO)9D+&5+H6Kd%b?Gz;8%mL|$@VnOCa;D$A5&+Ml9$9NpINlg zZ%LuUEA?iL@QnSW=as6@@<%T8YLz4zT<(+`*b#grPV{<)6g@8DjmuQ_{V3)}UMqUE z`l{ZV8q?=)FjD zeeoAoi$wLw9nO)J=Y(6h#)GDR2@5uJ5@C7Pr2FSkH8vJ+sr@xw&q2d{Urpztn&u_X z+p|OU^IoJjHuDj-bv#8EgIM~bAKnYH@XF~=e#QTCXo98A7|MOK0!k+&QB&%OZX-Jo zT-1o}AAYD0Hr-fVXorQ&8`oE+(KyCcG{gb?l@iXQ-u4*D-k4M#%aqC4tQyvsv(ug` z&#(c)A8^3N)=qN69PyO(1{`z8`1>ddeWw%A2l|QDEAV&%Ti)bo%#RFQuEjH($Eul| zA!+kM@kuh`dWz%@2&bUGXbSrE2;_EQ9HU)?|8={FMssBDB>9bFGk&PP!mb}xN0(4* z2U4=F|1W>ctt^W;XJ77Ck7t4CbXq;M;mDN&Hg`?o(Y7KqU2nE)GqM|>Qy0GJ&|qiU8P{l&*g z8((hTNhffnE2pZ5VB(O-rFo*==v>C?7g;>JRm9WcmRwl)Kn+{!g6rF8UY<3@v$nAR zYUb0aLNewXhSFVha%UpEh^a3+N6B!UVpzny4K2mv^itW2-tBrle;&n)e`TSfG&~Kx zsiL3wTRe`^FRzw!F=lo?dP=e&n#;U-b2Iv#dZYS>OWs*v7^|j;pZ2~BPF0c#cfpPM z!pOW=G@XoO;Z)x&W%BRh8<08PR;A6kD+krER0Fgxk7q)#^gTi)kLIwP?`e))Suvje zC9#Yg=*4fF3Pl5+%lzV8G`-JX>eN&?>K|13EE7hr8G~Pj4XXwVL&ZK1-`TF%JB;C! 
z%(?zMU`N_~$&uQh#r|Gp^gq5&?X5hidU_3}*esE;_f43yz?=7NQ+Xis@L3@-{I(^X znn?}}=rNn6Yf_{iIg`y5HOa15n_0f5*dG|mx!;T^8YA_-c-umsxUpzc7;jHSv%kWV zU+HE1Cx6>d$=SNKza=YQ->0b*V8m-E6GoX&GiqeYSEJn9hr_xPeURPdlUUF70a^@wE9y#%@%Q(Uvb20 zZ#mC8X7f(^=9=c5SJbakS~H%hgICO1TsVro_GZj{t9VwCL+wgOnth(YwfU3ilWxI3 zOY<3iK=`eq!HRs|2LIqjT+!}?*S!rY?i=Q$1-{aw-NTs zXLVk9Yiovk@PqfXJ8$~!j=^)CV!r5vGy5knyI+yU>uvI>MMU?D zYR4(@wfrYpjbSDn>;x9l?+I7Vq{a=!zs^)@-qgq1CttXY(^-1Ekm|&yoF0B#HCkmu z#?>Su9Q3IcJ<{2fEJ~fduuAu#O>8lp(La z`19{T@y*wB!M9Yhn-6)@edZWiCrh^S5AlU&3)dkj5wAOi8rRePd%sdqq1RQxgZ^wj z6oiZ+>8kC_-2Re3X6?(Ro)ZW=5KV*GMvT0Z$L94BJohL@(|%p*fAtxeep~Iz)@R4i zF#1-QQ%BA*f2S^hATlf<_r85Q$4C-c@_yjlBe^SfnDEG`Jw?~4n^A6S#z zPQJIZnZmO6BHwo$)~n?2+hb3%gVfQB)9E4lI!*c4n)(!P22?Aniw{-Ja=Axb3F65^ zE3OQ5W@KIQdH>-o`N0v4TpPjkMw00`tbjD(OEABHZGYBa`IiQg6)irgQXe+dwPM}> z?uHMtSUcKXGPc68DVBM-ut642E@i;>Y(~wHJU8hpzCG|-jWRRf$fgip>f2!#G>o@l zxr{s}y5^R#^pHDXyNQ0puaei@DdOnAg_w7*R^iPn)sY|iG&n2_{&UuRm-^dgXEvr@ z?wtBOj@{dY3+^NQS$$z^+?HqNdXdKZy4YDw7!vhW{gZFaY@IP|Gd9JwnJanovZztt zfy@7lV1BQOc;B<*U*ThX^UY?AkK79jG|N+5yz{#xtZh&_EzDRg{D#jpEm5NP{?gTwtr3{_vtZ~A$*GXO{$H~_ zaS97%^`$sQez9Ssw&=>&33H^ca4R3w;B05n_jns)yd?~8BU1t|Iq*Py$;CR7!B-H% z+t1PLa4_ZEnnE_@3;$BiTTS(YnsW9uy)S9{xoFBe)KvGRDetMGTMcg7olz(IiqGx0 z>BKjY^Zle7yM#Ys7~xJ?{unYBCy+Vch8?0ycI=%=&q~Q9TG>VzC+}2e2VGX0i>LIy zDXHt7xl^5v_j6Z#?s`Zby*x*w%_)m3<&|k7{}_u-E~_4rkw3E zjH=VZP!(PCjdAi!8pU#cln2_vH!-@J$Krm4Xu7{>dJoZb|E%89obs>}>Tk&nkNh4+ z&q*f2BX;HO9@)>Dxbk?sKbs9w33d=h@~R@1Ue4rNVG)|nr!`$q(|j&X_gzhMWv7m6 zfp@q6RK2ezjGHi>37-rIb@HaNbt>D79WeSe1hZMu1V{$KA6?7vlJll;W}(LWi>4fM zP5X5%%G&dD)l+qPmg4@;I6OCq-{OoDCZa`Nvd2SyFXEv{n$9=LX?$KOy4}De^3BBm z-uk=RnEXKnztU&b#!xmJ${aAm4f|~wjOp&dvfxqdUKY(rWy#~!#SAk}MMcVUtKq#> zQ~j={>wSDvkCfT3l&74-ovsCOXOSN1J%od1mQL9}P6W8f>@YY6lK^wf`W3TnV-m&h zgmanFmh5@Ql|_^ZHJT*jaanS3#yc`p{3|UiBu`3emKiT&czwf&c|~%MZy&?Uw=)Q9 z@mgii`>qau(8bkDSbCE@@IK_u71wNLZtx~>Q35^fl6Y~|nbwU8F_J9Rqt3X_1pha z1H8olE)3f@zGj5SI`P~_>iaXU?6e<4;hP9@8(6ZwgXD)VPsMbx=m^7gI5_sBYF(gD 
z=f>W&nQh3PU}t8pOF~_-=Si%1!h9#w@4XFg<`wY8K<<(6W$u;I2+!Wn)z{v7Nm;GjL3yIl;}JxyjxGTS$m`HZIPX__A%ZeEx3L#x!{V*?mA zLh{sgME9HE!2q{3HtNeg`@dkW1WxB{Q|YC)$wSX1mEs*aytH_vG8R;--L_`5yd1;^ zsn^_^+A~i0Js!thd3kIMCGpd!Zf}U8Q69;{xV4hG)4|-jJgs=3>U!()vV15Dg?GQ# zNb=@>2oFimU2?icF#2H(9oJa$>}MW+PvYrpr8%!_ZJ_I+p>G~38hohns?RF zycJ#Trw#7xtSfqk#_n`$>&v)N>12Je75?uG^aCm-+8 z=sU%XQ@NaU9Kw}+e|*=(u<_469E~bq%k2p2?U=#LF|Dzvk~@7?Khga7;9PFWTH(T7 zb4d<3mw}AlV1cGE3N1%39YpnD|tw^Ysi^ znISC0qedJWF$~9y6iVv(U^m^BFOexE|7%U>53})an@rnRvuNP>TCFszQU(!5e0mYY zm)@qraq(hL3(-aW=gIn>6X_k8z-{S;e~T!kmSY;Z<4Vyq2Pa~98_Ddrr2LKaWn{%NC$g~H(z_WWYu&4?~R6HdOS^@^tF zil+QCO}Q!thwBrwccBVcZ_3|0lZonW&d|pWq6x|-tk8jt!o6{=lgf@>2ydl;Lmx8m z7$bad4g2h`v`?$yzu#5kHyUy6(L@Gl+wsvxdyEMY9qz=%dU)+|Ay#~M!+*I5Gt!woH{URF(-p#KV#u>+Pr`KCoLI9C^{zpB`tzDfhNv zy6}^lSSB%ddp@N)HCSM$#dzTYTnQLL+8GPUX7*tz!hQYh$$zO6I3f8Y`ga^K?NP#u z_Zc+pDO~^C4Y29)O4Ud(CwJx~oW(!A;jPRIM~UBQw+o4PeR*dW$)`$VY91&duxT<~ ze99z8z78kGeo*1k&(eM8&-TIQ%+(o6!k{dA$lZV4!f@7Ii=pWpZ`S-Hd3tRNs9mps z=bIW*Ye-GeP4|VTZY0QjEUU_iMeUPu@^q)}G9S9MjUey7F$IV63Ewo8qf)2d%WTT` z#EGa#D?&x1cKlj82JgMOKY2Kh1Ew=D-4c6~Vj?69qUyBxN;K@ZYC4D2 z)SvotYzyXdS7j6okh~V@e`aOvq+er57Z%RnTj^V4ZL$r#VIMO<7S$DX&vm`v-y#w~YM zLeL<|TMH&~rp!f`+cQ8*_@-Y(11F5kW)r89;UG-wXyN7M#IUPlF&FmNq;365b?>Y> z$Gb?Lh^HaFLj4%$m%&H5FF8&OrGH2UeMdO(C3+?;x@L3zW*HOT*F|sMZe{V!ki;;l z(L#;5ExI*Noea9YapmA^KbgU$&_~CL?Z--3B);(r2TRcO9_fGJhw8WEwwkh9pW#Et z6D~ZRzMgK3Nz37FfCsnsPGnzxG@;LJnbuJDmpkJByI#T{(r3t!9JpDJOWP0^dBMU#L*)cR>pH{o?p zcp)>*XYqV07TtaWe_qrH=Z0vGpE^q2VVuRtUs)RG+y8Uxml_j)Nli5}=D@bGSp98* zkL;V%n+O-&&W)K5#)^I|hNXRMnY6QjC&C>*d8QCe^=F#ilQn%FG@TpXNqDECBy)a5 zj2Z2Zj-y&~_j1muu-J4oNqxrCjGTGT)DO`SpQapZUG+-6?0s1Ekmr0ttr$LC zw_&yTXCGh9!mhbH>E+=(-xtX`D{~&-C`9LvSOO9yzwv@*obH&)2AtNuq*mV6WAbl) z?0jS`xeQLE3ooJC(uWBL{5jtynftmX_^cO>&etfqoz6JDrw;}lcs zr!l#&CBM&g5zbUT2SryB@Oc>Ce}&n)`;7NF^RBhUV&&fVUkJoN%tA~VLh z+nqRCEu6|ZqFoqligSld#{8Jb?wfYJE1y9li)@0f2sc#2_o?Z6y<)Tp*T174^)?hw z=Tttsn&7zGm!HX^BXjiT;6BkWzDQxraa&Fl&!E@d49vpI(5mP}qma|8EZmm0jyVL~ 
zwh$lla9WA)Y^Q}MO?nJvj!ruF#e?>9`CRmy;b<6~fWoBRZ62skE9>qVfCmvU{YyWxi08eh$Lo(`cTz zH>O)_4`z#h?(es596CLoS>aJQ_*!wRNeMk;lPTL?%$h~@xz^{4vX3z*;`}rcbB#z| zJzRY78Qec49JtHre}DD#Xae~tlH8@l4q zC7a2HUM!v#%pa0Vz4nLj?32obcc03TMdkbxUx)ePq4w-$iq@21j{6z&?A%bY#%5q^ zEBSM~LaDBsNWJk+ICPjnWGB(7G%eA1z4pD1tHK|DtD_MCT&p43prg&WSYW{$;{sZj zSWx|X5+N(6!RhhdUax#NU^)w&~P}^yIWKT_21} z#6fgF{p-!3|NC_6`IPcAR+YXF6VV~7t-rx(MaYH0lx*&;F0)YL(J)x zQiOq1GJlMg%$@=F)R^d>s{MB>hTKf%GzK(S>%?Nwl`PupF4~E)9Q$ty_8kn^K4=z| zcT<@-aW+SD?EvM5KQMz(G2QhO;CIxf4e4&R!}$< z8op0W*XvT&0E6g9>gh5AdOaD%kv}a7`aGO@cQbh)v)kP2Ny3zfBdq6eK6#YUS{PqF zW*4yBt~Og|eOJM=`?1hZ_M>g)d=qX~w}WYHy+4$-6Gk$0c_JQfOc}CE^wajyL<)bR z<+2|t-Se_)961=R?!g=#Y(a+*M}kvx>7O_pgPOuJ+?P(fy8Unrl-#*_lj+mCjKcd( zdAO@4k^c!t75fk<|BG>Mo+C9yXD^rHEaDXiX z*1EBKZVp3R4WYrwP{QA(Qnk!Ra+M0`eJq)NQzX|^>y4Us_?v2(Z9wv_A>v`NqK4>_ z8$HkBdpF@-b{)^I1Ct4!Y{J~dGJg$-r|!8z{GGq4!1ixdeggyMI|(0O^qe=!Wv;zG zhj}H!i`pdI<@m|OeNmi|zS)=NlKWI%NQL2Nwa)CS8nDv{+thI0%f6L2*qQg8vuRl6 ziErK*8Xt<`h4{w~FD;~nrEpO*N~oFMi0|)jslCI@B-c0?i$YUYNPf-lBgZh&lXX?v5J0+7Uvpl+sH*U9Zc(gZ|(kvsG9ol9L$aLnkXD0o=2zP7bD5fMu zFwa4{}|M^lW_0hj+4s6f~U}N9u{Ce%f zlSRV&{VR{Qk8?HtTr~auv|L}PpWUyjD)A*h|7SFb6HJ)B){fp}5PR8^osp6Y*D?m* zXRc(Ezw zy^4s~o=6+v>}mSC&J(m~AUu{%q64#9GMQS#Y6H)+?y&) zc+r>lt4(l&7iz`7<~$!9%$*aG+r7$xSrL*awa5JC-+<0>J6#Bw(^b?==Xp=IH*RxKz ztvZD7RGmIp(VJ&SM{Uy4u&7xyzGK%4cqcJ#TK-Hlleso9@9x!6ZI(B1tWMOsKl`a=TS>`7Ey<*u1l1d2gmoERmln=ITaICRYr43P3{+7ed(}uiFhaD}l zNs`%XlP&fXADYUU?s`=JD&yQ=@uWx{5LPVxdB-;@AYGrcGlB?cVL;v=Qm6PyhTzfl zO@9xfY;ieWKc~{A`Am&p7gf78?W`}U;~NKX-fJA=&KQx?#gUh`Nvzr8#G6LW6t0=d z;V{z;F4y9v7VORHt#Vft&(?%p_Vf&$MvLMk=6AEl zU{(p8#B=TcZwc`hI-EJYPyOmAxp|`N{#)wsqy~QMsGUXPaN&;59Z5^!wT?XHDSOOJ zejl9A{^KP)xYkhmY7f=pAtp3>Hj!2Gxy%&bLz_P{SRLp@t>gek?~GxloWYq=cNA4; zQ@@Ys%SY8GgKJs#lNY4LJ@W$S6DLX1b@gS zw0CVjKL4Szy66d8-5Z_7G7g&Ij!k%uFt>ep9T(1NGg&WM(D~hkvu2$}hD9;mMmMG9 z?(b^J&%Vq~bY|#w#kyn{R!E-4+dbk}{2;wl*NIdtc4ONb=@tCQqitrqu{{a|D4 z$Bribv-nCbNRHydOtPw-`FJ^m+KJQAJ|KLGm4(a~Ufh?%;wRDYbx+gU^1Sets^3W$ 
ztNEea|1lUJ@nXLJFB{!vPJD6o=jzc(%-Lsy*75?Pj|qoP_^s!sG!%YArOF*Nn01{J zIK9(=wv&Z7E1uaI!fifoFq~!=v&gw1Aw?kx>=)kU=C8L^Xq|smD_tAX9#XWbbFp6RS+)}b0rgOER2usN^vJnk^(@tjm=omuH&xX`!=tacSbgmy4 zzL-lG#gXx(cQV1X`wWIy#c|_AId8gjB6HbuwP2|}iA{qgv(uc(; zBfJVnzIH8Tq}&r1A1cOf{V7!)dq&m2Fo45r{MjnM`vZea$zG6(%_3t~ivG}eQvj0+ zoybTKw$cGP-+N}0-?9P!-L6zQWA(Y|5KJFA|L5n3m*Yh`M|!w0^NJsbykan4VJ*7o zLVR|lU{JS^s)(N|=g;S=c2n`j{pnB4GfRGx_xV$Fl|79-INW~{uD@c~K2#Vj!g)#? znaSR%g&MCnN2?|+n{QOFKN!>h#smcZOwcbMI{J#A#>SpA&ByZcQ!?YHc}lKv0e)4v zj1Z<;V9mz#+3{V?cw)l2H$IHqX2!moUZkJNlzb%*M%)g-dwv3ShdDrcF$vC+^OjM> z_&!bP@VORE`WjL@MLz#yrewXA-p9u@7GHNFe%wg8gG?qNP4vHs#jHLqeys)h8n0LT zTRU1*H{;}aZJK8cVpX6$I`>?uRgg)I91jj&9M1zai55Q{csV5(j zsHj7<aJK(##?dlo{1!<5k&P2owWa^LCqn9ETl`0q$R3@E$vNSF?rn~R z%^ejSq0h>?VJw$AeD@AVCSK0qtdA?PK0YiT7)`<^GuD1D;#7q6(gNh%{7;Lk*I%mN ztBvqz7RGz2E0fdQ$)BEy&HH-nYC9M2dU;G-EC2S@=K@;vxY(OP)lOs! zf9qU{JB@_3^(Fm(+HyNajh#+@fHNJ(6rr0OiCMPvuMbqK?)i7r+4|O$bdTd$H|T8Q z&Vbd$|JNVMPLH8p(|F>>*)gMPCR^u7{d2E^19x7jri;#~T5=C=y?q=ttcGFtz>Ps0 z^BFhLQniG#39)@6s8ipRH-lVA7$Lqa zD`%SQAH$ag(=eKC%k{~^y*nU2ns!C>achp*h|Ow(XrK?Y&td9^L3BInCz_T_-nDjQ z{m7AA>77oe&t7!dTtQOzVp7E4dhOIVb@Jv@l|5RIk5dC!YHv%u1H!i{$tL2q4>l*p z(y&(y0j4Gd{+Z9!-Z2;^74t{SU+VauZ`AfmeFoH({G6}Gyr1pHh01hVZ1!MCb`byV zjp1fnOVObhaZ4|WuZ@KV|9P{Tr?pSTb{S2!VeGt#o zs-CnEe`Brxrc>)!DWkX7rtQpU%HYZX@xKq{(s?^xxeM3NHHSyZ9>h!Et>Ml{<_6fY zd%pNgTc+{#W&vF{G^f$L>uP7Jq1=5Z67o^>U2+ap-c94dZAVVD_F|h;6mS2uCa+Nm z;}$1T_^}8(@nPnkzplOn>e0(!H12JUF)DDPL(O!?XglE)ID*c7BWd5wibhik_$GYM z%jN~rqiBiF#cOK0lQ|W!5m=lM-+ieAwLWFDPV_HL*89@URdO*TkK*vr5_Gdtsqsm8 zX6Y?t27F45*0CY@k#J$V3 zrwDTlGo5H2oQd{pH|o3!!NFoGbx&B(t}2fl`5x|_E}+-wnpE|@r(Stlknu5-O@aCp zT8|`neY$YjeQ3B}bhsNb=;SK7SJrdsJ~o?n3ulq)`$g3n_Et3zUijtIcGCIqW4CHcE71E4=|V9r13D-Qm}H8}$FK+^w3* zVJ{=`7EA7jek|Vti#Ss2i7E?vsPg)WzTibH8P>u>-aA;bY4RnLO!SpGlUQsPf$?q= zRvj$nOPFwmhL>?`{X1pzu?aiZ58yz=U{2MuP#vmu)$WP`(}%p@-2vN3AyNX989)5WcnA-El;txj+{RYllY~qF>5rN(q1{l z7|mu{(`NWue^kTV`%A{U8+(u0aQ>+)znNswX@mF%jY9Bni^S)PJ%1d}7cO=>6U6)f 
zSo?!&I{k&p6e<$-_16x}DVb9*bMN@TJIM2S3ydNTU;Wt|zoiAeYj7)xs{_NpY z>7o34`~ULoLo3IV_}o-{B%U-FA>5y9p6nMs-QHEnP)BkVY)UxqFFGUPJ->aZC7<(o zl~~J|%=H1JKC{9nSmro;b6B`i@Pee3@cML6swIPm0c8TQcfu1#thubhb7$CE-Mp!?7MSs6k+4OD*)^iGaFp6+ zb7{RJPhTc+`1dGs@;#aNxP&vkB)`H<&Xs&^;a)yh>nG^4_JL&QIh%9QTK2xhnas5J zWa#%G)=OW_@{kpky~MxrI++IJib(HuUM;lUsfK?R4q`$Q=Qc`?%2pQ)H)V1RZ($Qg za7kFiJA}76z`cx?x2Dn2dnRT{t@vJXM_Jc3<&n!6RxCH*ZeL%L=cVD=*oCS}A6`vP z<=kK=ets`w`SmO=)hy@|WvU-gzKd(oQ1Wx5wFFC9X34Ciz7`(d|h*gk*u;gb7cryR{EoJI)#p>q3OUf{?+iZe5WeD#v1q4G;>uN9VP)jZAb&;6_BMLW`wXq;yZ#7p`-Um~*kHs6fLGN`foIG)3&4JZ>{$xXRmpOAC+ z?uEuY-uhVuSP2ItG>C~F=8W0u$ljyrES%$pt>hGjdPQ)plguM6i@0z%nvfonPx?a( z@1q~ow0(Mfs}zo3M+@%ByrPZp1lAaN(a$zOdOK2&wK3(N;(SiEjz#N8p~mZNn_ri| zcYRda1N3;ZQfBiFt=aj&ncCmwAhZ@ftyTagwWnis&y+u!7VxQEEGZp|HGZF(t`|J( zlX^GtntFHCoQD_W+^jID0m7t&BKD=oC2923tzupA>YIgJ95`< zRYUTl=bKGsx3#hK${aXdFPj^^T)DS7jMo3gkx*ew&+2@ZM+jL{Zw;0qwj3+Fp`AdZc8QR_b6e=9&7? zTAzMn#rz`b@IurAj1&T$^b%-l>q*#%x_Pf=^z?{Ql7q z^R4OPXLS+2?GWOAP2;WPggcAhddu)QF1IPdctCB=UcRTUEHq%rtq~MY6rDhttMoB4 zIe*xTk&XcrH&4cAi5Uma6rgn?j*4dDkJE6ymQLT4&e~UM`@cr48Zn;yfdeJC!<|ko zQkWyWt}4;k_2?mI(7{1GDJkVot!SPOD`Vr_rhGH~qR#f{joD)*dF(P54RRwqK7%Wv z9x{KDOr1v2lHCbQZs$_=Es?*s<)P_%ntm>tuIHouP8IHcrmlONN{06o+J80Uw+*h` z@0P=>|Hs=|er2`2Umvj$5wW`qMY@|c=|%*k5d=g)y1To@?(Xiw?ruyJ5xa{c#xsB4 zKjD7yygFkz#(mb_o4r?D*PNeu#|4kP!f@_p68xO0*R-yNHuDdnzt_UzLyHhKpM}|S zO@#FFM^!p~Ge_u`am>ciV0#SPGaa)-<8Zz&bt4bTv0X0%Loby}{5!qLeK7cWv+yMk zV9CUAtX-)^eTy^SQTF^s*hBf52UOxRpv@fD3P0-KH~YS&k4h+J$=F6Rzy!zSO(T2Cps4y%(-xhY!yEeJ7C3qb)^4r=bXm# zIJ3f~6>^aB$_d}dQHuKzhspVBaIh~&+uQ^^HLOIzYB_kD-4IdBRWbgs4~{Zd*yE2q z)aPa3mZLSZ-lsCNED`@o$V1mC#ZQq5+2O_1oXDZ?@TVf>5A!nFBx?84$MJ4X&VJcC*p?)(V=#%wTI%fP(pESkN&S z@+W6O?~5JIavoE<#XRp}71%wZFM_6T6m5sJQTIEZHES)*X{DC)S1#tIveu?-kBPj$ zO()qWKg)H6y{m@eGT6TBjIl@diXEoRAMTY8&#^q}j8eUWDOLtN`S0YT%b(e<>%y;ghJkYgidV~+!eLJ-+87Uy<5ptoNkdW>M7j;sN_ z89g!L!xN!xMLp@GDNsq*hxR~bhfT^s!(|JsVkYiIrC4m6ZG(0l=KL(r#i&OG5|8)o zunbIc9*g*g8aN%{2k-8{UIT0Nw<*N=M=t2yIvr&PlHp)&fSZ4c5m1?oN%9rU4DOA& 
ze|`zGbQQ#I4aM3IN?5bQ4vj0r(Vpvo?sL?!dsrMs+*3y2jao#VoBP2AF-am_GWU zs1JG|?0s~wpdbl@3v>}g|Lif_Jk(CNMGMbZYCV&2nk=AUH|pT@Bp*qc)i_i9Kxi3V z75mBSs^~ovOJ-4DGRFcpt@3fVk0op^V_{zukBWHmQCN>E+?R=~`zp}AUw7nO`X?G% zH1TNJEKG6H!>?BAyjAisntGz!nV#r;IUb89>LXLR7J>U&*O#SVandj0o%lwy>gYo6 zZ5%B83?MU*Jb-ts8~1i4FF67k|5CB$tr=cltHqBU`RHd*htJ{E;lBDQf`U{Ko<0*M z;|%cJ*%1ls_aEuv!o8h8%r?Z~*BJv`JXZw=n+%L&AHj&{%{ksO*#Bf4Lgg(m%!ORP zi4N?uWI=H}Ig)*+;pns|+<9VvrPNVdoJxZ7wsK_1^@Dc#JyFB+#xs=|RNdD@p_?1F zY2;y11id;l{n6Yj2^&7TP&-qLd$lDfRjWYj#!q6}1NLI6BkH`3zO!X|FzGi1O~dmz z7uiGc*KCYll7!O*HfV{jM098BY?$Bf(MiVuGZP`k^X zEOmF>Ov}Z%XV%CxpN9;iB3S*YgO*pDa9i!yY#MaRx`G!1seU zGF8Z4?V}8z{$*G|jk^8lYW$n`P3&3xP`n>wguZJ+Fy@FFy8Uy*{$si5cGM0>aze;r z&qU88&Yvf%@QK>bU*zMIPJ19Ser%vmKNPZ`i|{;D6F1W>$-$>)Wt1^S6>!bePlxne z>Vi)5J?le_m2EXNlciB){7}>_A&1qu1efmU;hwBJLOK;A-fIfpB+tZ2?mZ^=wSe}7 zg)kXi#Ai7l3CaC1c-dc3pQMYl1vXICHbzZ?C(6jB%2+-Xp&bKJqEr9_8CM)@sKnsk zh47%i=gfgk!mNIsc+TF#o1j`GchbV3K&$_GEmv)z4{S>$Qi}5NP+b{by%(ZEx%=OXvHHjR2brH3HOVy)i6(PD%u=!(M8%3qmw;gv@ZiiQfAm*SO@90JluXS z`Ez-8xm{#6JQuMg1}O2KjcwE&K2Mv93DNYE7)`}_nP6CJ=3<1FDT+~x{mhqcrayA_ zw@!HXvPob({RC$dFh)ubjS23^vSCh)-ZU)u7KFLn+iJPd>EL)sXta6O&{MAg)?sSJxnzKTXFzX`Xkc&4t~!D*V-|M13B4+eJTE zRo8+P>z1SZOwmu1Sx?=HQ1^v-?u)`$4~j;blL?ZBl_Ezj2Z2@OXJ70Ljk8}w^$YsK z-`K-zo+(N%lU>jH=jKjM*plwUe4i*RSVXB$3BSXpn7X0>ofqh$ z*BmFTOele3@D!|-n}zb-**ML*{{f|X^iE>kK6x$*ss`hQ*;lca`Vc>pAXHJ8muqW| zmtAsr&6aS|HOC|8P+VWaJc$7n2oFiXr^gj2vhMcWMhCNwoq?#!%rE>?hWkCT;7MJ&qp>ukhQAXB>@}e07DRq3xvD&eZ{qz~+B#vw ziRt*XE(Wuo=>W~8XlYF58K4rk_;*)N`Yv3C>fv>49J=w`X@8a4qodjI9N-8ue?LSR zq(JWkYj>OGpfj0bZk#95R!Cv8r8H_DX!E(dV*O2B{O)Fr!CP`+*lI!kL?EW?@Y`K1 zP~TL7v2nS$(x(L83%lUy^$#L>Jp0qUua6t_(Y?hUlMduyI{Qu@C4ta87K1oNvJm5J#vzkHw|fRBDOMF*l2OQuD~I z-BpEPdwHB1&i&acWqke2%r`G19L*)?_g*d@xG{(2iU$I>CBkJC`ve)Ks9%``L;AT~ zH3nevoR8v8ZzH(9C-Zm-c^*ktnDL+xW9jc%ZZ`!1yJN9d7{F>-CC*i3BHD*_eV-3v zWn8;B^qhS--6@zXbkKJ#u$j3+Di`h1@1Y;&c8kX9C#rCdEJFD31TvU-e`{Wf9-BT3 zqeCVL7!!g=2k6V1WsN((3!v&^hsdxHuD|Su`|BcLJh=rcnM<8r3BRGOqRff8+%UlS 
z%K;c&rwzy1mM~`BZD^bW!kvRr!x}-=crCKUOPLp#N>2)X0?HlG)~zeeOOXO7K95cmr z3xR!Z#pspfinzVrxNMh&lniAku?BlfE*h^6mSfTbIdmBKUhErWfYp~=vClvUtJqhW zmz$3|HEYDN7B}`n3i8<7n1XBpgrD-#uN|6NB^jiLyTILpCL##h-ab zO3s+0S%lwC=GZrHI!@9%+VvvmHu9Pa$#l%FF2&83u8>>uMeL9#m#@|fKC79l{?!(J zl({cB;f&fN-cXE6#oh!A@x>{OOa6Q(wqK zJ{6AmbqOdwYle9%D(F$*^L$Z`e>x*DXyh-kOI8=@2CND7(7|l#2M;Zx9<`G_hB&ik za6TRj-K|&ysb-HoAN4n=$6fMFsAZiO$KLBOFDsU{It`>Bw`Dd=4z@Pf!|rboI?1Kt zEAz5OCRM^BKLrNq+~0kFDJq^l5ONKwNNNg$l%fMh9I?Yr_RAkDQqwaq6SF=g&>@{_EvwWio{ zu>v;T^U%AbLgMR`bUdklU9rwz3fAj1aQN>G+_+7?h8>?deM=5YtWlTi1C3AIyRlw= zg>}pu*Rt?r6|+X#eu}=^-;0Z1^x@Hc2CQFc;y{1;ELJnulKs>@1F09jNbP%X6WHih zVwqnKMygjLPP#WXRJ;&dyw&hyzc+hlM(`VDgS+Io+A0Xmt5MF z8T1j9Gh6OI@uABl(Nds|lB=`O(vkTBzy2@BG#8867cVOa#^jZ$)WHDrzcS~gFaN!< z3^qMaipT>Og~3{5G}>n&VTcO)T3I5cJL~CZEU@8j7`9eMA(uMa^LjO8zosDAq6S(1 z-H`P9qbTjoJd_k4T%zx6;d2WZdF3I2IU{9R-pI+1M8QKnsPrnu`724#o>h*5gmxjm zpAw$rLnn5L!G!`nX!UfUMxFlR-wp_U9SVa>>5y>{aI3DMk0TehohqSzbTB5|x*?>W zYU1UL5PYQ0ICH8kd-3_Gt!F(5E%lnt`MoD-3scM-z3S*(p5J(Ql|QJOdidg-BEHhSyJ;MB89>^l^*E zl7)uoZtRFvlenka<%9=h>#A%^LQ%XaO6Js{M4fv53AO0g`G01gXRla`~}6QK6Q2xja*Vs|D>T-^mDSq&#lGi($N-MyHVqRJO;+p z+nkdxMTee6IKHw3!+VlT{B^rX@h1QAR2;I(HBrIuyi3PI{8qG~? zi%_*@7<6906}3~%*BLO#jK$E~T3kcRksHZOs+CoEtIPE$ zp#y4GDKcNm3gaS8FvZ*!f9H`uKh6p7tXLO58ikPGhB)D03|S<>IiwVCiwEJQb+dT& zfAi7CaLg^$M(!|6Y+`+4MW_u{ZL>r_%V^B0q@V3Mb8~%@S?j4rAwqQ-T4o9hLlC`Rb?Cn<~2`$3r1@)q11L5+O z`(f&ZOU#+aFvb=4?ISSuaR%zG^k8tV0#Z@w(DbiH{-+1xlwq?_k2l7DT50G}?um|} zZZN*j@q&DDO0fWTtmEg=E0o|k4`G=}IODkp=ksLIpmbRTMDjaGOhT)nJ{HXr?6np! 
zhlcrQQa-2(h(zF9eGI%?j=T@lKowJGzEc5Bcp^HEQ-`S?=k(FqaELR9Yjid>Oy-!H z?u4qr5zzXk$DD^UdEgy;-NEKfJ;g_aEisXpB8B71-iv z$7Hfj4B`H=5(=#uc%4~=aNblk(Uku2O$H@$L zbPX)SmADeTkI#ps*V+2LFZT6%ES6Aj+Bz`=Wh2>lf;~2e=D~>mCHr7k+;B-i_$YG> zD62x#%sjNPpDO9^B)vaLf9IO;P81B|nQSum!Oy&LzmZ&yTjubQ&Zi!mKH=N|?9++I zK~pt6Q!B=+DY5VyR}LMOZdiHkp{UMKL;i&T4Aa#@@GvW!%FDuzG8>$~?2Id|(QvZW z#*-DL2znKdvN`mX@BSpJ4*U=$?CZFj_!b*fr^;Sp76ffN$6-wzU=#Hupcr7%e}@!d0`4>B&wmy-g3+@ zbw%5Du3-11aaW;PtZL@>uzDIQHkd+DNuc^OIr5>dSXl3d8x1@cKb(jge)*ViH3TCU zN`9Rc(|RKR&JA(6MGph)$eLcRjx$zv=#rd+!7etiFL1;_vd#_c4Iyt_g;ggru*HS^ ziO|nt{=hHdMUyf*beV?4Jo@n$0vlp;acMBm{dWUV`YHnbggVagT(zSj93hSLBTVUl z^jDum$}JTft8&NAt;_+ha=_jx+IM6K)G9@}lbSp&X`gru^K5M&e2c#aoFFrL; z_d7ET6I#hZ>Bs)jojln2agOg5h8Hg)Fnx{{p68U}Q*A!N;!Dsj-whjh{`+;2xxm^U za9nDDUV9yo=#qPp>^eh@IS%xhMG4jk~MMG zj2wvT*$9he9-Ou(D(vI2c%>N>{*~j}oE+9$O5w3|Fn)}tYUtnrIEQuF8wnYUL1^%dkN&5pydpOTMvyY-(qV*fxWxU;TwVm4;7H>&rWy|>U} zcqH0k5H)KuSE4aL&;*V2=l`=}AKjF>ed)#-0AY zRFtjI!p&ytZ^IMuh3kW);|14ufZ5saBJ+SQp3d^+Io25V3O49qSp?gjF7zw}vsRIW zRmpnzlu?Ee`!kU9jd~&puT#?TPS(l5>E=ISw{wMOx%d^A0=r{~5QZK1I+ zWWDn9X7-Pcr$H*X2Hx8{WBjYvqJESzTv)$)_f`i3rrO|5r%ZgZWZ!9p2TC;0 zDtUe+x6u3F6LDK!33ZF7L3NNZLZd8TPmj~HB(ebN-H~Sz4h03^H2n*$S2FN>VlkGq zbi(xRPejU2BjDX^$d(x5L7_Q17?r?8)*i3?gE91L9Bb?bICh%45S=_MSE$C$&mAy8 z>Zw?zuZ_Yz-e{g|jBn;<=+ab(?U5EJ9!?K%ei+6-GsU`pthcVs<=Vy^@wIovTCH0m z%-;x0Yt!jDQA00kabrWs#d&XwW23|H-8>#`DLOc`yP7PPEL~!WfyQKKsCiUjAhjWBYpanpMh59s55-+Gb<8Ah zXX_1eV8W)LXG|WtMmqD{6##jwOzuB;p6*bMZ{*~Lb*sX7dS(92+9#Hs;QB|F)0#9x zB(Jcd1|%OnRGi6G3`G0qRQNB_z*zdKY<4AJ-t5Q z@P5yH?Q1sVPBR;BY$EO+0(zaMPBA49ZsRI3YpE<;vOkD#x!Ra?ItX#fWN~u6-K$;% zg({v~&iEsd-}|vI+E_xLwncgpCe>AApJPXSyKzEjDq3U2s$v|l)`iClda1Z)SWn-D z=YnA7Ll$6uCBLtddK6R_!ZCC{&L6oW6ob}@%k=!u3SNQ{HR_nN#S3qzEI=2&-z9x# zvcpB0Ifz>r1fQ@H>^t&CT#mUVT=wxj>y!jD)^$FWPQ{w2BIHIpLu+LWc1Q87 zw9o>FMIC0bH+*7#jl|=PS-44LWSkP+yZFK5QX#s`SB9LwB|PXSE&gT%>+AGyZB9hQ zQWIRiH6M1x#h7%y9?gN&o9?|U)Y^@)W^O8!HMDWo!WJ@p3-LA19!0yTCGAM=Pe(lz ztR+WaOEyNP*5Op~f8y2Y>!NaoG0Lpdv39T~GFIE7!JFq)@}i%``a!QS4L7Sbps=SJ 
z`#m$^v#}07kNy)UFMSo)()f2vqHp4qCf>C;;4Sy4KJ-C+%ML`RZ%Oz$(hvn(D_J|u z#UQIHELI(emz`R~%}d%?{3ZaGPwC_OA#)6!&vSpd9iAsTq2Gf9+&ig*Uc2b)ACQWW z8bb$5v)oBD&EW9tZOg@Y){kGJ%j*ia^D76ZAkHl6FuB zkyVH!n^$7EoHY7BGRKM6Q3&s=1A}ZE*bOg5+6+e|Qj2nReH@y|yXyI;7HgPWd24t* zrZ;s!)8h}Kw8fC?d@{Bfvc5l@XN`seME#@Q{GumDF~cmryBf4g>JX!!$_(qd5|0<_ zD2r$GhdXRHLnNz@GId>JrLQag}VkJr1=P^rHfv{0{pKmcz3h zpG1ATF7`M1!G-;mn+Kh+j?YD#x_zypAdF3qL+mP^&39CycUuv*=$BFJ*#~DkJQpvy z-gtkbSENh_qxGENG?ZDzXXz&z>W;PShv$`0?^ncnNCkb|)OGD^?~Y0C?P8_03jJ_X zuvJ3`OPt7^;9e!`E7w5>H=G~?rpDR?cW;&9r))ZIoMm4;@SBLc_F05AYNBPoKho$o zOtrVcCvp*Xl=G}|aTb@6I;a_d#<-N9;kY*hf_+{ zNFAMnAFZ}1;J&Asc_$}#8bY}X`EFGy2#CR2uYGJ>57AtH-<(&E}JtE8vP-dE|~X*<$GLbnH55 zfgox@YptVT!f(0T3HIZ-H+vjhjPX=&ev}o~Ma|`%|H*pN$_?)_CXX zi**}Q@zC0VKD}z{&x=shjq_-EH}rIB6m!PuVzW~e-ZZFV)$$d}~JOpQjx0Bua>Ivsf}50373&|yvV_i>&NE@@yy z9{HFGX&BL++~vXY^tOEz4Z)hQb@IVhV=cr?c7fQQj+^EJqrAOPKPjA^X(yyO(@VdM zJ<%_P%ya1YKQEswbDNV6F?%7z04MTXaHd-Uw5K~_Q_xJf{Kq4 z=^xf>C44TDjyE>pr&#MCg$FC>Yu)05y?gX%=0A{jr4S{D!yGV&t8CeA-cs zE*Wj2vEj9lUdq1xl`vEu6-Z*nK&=lw+0+ZGkR@QSn_Qf?76?wL<~`)&!sJ?s$2*bR z0WNyfNS-3sV-)MU&&|+dn=PbI=V8qeH|ROAKKL~jE2x*c`?wg<#nA{Nw_>1MZybI0 zLTpde!s_e*{OhWX#FciqNAAcPdULiOW3Qwr6>Y`_7<{!7LoTKxo&9%7?@!X-NqT=; z&-$S5!8?)J*%*=A(y{XlxvIhgO4+&i(AOENWX?X)N=1+i;PhcGR4wx%f3OY(LkHu$ z&Qo#lI{Bn)^YQ$oE+U_~Q?Mht?AhtSicei=8w2Z}5lH$W-*b4#;hsi^<=}|16dKIwc)XDYRW& zzuy72TU0S^iygdQ@|@mg2fG2RcTu~v@>l?__l-kEPaRC69c_aWq{QteLf{;r_d`&IJC(ENar&ZX_)xz2z;W*NZc`wu#$7~=s zO_uw!4bE6okpj;x)E_%m<1)FL<@5vX8$JkMR{szYRqAj$>kfMlJuH{wJSdY3FKb6! 
z=HFz+dvaf?2VYI@mL9b}rv_F5^gn#+@>Bfzrv|xwGq7N|34*C9|9!py9WHWTxn?@9 zcFe&5vffXxr=OX+J2z|YcO@KeXY+Tl$?TWd`I>!0Kjv^Ka$kGd3Ed`=Cm!a7Wr~6L zH7p&gGSyL6M^5F%IGpKUhu!o3h$a916I)Bw@GXBDR)*@~MWq8=mgQnA>u&OKfpF-U zfIHOP)om!pOY?MGR;h%f?~A1G_5bMrC3#==?C*s+W&4GLi9WpO3H&cxoqcUv4Ca2< z<$)E}XOo@xoAcRGBe+hYj-e={J~HSbff=S>0~CpEVVs{2(KUfC5npMHt;R;-WFPmp`T7{&p%IC7^Da~sIpUN{3|FDFtHVv10;GB`7* z)`8qD`9K*QWq!bcHXUpq;g3D5weZNs6-Q-qaDiu=R|5huZbAYaQ(SPhoId7d#n?Zm zP~!1MYjnZr=G$WKLtRAW1R%g(6FD2E!YwZo@vqFW=iPLiYEOXF9 zOM0D>j+b<~BO1GPK!BVcrdqf_b)xbAJPS8E6k|Gh8^2GI1##NjoD^zg-E?`+mt6rAziBOh~A9ih`d62Y6(!0kSgBRGGGJe<8q9%NVg z=2x_f(BT^R`kK1pL=&9sYmdxq>fJ`Wpz?7b)RU9YS&h2;71TSO&Oq$7GWchH5`#6Tk$DWAA<)@ z$GNAvkiTw+vBSA<&9lKo?O>GliN#M(3#dLRgIioatUi@WJf5WY7d+{W&~*GEf)8q7 zR@@BqT&Ir6Ma&QL%*Hnt2ZXH-!gtqryk=ghr!Mnk*JmR5QYFH|yF%sASCRKinS9Qv zSXF9_i8@Y**2qJ#I=Sv6g1P1;z-|KTT)eN7%X9GiODW8*Z4`AUE{Hc$e%N^;m-$8P zZ&%u4k#Qa(j~HN*MKYEgjf0^V8F5PUxHl$ip`wP2A_Y{hX%&5+X=3Bc88|Y)klC8l zbL}8+CX?rCD=&CNCt-TM0rTOQb9O2d@~rRPBfoWgvK+Q;)5WX75%WFgPOyYc;7vTyq+4Ew##7Y?G{mRM;Z5qQ5)Rd4Ca@}zV#@;w*JhGPz}Q7 zd-2fhV2nDa68P`tx&Lz+?k37ZHl7imz0?hAg!c>`jn_1-BtaZY^sh)@_p+*lT zm@vB<6WRC8A6_l-csY+c;m6`HVgb^wlK{C#UTw{i-fL_?jAv_w=hd z9j%3$7k#8Yb@0_^-o%9G;`9zRjJ)85bFBC6>1~T?H}f%yy66R+LU4;r=vgz!>?5Ca zA$?~#8tB&M~ zWp-f((LQmWy4Q8n$;Q~PkFa0PSVSFJV$0PuM(fYpdiFgoq+zi>Ip$J*FmT07 zVKG68-=Q2Q9N!Xha+86F-~6#O^M}L=9Pp&Qkjyn08$m z{ViQFxF!dYPi;{8A{zgTyj~$gm$iyc`ObW%1cr4vHt#aQCVq@?Y!2=eaEu z$rJiO4n@LFPuP1$VC7d+wCI$8ETZ=!lDTcg28x)SWlQ;PQFa^>ZQmUXs5 zp22iv&YTJHPXk%x9(bNlhZ%h~#*<_b$;{SejcQQ5=>{VUQ-q{g<3MB{oa&vh+S><5 zhQ#A(Fu8AIis7%HMEzeG_T=;-WA&=gA_r?mr7t{7jZm^jAh{_Y?w$F)ujN^8ZY%;b zjL^8Y2(MnHBk_C*4!!G*C-E};K9#VT{=Jk~J$$v{`o}Y0&uDu{d$^!2o;raYll(LHA)4t`uv+{la2wzMYSpxRuDs*&u=&uZY$+zRW+O{>{JyTE_In zaZfe*3pr+QbFnEp6Yt#2k$Q3&PS+P<=9Z<1F_Xd2-uK0Ha)Q!418}o5bFJ?&?`2Q{ z=00>lz-?b#OG$xRf+1eEm%x24`+sxG(bCidNoiMv&jKw1&H`Dt)W-Q|^pLmaB8dH* zC!rn~T$_rUJf}7&l|g1~4ipcT;^3{0=zs3Eu%iBdQxttT>{qp)u*2#zIn?+9zh(xY zO)VVq%viYku^a)13-H5&_n_J)e59_42X}Q4Kt`F{6$7NG*kV(sVjSM*2(gJ;l;g?x 
z)yWwBoh$K*Tn;7sYD^-(6&oIl&PSQEyni}8dK%!tW#%6}q)wH6O1lhyG`>nj!aYOs zIm?hXB?A?k=^O0X0oEVM^Ox7b-e*D3PM|+>vK{gxnCnIU?y3c0=)N@xe^N~0$?Gg$ zkc;=7s?Z|SA4^WQ3y;ofh}!Cgx8y~9J2VBZ&Dj{=*9O~UypVo98jbA9*Y>YOokIZ} zxNcr(?F^5;&&53YUJmKHL20KQz7C}?POl8-Uy`?$7!MQAG^83aGktmq&L5*M`3%pw z|8<1ZsuqzyKnE>%L*c-4{G9<)@xC?(<;hNX(-eSr^lbRXSYq2Bawm=QFmh4_=V4~3 zG&~hwsSnBB=MBHu$@t8(+pU}gr-GV#8;9Tk;V7Wf;lQ5L#U64LCO_%-0~=c+KgDl7F9wI9kJ`Q<0=jjp@W% zgcZ8u`^zZqMPg8KeJlm3!giGQcYJx@`6v*@b|m_#Q+{SUv3PBzSLEJnj* zGSf7@Fu*Sv>(lixWJ3-5jm*QL2<~m#LPUl=C`r9Monk_C` zXToBtF3O%%WBsvo_y<(Oli9F4OJ50TedfpMhT`M^?ynzM(qxXGYPML)efJDA4{RUG=YJp@ zd6&wl_2`3FTi%OX3$$TU?2CK7^x&T42qo4bwpf{?kGU7lzlw&@I7b}jKG$_QGX|7O zAe|wFDNo*ub<8p>h&`&I$}#z0RqYXZ229E1u|(EdB_ywm`l^oBL^k}s$ei^ z5Eia@Ach@KM*cci?5rRkTh$H&OS6%oY>C?!+_2zU48C<@c1(CNPTtSKCVHJE9ZzS= zTXFPw7c^{8L+egpAoWidPdZ@nLgtJ9bHS9&p?Lh9yisywPhR2sqL_}9a@HdyyiQ5S zD+^~vMM1Mj*`WguReJW$aKBOGj7~hS+2-?Eq{m=ow4~SJAo+Vq-YOR}1K*gu#Jam|@JkD)=@VD#W3DB6>4O3-abz6pXT_0-|7Xg~mvV%<=D~aw>sbEO{odz=pDbNgh43~OdkT%8TQSPT3iPgSWf8*#VDLT{GE@?8oDq~3ntM*~bYHpM^s zUB0oN_lUlr8h!dxx6Oj|O5m?fDZ0(bLFME!EIUe`YUWR|{;eS{EoLs8k~U5y3*2H| zs^fhN?C^C#=JODk-{OAJyMlSInYeJSQsVLU|Hu6US(Uqo{}elV_2cu5!JpP(#2l(Z zgwbp~LlH6_=HlY0ARO3RfyLXXd+kyy@pV3K`6#q6ycHj{3@~P|JG_l`$i=ilR8bLR zE$#5`Rs@2K>2*A8h9_?2s3%|QVaE!I$CLE$C+T%c`aC85ouv1tG5WgDvEC%)@62F^ zSULWV)W(=$HprY(iZixm^v_4Zbagf-T@wuUosTsqSOW{H$3IyG*hbtDyYA{^>xn4j zpJM)~ygf|mS9?KU?2N-M@U}}sr(WcvEv`n39rYir%z+S3h4}PGq^#3J2S-nA`K^r2 z>MoeeT=mxe_K=I5fvdj~5gu*?`JxI;-;#s2GJa1gUxbV6U-7kn5Wny5#RX!#$By|2+u~t#tO7tm&M-cEn@j+b$l(F3io8{x(l2UH7Nrj zn*^pD41jVQvxQ_`@h!OohW`qoQJe?+D;@sF*J##M#o!7joNzY9!+SP3zB>LBDkcIKHl4;(W@#;j);K^;^6+I}!PdEpTfi>-x;!$y}O>+qcL^Q6cN$ zOEg+)?J!^-H4tanFP=>nM*nX3)qY8wRid7T9-&UrhB)8L8qJ-V1JLS-;p^zBew#q= zx-NoQXWtf`j=knJkggxbY@;ipI@}DKtP0`x*Z^8LJ=E%-6$b3%y&0^H(fu4Tmu#V}pPBc1WhUl?$H9`EmPb7Ak4wrx z_5KR1S+9UH%_l->B>OS%$ok~DOS;Am(~i@}f6f-6D?D*pn)SgG0vlN`81N$(a_=fI znd`%l;r~R3iOLAr=7FfE#^|ue2D`oT5Ws!fPELL<{^fr#`ElnTG5M^`}SX>ktjHj4QbhO~ZA=YK$B)0Q!$! 
zhzG3OBnNpTaw)k92kl@Gk`AxW);LLqfeiJZDdd-qy3Aa}fE2_s-%`?iUrEOssP$Dm zivJ{x@->jM-xIQ2ch7TQze_e1cgNUZ)79ya8cN3YKzfqe$}#3?E_!V%#=aguM527N zSk}>iSyc%rnWuq!-R)pGvH-I+ov|#&A8Q*@u<4H`dZcsTqL78DOKWkUrYACEW$`C{ zJl<_H!1@73SU1fX(wnm|eWVM{&R~DqCW?Fvfw9+0IoG5jtydWak3TQU{%jRzBYn^) zm5yvgV`c*RP zIGUh-0W&`KaX;b4yw(G$I5IbZ%xD`lH`GF7R}Om3t;V|rBe8%wmeMpW3^bpOf!Smy z-Q*c!j2|ioNd-cDS0f< z6Y}7>%mN1|dZW5L4tcXo(E7dzayzr2uT_kwDbg4*>znXcssm@%I>(QpZ@!l;a%A!$ z$9d3agdb#I#9?@VK1OF!uWy}!wS2#m+&bXY#a5yCT?3n=J>k1j7bD3{>*tz_!UQXr z?U|0ouTjjkFvA{xd&~55FwlvN`N_}3jD){paJvzvKFq)saNp$w=ojiU*BNr`ja*ydhNE7- z=<_=hW9Yxx8&Hek^~_Ysp93p7WM*| zwBH)=>{^a}mouPj!hT$B2W&cfQiw<+bk!-qf1czhjdp-+bP1L&b;G@s5Hxa)IjgJ- zH}-FL$K)VOW-gx2{wijTdm?P8Te@Wth!+D4uw%F_I&xp0H_-*#^Jn~z^D*<4HZvGX z5k>#eSNa6>9*uzIoA2WKLRHLvGX>9g>Z5Y73(q&1X#H%9Q*m}!RGo}7i)^rk{G|Ig zvT%P=xy0|MTxcg4k&p6*eA*UePpl$`Pid$lzEl>V+13RcGJK7DlFFdA?RNghRN&0Cawapp2>hG`%aq< ziXL|DB30T1n+-zn^3)`3YH`Kl;0!!wUfRvTDD13EL;V>ORQuIq@zh*QSW<(tG1tX| zv-5<9oEPRVoP$%`pB0;%Bj`ghN~;C1b0&&X$)a!9z@%66Fne^35qxVo*tocDrSj;Hs)#M?|i~alA`8aZ3hFX_5;+T>K z*1Y$}04)s|MNY-pxJ)P;I-%8=o;7M-qN>eca;*wZ$~o*WRY*LZq&W_f-k+UIlQ{AB znUF3tB)iWa1C8iAumWDYlrZPe2B8J%Fvy8Ql$Rw`HkBYCEf)u!nNwl>N4z=GECLIe z8_wJ$uim-{DWtxNI{2sStg#|59G2_j;m*B+!LV}7olNhjJu$APuz6{2Fy}9(U{}Ca~X8UiLHVi+`L@s^o zZ>*SWajJkl0oIAmg<QbEIM%%qQ#$2iWd!#l680ZPR?9*o;e~j$QUl{(2I97gD{8*Jg0LtiBKKf|^&%jHK3_7&=( zg!)gcPs7y-6%zBTRtCKjvNMl}k1_i2zm$sbkIaev;()vCUw3bC#mrJaxNXbD*k$VY zv#ttfsKGx!tQM{dJK~({bK!lRI@7v1Wanz)O-C1))H6qAg*z_TOhv={Y~=W=V@z-@ z28Jcz-;Oy5JI+3k`7?1OMw5MJ4=kB%hRK`kapeDTcGhoIWqlL}6a@>rdmP0=x@#}G z8&pz6knZk|ONrRs9b+eU7j|P87Gn3ip7&2UKhHC>=U&d;XYci0>$B#N&)d@j9nJ+I zqBRHama*qfs~n?FWnm2G(HkDLhry}O;`|`?Q+;+q?RI{j?QGd?!{;}F>;csPJpU7i z67dDj=%fTsp!n#!)x+p zjxVi1ccTnU%4cpq{;g0C_$>yG(LmSvQxI~*2*H}xh?-5#(quav=@^9wzvJC820v&mN!+faSPSyy(v7x_=-Z%?!tb7IJpBmm_&1 zb1!=jGTJcEnVA8QyT-U;D!VH|^`NCoCP+Jl9RM{UQAN ze-LFIG%?@C8?CENac2hSGMlM|R@r0sf6<6Zi$>}NE4j%(2I>GR3`~GQ574gFF9npQ!sHB63>AHugbnYYoEZ 
zErzJ)epWiH2>+_N|GzWu_n;Icp4GtF>I#H3CqvJ#8uR43w4w>JLF#A9`IhC2%sw9)_JIt_9#)>>M2wFi+w$i@t?5 z_87Ct8=L=-|4E(bY;iR+IN8`m9jvRXGS0kzD=xKYz<-uMbcSf+%6n^M&&t9nzE|7_ zIKsj<4)qtb5UwX9(~EnzZYBJm^uxDocFX=yBU9TM$y+qBKFI-7XCxuxx*ccFrr6^c zgZuK#Nqv`LgHb#Zlx3)JX@i#^e~Hu4`e+^PgS=HH_4`>Yad;@dzjh;yJISH!Hh$a1C7!3g&8 zkOQGtkPq_^`rx;;!O4;G=y*+web<5H&l+GEeLAjp3SoSlJf$l^SmqFgSC!mXGpi6W zhIg}()#%akQPlr_D3*ZyE+;ZKUPKNk9J9|*TvZM^Fq zi^cWi#x8Qev-Sn(|G*WCyN2OzcqYc}A(xUlo9jbTarw{;@`PH1jocOScDWHAHHM;; z=ej4!9XYJ7`PlMY2C3^k@6rZ-=M<4=FcMuil5xZP zQB92p{6CPr!~4TwKF2GMWx#wCy8*h%;PyBVUTT$+*OQv7YoPxu@%DXjw?z-dd68(f z(8B7URv7Y@dFGckNX_FJ(K`HWl;OxNjr78Ip zlX0*i9|PPS(dTarl&sTHuzV6^4eV|F&MwVw#pGjk!SN@-R%*g zNB!)X3t1X`KhQIN6r1h2IH2r$AZ`IMVEYSa%m^y@G4`dRyiQH6MYcm z)qYqU2K9ni$oH~`TV(|fEh=U&cA4b$hV6PHtfF5Cqni`(wvQjK9yEf{5j*6|=R-BY z2J3poA)t3GAhSd$=loXPbMeNs0%?Z5aq;*e@plY+k7lG`6Md?g9emhpScnX72Ncek zj2>F-*gNlxi;Z=dXGSLKX>uSX?w?ZElX{;HkynO7bECLDb}Y5gKxmN%ykvwOfI=$9avy$-%PVQ9)u zhdlY;PNg*nY|2OLv>CV*{#a~Xc~LyrMSY^dA9|YXZ`{HCk2$v%HwVm}5rx2~>B#PA zhT6^Lxb`j^tACY4x4J8QwLgeeFVtZ`y}(jV2M)}zZvCB&iH0@^+RW_Pt|(mUV}N+h z$2M)|9eEDt9jaZB|M{f&%6b2k+%yC~oq$jC$g$>c;MM^<+*waf(EThZ&@XiTcMazA z`@GNh;PZB$gi7LTQF4krvc1tb)4?1)=x=P;RDvxRY|-i-kB=VVSpCBs&o0XlO>OG$ z?kdUaRT^~0)jw~=)PHr@cRdBO_v<6Zj{cXLJTxi`n7j(XJLcGrm(%Z`RfTpooShyo zhO)LI`>k7|fNC&(8bng_MLm6F$!%1OMv zqXT})9*N~ob;)guM8rpJlq))+q=vrd<*w-6+lL*jWRJ2ZruIuM=gGM+o>+%p4&4!) 
z{X^KesiW_TAS|SgJCf(*mYjUVpLWEOWkJT=s+a0$I3;XoXYnZFkLR7a^4*VbK;=&-3tYrPus*7<7QwnmKZ3) zGT^3|zj{2n9|^{^SDd4qVJ=l6ANfC+Bbx7v&^^)U*QAdv3+X?qOvTJo<>>Lc2SPlQ z&{jqcwW>Gt)*6y4V-Gvdfkt+7#^cwXoE?)H^vnnYt*SY@%)zF$HPAhxj9q80i*eM~ zuf5AcpQVOaeZv}OyxDp4$pt3|Pr{0^X-Mm+R z>Nk!)Ro*YUhuBkR41wqGWE7njzO{g+KFqEeF%tyHW$Ej#tELt=r=77iK_~PeqBT28K6U!1V)pY~=U6 z9}$Gx&8Z03J%+iQa%`HAfQL7$@Z?rUWQ}|(o_pw`(<|;j7rBqxy8OR)eXXAj+GP16 zcwaI*?yPt(tbvTZLCUJ^M=$S;O1~zNNABIkzz`I%UmEg#5G*B96T@UhTcH`ycAJSTisaF z53%cii?=3fP(E)0&m~sKerk=4ZTNfev4cX-Dd^KL8jg+BdYy|Qmz$6Iy6o1I_&yq0 zp$y+>=3ytW=Q?gGW;g4Rd*ezkKpyNWtkCtx6jV3Gp{=PCHZQ2evnwTtS17}>iHbP< zvsGC3(u8(gFe0-J5ftEpvR3xFl9Pgy(~u-fhUOS+d>_V~J@v-L$10I+@9?8cfN2HAaQ?zMY}^#+Hm2jk0Ct@=SK-*{0vyh%mb~6||Ngjj^tMPT*MnyF z>4;-qCTh9`&&0)0YO%oVWga+uI|3yYW@K2%aD@5&PHI8%aL>{8;eHB;4)tk=iew{{tYz@+^mgjXU)+| zW`kKTs3T=M!og-5j>Sdd486U+bBfUZW;(pLl;Gc=9dL|3$96aLG5&Zs`rXsS@-Qp( z;P2((EIWXmHfc}dQGHzljn2%=+)Baz@70ny83SfEiRQh{B34@iF`nM=Zqml31vb!K zp9{GcR+#7z0i~bOXc}XND@)4Je1 z@3CV^Ebe&$hv!tI@M#`ymdkL=SA{z04WZg#fHAkDvAv5L7Vq%Kvaabc;(YqiF8Yw< zm>p#{aHRiCoE%aH>-N%hH=ED*guKaLu|M4qXJ$`f)|a`#VU|!MpH`uYXCkxg*SM;3agU!sq zx{YvzhdFz9l=GpWT7sor2SWR6hohG=fB zVBbL|Mif>`UT>4}Z!z&klW?)r#k+NpC^}_;bM)5@T0%DNGFKdUAB?}9({L_W2O6=J zLF1ELC7G})b8?f86$9Ge_O|`|ArA6qx$O&c4wr^^efFtY$tmho) z;kRu3OLo!w1?>=;_)(a)7-8J`D9n7OgV=aCL2w zf0(KY27GOUE~B;3Hklb_~O(m2LW@8Hd6Uzra5kB2s zh^BLT?81mdJI?Rd`?;ZE3weRt95G)j1wHN4@q@m%E*dlNcybYnWi^s>rm6Sa;?uOt zqQ9LX-h?G!{SqC_|K)^^!;7)P*BwizO~S%^Sy*;n7hSs6;$B4-wrrSzStZBB?9YqD zymb0QnQvFqACHda=Df!j!L!vA`;>ff`cejD&0}zYeaXtso&CaW?Q`{;{R66SiCXvHv^?x0qM{b%ET89~mgsrEgK$9?gb<7Krz^od;jS2Msf8TR6!|i7{=B zaeNnZXFChA&XDKOQOv`Bk423;@SgKOe@*&EPFBGEnljM!t9bRn0FR48QNCXXzoOh= zb$~sT=FSK{#NW}hbR;;Mqh?PnvZ%++@vDJPUUyhWoEO2ITL(Pje0S?Oq-jm&nLZBN zep=ulnHOC|J_h&o!Pvj`=)_sVBySl8G5B^NdB4!TZUUJGd&HPaQyJ{cd}bl`%@E9F zF{{k`(E@t2*DF-9JE|P^J4z+b@j6wu$FDVHavo8`w!71yzSI=0JnyzQC`Q0Y5Bl&z z0nR1DdzfL>i&~6M&&R5QIs_f*0`sOH;^7cA=>D?7bg~F;Ep%ibY$5jSb%pE5Kpc%` 
zk9#xEH1tA!-IERV4dw87#omFCzv8gz7-*_GBAegOr@ofZV_v53M0(^BCn98UIQ0&F zTB?)dQ_=Gca+U56+Ssv{|kYgXz_r zdz(3+L>clu_T=Odu$boCaFMW z;WZJ)Il-Bu(NOkRhwWigJe`L$~vJ!;Di$?4!sS2m+AiC}o!-WlYFB8E^*~W-JR-`p&~I89+8M-9=dOV13VHNPV8`J$ za&6MnP}fNd6R590aw^1?1+MU@^I^w83NHB3Ki9b))tpaP*VkkFbMoM=TZP3<9d@%v zp{>0E-cNVL(EY`*Epx-$p%Dlil?LA)^sgq@;xBu*Z|tnYagV;}#J<)gA>)vf=Y)O+ z)YCOxaYQi(88ieoI{9LqaU4>k-LQCM86Gb#L>A95>Zg?P@J*BOJgJZACV4PP(ZUnQ ziTEcrAHUgiaEh~}wHvdsw9XRh+T^jQ7Guoa87S*E2;PBr#c%SS{Oro`YXE(}^Zn5~ ztORom-Ee%&L`?5hh&4T2AXFD&B(?dGTjpT(&g*rTbd1EwoyjPFZwki|b+{4A&ib|0Sa`K9{_K7#?kQ`*ymAUk=WF4Bfin~ebFl5J zGZqZ-M$)`Y1P?aE!ugf>mrNeJr|i9!I1iAzo~D8d{;a<%hS9&%lk?3!leKZU!3ruz z$hCN9iJ3iIkzXGR8!IE+I4Z;b;B0bs*za8bN6aw!B_GWL3Pvqj_g zDUeG`MwYb$#-jq(!YfpHF7_*53I-mBewaX zBs3l#oEKPomt*+Gbn5Ri+}qqLOh0$QzRlxsB+3lum5gwfKH;yM3*h@78F>%r5tvLC zmZd3V{^f}3lMBU2axx`;KB?E=u2^80)&OQ?Y6q9k-q?RT2C)mxpzmCUt)?0HMx7{XR!7`rk5T$ub&MYCi_t^K{mHSS){_N` z$@cKrV2{wni6~Uj#I4(9P}my_qq7w_<@H;Pd;doaxv7QC4K7G$&fT4Rkj|leIH!^e z*ER+N21ej+Hy3md=Q^3CNJ=i2yk5ib&%$fYZ4vy(2pcXWlC|cGjYqAKXgUL@*Erzq za2a|mi^DRviI^Q-L!J`5-?^_zojchtkb~#Gt0J#V8;<1gxkj?5BGw)gbPAxi&IwBU z*;medRe_5hx|5&0<8V5eh1HVRlgf2}ZlQ?dy}yYn<`d^`2*QX*rZDtz#Im(T*re@B zc3CJEDO-T`tl1{NL=b5%cyAUR7^Qm=FxG?JRW6O`E(va!h)3G&nM zy-hNH%-6=zVKb3DHxHhvGx+zXcBb$`oZoMNF9u0?yqsL4jh19K6e05hHI1oLq5diw zS3YolgFhSJ*ZrU`>QM z=5QXa-IG4|S>&dVQO1FnKZH8(jRlvSp{>mK!y$Y2X5_&k*$zGL^7&jBhm+0%w^Yh; za$YVX3(6qh`>S|<|AH`DLH_#UWSqRgoNAUcYJ>7{f20FK^#gFXa|)LKH9-@(8kh3& z5YBw#$r=Tir#uzimhzq%9EjV@=Po{E2^SQj!I)hrFQ%f6PaNX4Ot2xFOgQgc*uSbk z*CctIk9Z+$mrX!LUvDJOG{fX~j=1oH=j5kOFc=xduEA7vcV%uOq8w-bq4)b?xn#fh z^7)1S@%M-Fjn3Xab&p`DRBlsAJskgTG*~g#}4`-UMPGP z{@rd1kE1#mJ1Gct$92)h-3GgR6yUpsJx5Hz7^a%Y^C$BkPs(s&NhSjADkS?Z}{h+U#f+h|0SRgb&4r7t#SKtI=Z~Kfomrdoc$ONRnAQ}PppAlSUmPEu93W+)bnmb zOdsUNeiZtxx>%5rz|M6;tY7O&P67G)9v__#Gs$ctc z*df$fwu_h_{urK49`ism6gdI~%vKC96=+b*L`KI<{1qn9?=%b5WVj@2&w}Z=KSJ5( zrEu(|ft}Po#@#l={54MKkWm2DLF^NwR+V!)36aCCQ0`QL^`ZHgNk5C!^`O%R{n~vO z=N4+CPlFfwT+qXE6K5RXmJh{Ij;NCbVc+Uxs9y(0ddToTi8*LyQQ}m(!kyiEUXEjN 
zC(r>6r%ZAAr9Fn|7BXw$z&@Nnb^^x3?VBZ5ksJAuzvt=S%g|m=iNE>XqU8m1@x_Vg zPJhWz8*@yfA9Lm=6ZVAq;;%v?RxB~cZRZM{%qqm)f6FEN#5>&Tg2x};iUIU5880Hk zeY-a1_UHV!EFEzTHpqBKPX6sUY#u2PKB^35Ptwphp#+`-|AF1%m*QV?HSIFJu(Bun z;C66cQD1<$?1Gwf%n1{I#~|>R1;YFB9C4eR>N(}8`q%-(S3eZio*HPPM!F@#1lB)o zF`4&i&2+9c+8^p4GGKU)Ju1`6*-x7Rjp3D&*IOXbFD&&QH)i+^k$LK*=-;Z3zw;8Y zbtrpx&O4#4B7M}&Hc*I7g8srZ99Oo%saE!DGuP9R{+^KgN_h8RlNh3-4~HgpQ;=gi zw%8oD@A)3^Hp8S9-pmumV9r7VSm(>g@XW)N?-g)*F#y?iZ$xj;(HL~i4WU-XNIvF( zH%0My-b@bmF@ax$*)5VOu#0(uy7QcgJf{x!qdgv2w2Iz6$bGpMfPI$?uzZdU?yoDr ziTe)NIAAj7X~e_jy#XxfOK)>G11c?*sQ&m(JpFb`RFHdc@^Co(*vtArjqi2xufGSo zpq*bZ{OYo>-@y=y3z2p6o*%md|cX2UP9~!q09Q zNHip)>UbsO&*$R+|GiC%e~6L=H-uS@Hq@5~<6Ddt%HLU|@3}naw6jM<8(%!Wl87B0 z^^o3N249zS#2Qpc&I2vJ(v7{=??vx}AV{oYd7i&E&$nb#XJG)W7URbGe*HNuD?4tRa307v3oaqx*JG`7;O zal;6$$C!H`k`D7Hm5@zT!iz8WM7uqjkbmbz_N^60&#=PGpfU`gFX)MG462vMA;Q%L zQG3YdiKouErc^S&;MSskm|OZ&e7!XW?#ypRZP&*?Q^_DZo`WptQOd4Rfv*dj_Cc=ADUXRxahBq z$K0ov+NVN$VI_Mg6yY~Z8SSo(MJn0#Z`&In@Uss_t7PK}b>0B^FeG~?K>oKo1otzY zEyZ}iu8ox4^7t+8iPL&U=mdiP!?5r=w?v6w2INs~4 zWw^Q{8-;ES0oTO#RcY=Ohff#{Nui`c3_;ai8~-W_kwisQ4qP$1Et= zU!>5l@QatReV2Tg7dl=n0>(*CtU0ieS+K=DK8rkmBW#??KHB#5Vj6q0JDaoX=N^bNN=Egj9L!L4g6wH6&h9Eh z$fa5&j{GC^ZncO(>&S6zod_l7&c2_p#i>z+$bHFP8qFZwd6bM$1${K;mP5-t9l^}e zo^LOYr#C-|oDTe*be@bnPeWWG$K!TVG0b`JJ-sCqNtKB}Kj!<%UOM_LAE$a&O4jS7 zKG*MTev9?w5LhRT!#h=brgy7WESd^`>M2GQZD51uk^1ej! 
zz9e-%Ep?7A)o&{GoHg!rXWW{(M`-;tfMsn4mXFXu6Z=Uus@OYwcvTpVz6#nn2}@`<7odwG`zo?s;rk&QLtoNQyOWt0 zzf3Hpk5lS>CC|7c+#C0bxk=U-h$MFAj)%)49ej%_q86o(=jZ7W`W%Mi7nsYQQHr~q z_n+lGN$R{f?xiwT_iq(;r!-L3iCq?5Ij_^UgPTSk^dFLgHh40Q8nXAwOkjHdGK3#t zKKw3qeTn;-)b*rt`5#{CjORPP3SVOlR0sJZ+{pk_cUYr=Jj;d58=7BqMUYDrZVh5y z-AINjWR(ZfFDG@LD%D4pGN3(uc$Y;j@9m~4Dfro$d8y0pco3M6ngVAGrq2FMjIV-G|0+3$q<@r4M!>3|;(CZWr_+d1P^ z$4OW)js1pSm>V~%fyX%V&*E#acxWf`%btouN7Ydrr zU=f2*ZTc=AmSCGs4s$FeQ1y^QdyPNh#9KAY+~7zKjv=1SVSjiZ`jC@>4C`22&7F?v zDIPGHQjS&gIJ*d7C$dQ|Y>C>Gda)b|XH!w% zmG=ONJa7DEa zDeF69>8A_A=CLLe{37tPMHjO(tT0JN9|UtqpA(p|b7KyLI-l$33XD0Kf!uf*r2hW4 zY<(^I`#lm**!z%jH4JtZy5xV`;ED%zw|Ez1wFba(T{5~CPk;jbJ*pEExp!4d_A5$V zZ|~AB==!M*3>C&fN#6`fg(mQu&))a2931v?Ld*039Mnv}MzR|F{Vc^{&hZYZl}P49 zt)ABptG_8Dy{$f0`Ax;LWG&n+cSBQ~0=$~yf-{Y_aFmN9i;VB%oAof#$VKq#nUdE# z+Wngt-|4H^tf+-RL*^@@ji5D@T=pS_c=62vX{}RHo*#vz1Nu z%s^u#IL9NNbT519_}IXl(zb#f$xOJ8wG0n)nY#R&(-wnzerR@)z}shvr-ND z$0uVv{b@$2j_9gRE|rfBs(bn4W-a?B`dK2|S%%-e$Uz=jj*O*p7-yw`+B-Vf(AFJm zuF%&;?YRHOd>C=AdM0NIf^@@hbb&zqgEEX@KIKA>GRf;n^?UEl?S!6#ABwc!YV2+I z!p}(tIIQdp&1t!?ImKMGuOH5)#pBjn8$3Ez1~a#O{7o&9JTD%ivsTEh+b2qu0uf8k z*}ZSZ|Ic6cAIMI#Ek;T zW+*>zfe}}kOKa+J7m~=ykjkf(%6&b%s3(qEd=tlH z^bPj&$Ci9eI8$HfzCIIk=&vjrV1o}MQqVeHA1B+@Aml+ZW*@FbdXXYzf&;xQkZHv;-H|xl|KNg!)bkHalA)(_ISl&$5@S~07c;2K|MClg z_c}8ee6z*tKgD>XM-J=32+T}Lfmfg@#*kyYG%^P(?^H@&PwM-2YNuCXM8yN~Z@|F8)2|fF1>IaM_uUgfBLDrcwX@9^M|BHJm5?gE!s^ zh`nrtn~P(x+SV8|XFJ3DGy72Lec;&193JeY(N599$C$4bLw3A(1=-;#wQO}2`nJQYG3as~oSVoE%a*?0uhtH@m-(E&do7Fz zo`#e{9lWx!LF2(Z^hmSB;uGFzFp5Sz{lt6tcmJuGhpvYzAk}v$RnO`_u07sPY!oNA zk=MLC6z>Kb;U({+_c$*uK1uy%5c`&j;^6*=oB?W)bIDw9zEy#m>GCL!dLZKR)Zo<4 z4;4Yim{P=krzeGw*L6k`{XL4>sp#!b|LTh}^rTOE)$0n$zO^#rccMeDBO>oJ^Wk$c z$=}w+@>m-@=lgcjX;+--6$sT%X~=3(!@(QmSUDtM;;C9xcT<5U{Tki&Xdxjj8fw%( zo0!A1H7$bw87EA?J_%3Bpne->0@IG{x9iIu!rB_i^Ej#NN#)N=(hbYEd=Sf4 zjzLMFKZa$|_bX=$A8H^0znS;l;K`g&96hTBxPM*-rzL5ypT_>Nu07yBv^yCbqp@ML zF_Lx}LG7R;&JWDTYWH^t};GPJ~IV%HSjS0(zhq^>9RyyUrQ5F%bZ 
z5bqDuk9c$n=g$J~G#rufnchOOq}SPnU{h=)x~g&3wxJSZ$;ya6S_=2c3h?arT#O8w zfb-eDsPH#HGQB{4kMiO9)d_Wczc2WbjHU!~4K9}9U2rm1Fn2F?Uy|xqRO{6ibDPc! zpT34rx*drm=1tcY3g~|-!Wet%>|v9!w>SkS|Ixy0&KoBxrsD1FO36HkP4_u79C%Z# z+^LNxL&NdOSqHgOyz!Ql+IpnFGm7#mQB09IfBerzVLz@ke zNV8JI*2ON&H*lWSpFD_UcWjAFhv#);H0al0((N2&pCa6)18!JrBTmN!^@;iz80?Dv>Vc-(f42ak}D(dkb2gfa2Hrl zD8hIpN93-Fg7559bUJH^?oHKXU>2dWxJvTplgf81abTxV*fUXL&0K*>Ao3ebVa@aS zZsxoVG~6*{cnI#vlbe0sl)kA7-0qx%iR8sf)eD?V6d*VFjo9O$34PCCT;FMeS@qUf zzr7GvcFtIPBnU05;}HEw7kFHbr?*m(GN1~tOZ(vkx$ZB?oycAnfmxjE=MS>NdOp`n znVns}nKLA-Sj?=_rdOm2ef?6=u@`-d67S(>6nmk$-4|glKN7}n=J>dedPKe>7W%Vi zdbb;Tt(u0FLsK#Psx{X1VrS6w96U5B!$^(x*jxQpJiV`hpQ_Bv>X_lt9YQBf68vCz zhTSykhImMhM@(oczR~|AwJ*fYwgc9@`6PPo8-wd`K$WryI`clA<-&8;Jx3^qOojdQ zWK{OB#^dc}_&J_EplW5fy15rx$mza#nEOOK_8W}Sg)R5mCz~>GHC7<9&KuYECgW78 z7b2FH<7ajW2KiDWlenJs=>FKKdRq)1W`bJwX#4Q{44CK)!<+(i+C)8**<{_`892~r zfg8QZv8K*^i+)_G+!!6h9*FS#Al7$Og<3mrY@|0b@E`VbwH9E}BS(BClYaH11Pu9N z0YfDjE`7`;mzTX#&Wh-LtVJA*9|vy(?w^qs*tC@UG-ne9j;;t^6^$h#9j`d^9MQfU zYvnn2Qjkf`k3}z0=HC5VgxHLNdXq8ClMIp9!HGRf+0c0Ch)1Em*t{hX>zHpe@TLwz zzvX??5=iw64_09|?&B7bHdzl1$C%rd(cg8#67AX)X#;JeB247>x~n)ChdiTEyW1FdMps~!cMdjw zmO<)zQu*vseX81zdZ6yk8*@-a1s-{1s0uUyAqiaTpjyvHRT^|IKtp@thL;$@9c3_33b5#XY){F&eLti#je3 z!^7*4`t`LK-1dV|fj+VhP9eN~JX&jAq3}Bo-(NeScyAEK(9`Zr=baFaIE7Sj}X^Rn`3(?Tg72n<2kCz>ff!DM#z(t1qs5Gc7tA^D3 z!)vuK;!n*pVe`Ti%@_UfZIUi#INQTIuLMnp$Z5D0ijK^LuL?26eZFspZs7Z&l07Pr z*0U`2cT}IV*Nu(mCQ%au54qy1RX&y~TVq>DAcB{spuiy*(06O+c3-f6U(rR7~-~_A$(ye4dOOMzQ$Cy}e$Co!tjGck0XjE$ez@ z+2^Qz-e4Q+WxV@*^LQUof`x7+XAoaWhg$AfdvMYl6xGa z_MA^XuY%7mFT~B!x)|l=2meML*zL4~8hO9>%B)Z?^MUQQC{!Dfah1b)2l*O9(#j>j zkEEW5e4N2qY#a9UrCH#8 zO$AmD%f;Z2rIK~NZAL2irQ0f+KQW(rG!X9D%z3j-?y|1C#S@d zqrmy$r`O`~^b_J=B`sX+7XuaM_0}8N;Cd#x(%SY|?Hh}+&lBKu+6*WEtH2zE9DJs^ zVeQDySk-z?JTuUP!-E7Q6zQOnT+R9m1(@T>JPmB{l29sA` z7o8THVdUHtWPKqYfOq3fWW@B^V~UOI12MgUjLf0r?L3tM8bz>ksFXbCl6v0l_VuTj znsrpnQP70N)Ns_%5B_*MdmtVZLS?uIcEwM_!u`~~nvHR;y9__Br(*+qm!#guq;hJc 
ze!k7aRB*}VrSM**iG!WPvCo+~PWq7aVhRxO&K|05e9%}G4LO-1{+nJ&9w@aD>TXic zpHh7#QhAjDil4xL_P9CD)-NbMki+7UD4w|cH(g!=c6w`U8_#7}G4%cJ7~n>%ILu-hBfu`x(0VfQd+`4&-iI6tir zk3Y{u(8YdO`~0GCXaG9NO3*G_18;oYaHVT5Qo4I!BQrF?|FDBx-vk}+&&9tRvf%S` zE*3s*i=d88BJDqR9K4A}2j+qA6k9;vxD2Kp9PlzA1Oq?PU(nkShmOl&r(1v}U27!I zi~su|k3QrxEH@w@YR6=De3;;qg*n5~YKu1BKHu@{t*H&qcVpDj!WZN+j>s zm6m<*X?BwcI;4;KgHccl)x~-f_AR|E!0m z1xHrA7yp{+V9oPr9Af^cf0+{wI29mco+Hcy-JyIXiN#dx-8nG>YSB5kt3N}s?k4p< z9lodoTyx)vPx(4z#0AqMYJunU!SB>4LDyBzt!#=q>WOoQuy=Kl425gI zi4@!r@-hvioesmab==RXffhYrFYsCVa7#k)yD=G4ZW-VKdk2OcPDRqdN=RK#>U>%% z_x!}&J_wBeDvDIcBmU|nvMRLDT4;;6o7mk)@BhJ9z>er}*9(ZU0tKxmIPM%G#SnyY#4p$GmcXIOKUGB9+X8U8&{iC*4$FeU#$ zYA#YLPeAH=UYT#jr=jv#uv`O4WYEqg@3G^0SD4?+M?LkdZ$F|icM$=ADkipm=kP?M=PrRR)e9 ztdPuSm%84kgl=d)Cr^*yD7eI$p)LaCT{_?zxzlI*IO53pU=)vz#i_H_=tdu|zbjv@<5QszmWAr$S(PJP7t;r?;)*T_Q{dPp3D*}IqHe7WFDx=~UabmU z2EP{PEf$EciuJhCb194q^k8{R4W1#@?4LBj{6=%=-HpM8QZ0C$S%3}K3Xzwv4E7y< ziM>l6ihU+}FzXyahPpo5H(8*SS<>Ilb{O6ihA4ea=|4yAYXgoiTS`9EKWZV(e*iB*x4{nPnckGiJi9{I}5j{Y%U{ zJ{|?jJRzH^hj;WBkIT-7$slLuaf6|LGYJzenLv(ue*0TF2)!l4?(f=oNzdr^!C%GJ zZoTkvWgLD#4?*0%YD5}G;}LVSS$Fesc33d>?5xBB@`?l7kq21zQPd51B2pi!W90Gx zY`ta#r7HG?Y8BziC?^EPM8dXb5*FU#J>hx-xKoTnFMvhYxd7~OU<4s1@BvZ8e z1}KgwhFXd>CXWli_G7Ww6GR_8J8@0a^Vy%t-qknV(UhnJquyh&K#P3eczt}dwS#>d zX4_&Npy2I_Z;6q(yVU?s_sTH)Mk@Oi*pHC6R)pNTAf7k-vA44lZ4FJKZQ_Us=Q50I zWuEypdy`|c@y62v=|>miczp@NHZDSmgEComZ^Q&2BQzd~Mq!)=>^8aZ-I@=>Q4VMh z@P>yxwbJt@%-_!-7nk~(NsVM)tkm_I*LH)l^(Armj1hj%%*8(rI#{jjijNBmVP5Kh z_MN=Yl$DMg{_bP$)gh{)5bK+2CHH_I+x%9%X?`I_wH=Q-`uJY;(8r%_&igC*{ElO9 zU~(9GZiyS_q7X}pHE=Eo9~l9@t76pjG_c< zgtpB@;zB+8dCQTN9?fT=9G6GF5uWc`#NKNrcxuOvYVwYU4r9NTb_sqf*`X*n64oPP z5HOGD=n2*6Ra;0_RSiDw>-|HO5YySF8Si()M#{mY=Xzs zrJ}y%;jwBBekjP}Z_*v{yHuA~`f%vg>p(rt2|Y~RpDWC z9t=*Cd)VcT@c3{~tTNTZp#J1H?AO4o9!_XS{)=)p=l9c?DI1%L^OKE`LymkU_p>Wg zt8riZ59RHLV#Nj>_}K8fveCzk&yKiEKVJj;lHY!xip1Gz(36=W6)cc#Fs?k@S z_r`?}L>6-z`_iYN(p?|F_gi8feciu5FyHfP5`tZ$Q9QyJH_n!!Fe4l9y_m-|_$`X` 
zo(WfPUG)FVe)&m8=oo2(-qh2bb^+pMB)f;E!}5kb_Q;p8ccBR0gNiU~d>5R(&mN;F zU2Hw%fdw{(yn8!fB;PAeuFT18Wj?MY5!C}tkw=~3rw;qTIRCr$Ob*ROuf^iq6A(df zV*NC8ga*1I*_fQK0rt2-wnhT)0S^zlLM5yc0~VEFJ9QhW>wOsiN0?22D^4s@!{=+( z;7|_96WDvJoQG$_T#&nRB6>K*;of@n!qaKOPmrBs-<`?16?Tx6<(883Bli(6y z43irw z@p4aTogctGQ&+Ng4yDyUHu!WAoxYqy-<$dJ$SY3j0=ZA;Tp|OLS${u|lzq(^6;+F< zTYdQW!=8vvLs>XT_(_(T=)H0!vecJP^J3XtIFyvcA~p$cWoc_QyDIRJyiP+m zVY*u2Dmgutz8S>j*>J^Z9Q(2&F!pe#iDv|41LS=;r5VeH99E5d%;+oGO@F3K{z{N1 ztvcsn($t4Q$wz-DJ;vdZgEnx)EQX$yePh5JTzAx=)q)$!N%*_wX(?=*WsJVB=&XdV ze?`2Iq3&b3*2get7P;B=_6dKVB7Xz>iK~S%59q&cf6A5RbVVEX&>gi%9Pwt zCmK$S;EM-Is;aymjYJ8u)>NLSL{=IrF9JxO6&fK*f%92Yq;;+oY=D9t3cShs; zPc)&2gljIn4jtcV+)2;Zcs)%&--`TtwDYe)lTU^uIk}3rX_)wa9eCX;kFWb(@UJtD zS5;H!uo}9$6mq~llbxd1dFuL7t^3-DzrIRN(|XCi@;2n_U&2PTk~(jY3l7N%}atC)qe>)fHvN(H~Q(`^*O0cCIvaD(1p3Z$2%VNd6A-_b*hOJSX}WIS;-ZApK+w zpNpoSPt)r({d`5`|Ejs!x743Q4agZ1!|Pi1ENG6T2f@x1%&$|V1C z-UnvO-=K_Zfui5tdquq%Al$)j5k&u?gMrl3hb?9H`dEDPUu>9YB091Y!vCIE%Fo9s zbedd%rt5{>YsDX4*Ojh=1u>`7sM%*A`bEME9WB1C1Fme%ABu~dA#C;!X6w9}v^y71 zw8boq*ZXhPcNH@7gNnF6gne!N(e7fzf!m&viI&bEQ(dtZKgs-l3G~`39J}ap;p3#S zI9>E18a|iMZST|?t$)>enLAmI8^azQ#mGVjmI&?H^k6tSpv~@hH*k>H8wga z__Qjek$D?(3LmHz`9qjJ!?C6PP+&87sh9V}0PGrl1 zLNr}Z+d+#hv7c4jU;_eVPI1}Ah$f}Z=%1KQ!+SQ&d@UZ*lTnOcWu8=MV6Dkkms{=qB7kbA8cI#7t$d^s(1IzoGP=-BHdnbj7178J&kLXq@fDO_}SP zX4_MAeiGHcrZAwF4FTrD``wd@Q+O$Ro4it^onNbScZV<~+>h^>CUgsydbnEZW~o;# zABAJoE}nVPM`=H=m>ru^>HA9RKdsIT(K@d>&$bliK??p~Oz9y0g}%@8*t*U^7&IRE zToHb$=tw)WVzpA{&st>~->0VQh0oB&a?Ce%aOM!!wT)rKUK47T2+vc_?Wd$Szy9_}9(@r# zM#wOhC0AlqCk?G06I@y)P$d&^e{E z`>s0D0J^zkaJtNrlL;_XOR`x<$o2ImvDk4E5li$i-%>91PBP89%+&Zg)2e!W?s!vO zm3e5dMwzslV!$Uki_|!tOR$CTAS6r0-Xe*(ivtJ=sO0q2WR9QIe7#ls|5fETY*oLl1m%UdsXdZs+j2^qArEz|hAD$iT$@1sxDz^}UWTouG` zFD?F;FF7GKi{t&ASfw?YKK-ZS=%UzEOL9UZ(B+KvqSA_U)e}AS5Mp|;Me>@-3gU1yQUeSB?aLjGxQ8omVG9L`bn6jp^@bW}o z_una3&L0cqPHqD2UJqqbrTAXHPG#H2y1yyq(3)d^xEZVAjG( zyb#}g{U)W9eMu+Dw1~^!zNnXntJV8i!fPHBjD>Je`mK{*YM+rOw*Aw#Hk^CtL>3C34Ecpa8T=5Tu~rUi3vaSR(XoH2hPxdFC0 
zI0+~0pI)7C`SYq8=wd0~!Dw2`d3RcC8;qW3;c(xE4$E!1Wj}!@J0#yp`Y409%G_dQ zDPC0{RGH+%+Wa<%T}dOjyGMMZw`5LvLS}x;U3hacjNxl0(`~*5gAV2+sV8)fkerec z&A9w_t@{7@Z(dikSoCr*?n6h@F*2S09?qP3JA#%!aw)p(L>G&hT=|&C0h2P0Z?DPG z56@LSBk8j*j$-5<15P+Qiw8A}1~*;t8R^UKZQ=>vXvEh_$yeJXbM$!?w7Src1MQwE z?RwG|9uy@to-sx*TzJ|ri)-B+X|ZE8*Y?NZ(nC1wsvHx^f9SQKRO7#2e*HSA*06{jR=;83T7(G6(OwSr|_gVz= zy!AzQWJm6(G&+dy*D1zYe01?tHaB6G=&1WgCa`>AiN@D0U!#pp_dCi`xOerV$1+K{ z=lk#2Q$9D7OI40sSUQSJ&nD6PmL(aB3pvwn8hwI_xY|MT*cV<<=8F{{1|^a--Uxkf zS799FF;M!_kxS&fd6_8RG%#4S07$GQ8yX zaKqJ_CRrocZ8C|JDZ)zWA-wH`Tw2R}+aRkxk&aiC>164%hRY01e42|V$nz-A;`iN- zSS5|(bzmaTtEGQyP(pcDDl6NSQfE~&nZxf@b0(T|=z%aEmm9Hsfas8dB`2n)D@*2% zpw7fBCit22S>|J=&(hGnCc3UYch!pqwb|0f49hyKwxLw0Lsr znPPB6>W+|1enyn@cESVIrTsHi6*rXlt#SOJC7%9k&O`_2f0!Gg)qyCLLZ2R(2MSM4;L$C)r!R9n;>%@a*^8WF+r`bTs=UNd(yT*3R%n%+x zP$tL2^4RkFA9d35nHm@^Ief{H{BAfz>S8w?Tpo-g?bk0tZqzi2L`ioW1Y1;OGEOS)9X&o|u| zr~OU!s-wrh)`1lD6K?Eg2i)3Zk-yFnoj>e_I~>EJXmbkgm65Vi>f3IT%kA<}`Axa5 zTHZIpH!Ozo+u~ClwW0_n;@HqO&8-91TXfZ-338TG;LEme#KBpEv2K1*xwI%HWB}-waqvWY) zi9W`WU0)}#_;MsquiG)=c`l2D#XF;}_>BE(vVZ*-wbe&*tA$IGxIy;0o_1^{n^j+I zF?Wchg9_*J3(>{i%%vnS6Q}dJ8n5RQSC<_}Zm0zV^hs?V!IU%$T=sh6BmG}}>0K9k zgp<{LI=9O0SvIMZ0nM|it6PF!OWEHi-Bg<&8E{gV7&o&Fg&Xe4L*p!-+k0~Uw_qIQ zJ7E8t%n9W09rmYWEX0;-eBG&e+T1(xR9z8&@xYNGoJlw4@l89K!Do=%&zW7O?sBG| z#DQ&wEZ<*@(=N$k5&nUu_c=5EnL4%Wjf%ZuNacTH`T5)cn^wY$?Ipk4iQ=aez0E$! 
zZg6cYsFbQQe99y*LvkSY_-M1JNgcZ08N}R+ZiKc~RIPPp(X1S9{O!)JL81|gilbpF zjFGwa{-_K#9Vo#%zFMyhNsd0iX$FnF_J5tz@`S44WUuEAv5LeKv%QWV2Q_r_Q+Z!8Q^<(TKnxds{R?Yu3LiwzfX&8i#Z0ys>!tBu`KHgUxK6*_hyqc~uhUazyLAp^SUigjZZ$ ziuZpn)gLo1tCX5@&ea`9r0AaXqntT9KZ~oMrJq$Lz1G1~sQ%54W`l%hEcMoktCD|p zr#0aXZz|6qLwu#yTei=Xo#J;ntFk${%Z5X>g|{u=&zFVvxQ&%L+^rnGiw|1U^(;5m zXvXh>a-b8M3!RVaT{9a6mb2F184t;#is2f-Y&7l>O>(gzh*N==F^(q=l}J3 zj>~IvWB&=|P%1ixym)dS$$9?1BUL9w-%~Jx3-!iOZ-{snH|ujqzl;FgX{>Xp;?EZ1 zBdYgJJ&)JL|F02j(z4*xYDcy%6D~koR~D}G#lmBfWN%p#rBlq`uTyxFB6UY~E73&k zQ4>BJNlucmD3|I}Qtd&zE-75ov*VgD9_CsLSEbC3gU2g)UoQJ;Yx%uzmD#lCH8s%7 zhzB|o*pq2YmGl)aUCLy^Ez$LzoIp$4sVwhq#`|6+yi7{wKj{l^U)zLCNy^i@G?2cx zMgLOEoDrgb`t4vQzH6K$b7Txpy{B;7!A^3fiy7D}i+1;9?s%vL0~cuVY1j~YgbKGM z(u_&s5APe8f#+yfmVfl-%;Q)_8o(RdV(Od8IcrW4Z;k4+IQ6A^Q#6pwN!~18A@$Bv z+5a|X;?dk4z2E)F{yl-dJ4{K;Ex>k20-xmlr0IQnualhY>@O;y^I&G4_U2-eDN!3e zvF|VEYgb3=&+%vJTH#pAnQ~g=5_Y!8CV!fo&o#Wx@55V_zWbXxIdw2&vR&x4*O&vl zoS51%gU^YM^t2hr)1EP$oiLp3Z3@`AEDeV_(%+w7pDTs`su$ml*z6uA3|#}!0ow8D zR~7+MuYPLj&5o-RnIJr&0y+P05%yR?Q7K0D(&roaO_i+gkCycFoMO$4_&EK-boJi_sO2j?z>%ar z5`WyPWOg5wT(aIW|H;naPdRUOf7AxrHDj^ePqk)BJC?SFljrU6zn6;TXFH0U24gof zl(bH^v>2R@^T-5lJ7s8mT}H2VMDBa479BB@IdvEh51O$5koYr9bBQk$-kz|4v{xtK zc~9m}lgo+xKAq!-E4cW!A$68rSEv8cm;E@DKQ9?GppgsjD>LywBfpDCKW=Q5oVUr= zDD z)xMT2F?*o`kLhxJ^BC?M>v4U#9ofRnS~=GiyCyO#>oAc;9wywYD&djLTH1Up=0Vr) z*sC9^*Y5r_6`f6Q@ip!0Bz3C%-J@l;Y?9}Kh4U0577t^~;xhU*PhtD9GB$T_M4R6a zsK%q9`a=dCuIX_rVFXdqj~%RK&s(!VUgu0_wUaZ=4p#B&|1+smOIa~g8?9w$)o#&U z^uLuvBhft-@3dt?U@pxbI50G86x)PbI9>!4U99EzBRRxvSCnhK9@#aizwM6t^O-SL zK2daQChy&g_SBn|&7Fg;eAOR=R(?FUElk+5rkFbqlNsN$Oym31bUlj=A5^bVC)EWd z{*!>obULMnx13S6MW-{qmb}ljM$=l}^J7Ew=`A_CcS@$x-@6o--`=Z~)sL0mD#^d| znu5n_Qy#Q&;GZj!lh()$?ZOBWcTGX}qU6gIl(FWx==@}^cFJG0D0j}N?yHU1Q5`1l zOB0!iIMVNJCQILWuo@SBK z`x_fYMoSBxcab{aV7mB1O8?K-JGAtv%6R%j)rvLZ{^&4v)f`3}Yw@rAo5QzQ$+q{1 zrq8HketMa4dY{xq3#M|3Yuk^@R&GPtET=l&eWOT-dM6!p1fnS(d})NusIK z_m=GQEUXS0^89cq%SNVQyQo}hrM9^B6;`**?eA}yK#GOAqzEV`WxnLWjf740p(+;aqEs&IEJ2+hkhw 
zbfWuU$?uz#%kiW68b6mFS~d7E@|3Fg*oZkt6B#ITzB#pIE^{lBvHy5;?|~0#;!ka{ z$B^Iq$vmz&i9mV(b~!zOtqYE->J5V_O_+$y4r}hzwWn@}46J)Quzj^N{jK7qN9RhM z)K$eDrenIG3{BVD5cg9R#a~f79}edH$g$K+wjgVg^!GaD5WdG%G}Pg2?mCq(-NoOi zE4iIZr&FU%vBv8;ylqUgCRf$NM|w204rd&d#BJB4XQ<}A7+{R!#) zHxs>n&s`nZ^7W4DFFDwQ_XIJ$xqNES^ zSR#T=nX+Ic;nOpi+t-6*xBcYIBRRw}^JyLWv1kea>h$v z*xY+v8EQ9@KF$feF194yp@ar~Q`qNNs`2O3^gcCxp86l(s|&NLRV_aw7K|7}X-naz z3cJx!G-@?UoR~Er1=}W*nERK^VgD>(&%;cHk1ZzST}`^VJy+EcdStEkqnGH#-c7J& zs^sxGEtT0_?;v6~Ok|Ywx3vQc7;GqYqs)Edj6bQofKMtq-;At_!rJYqOC!<4#tYwW zdlwtwECti<@C3fxu*JBMaF_e%V*W)~i49+=u)_~jzvgm2+YrQ$WIet}ZJBW}i+Q!2 zF>vz7uu%f`HFY`bT}a5*L>fuIVea;Q%CX4-733X+V`e(j-xzUCnD`?8|6h*hXRRpu zCnw_9L7#wem4wyHqGU@IgN^D)AL*s)x@#aSg=@2DrRcgvziU1*n_alD)HY0*lN0ff zeZ1zs%Km&j_HVUJ`5iT9eqm4DFaK zIt>?&YjRC)Y<{Z}rEhwr*?3xOnKCxQ6X(6szj!>7GhTsQmf6;KA5-cKsN{&(G+bX) zp+RaH?+pnsRH-K8LSf>Zr`A$ z)EoCs6>S>C(}Eywy_9_84(syG0fe+(&Z%l>F{fc~)h~?C1q_k4@&uy~T8Ktt=z;c( zGLxB2BWLVACrGYwEMcYA*z7GJ!AKY@5k(rW=TzQ67(}%&k$vAMYK(Z9P1rd|aT#xSAS%K25LF^z*I%vjMvg*P`gN0af9nI4b(OwZak9 zx}L$HLObf{hw%M=B#BGK`#VAO+Q;*F-?NZEAG}i)at`@;!;F*tg`d?~ID{wcX_%Bp zh_x4MzJ?LgE`fbs!U@bT#-d{~%WG9gUD%X$e#g|CDZ{zdGoR{tgIFEmjJ4>6$6j}0 z+5=mfZb;%~q#k|8&!WHRF0OT+#gV^i5Vrh@8Y7m$xkn?Z@z9cGRp#7yDjdPyc2qtK z=2YJZ>L1tRxadRXE=r^5uM&E$yP|Gwzo1?X8_3aC!8FshU`czCpEsPwE`LSI<7n(` zB3Y3&oO&DbX)-*K*aP|Gp1h=Ly||_pTj;TLcLW_LT2t?{ExU3uXfjtkwEd=Ha&9us zYS|L8wuIw7ljtrwTus-j(OvQs10O5Lx8fHy4Q0dMrnnV|4&|}r>Kzc?eL)}}uP4#} zo)s0fOWEz7fuE)5gN$pVlwUnjegUL*8>KK6~G&)ssi zh(le|@s#U^wfU$%?75>>iB7;`RtyoHjflMHLiT|iW_o*3zAFg39qE$yZbWy>a*Q8N z6Ypmw^Ot;4&;Na`?&%s*G+MGOh8uDv&r#}s;a1qV5Vd*?nd0lYd)$OSX9%x(^fX=` z56Ax6eiN2fy;Zdr4VJlvE89}7INi$`SJ~h1#W?Z4a~LlICNX}wBbCSV`CI(Tufp>Q zz4JqzK3c7gx$4v8aDs4;Oi=EwoOQ}(S%C*8UxR30HwoRnrn28ke(`Im|I#b5{P{|) zcRQj&_UiKQp(#9?Vu+sT7~Xx!V0b?_T8MY?SsUp|*y>R&`Sh1Pr|{FSOg!3kc)jeI zN?WW;K(r^{9?KkkuPw9PbMU|8M%aiTy5&wL@tZMQ3UaxnlfY=tLN2fVq0(x;R8=E} zuxF4nD`lUkmfu0_nhdPgDNc+2?AEXGlD%ikoUb_;%UtmFyDW{bt6KL_?MnHio=cwI 
z>~q1CM;j5`*^|g1aRsD{@7_LwJrkz!P;}Ivca_u1PIz)Toa-$*pshELphW_5qDh+223yJ#|CD^@`NDXXcawqWwHrK_{dIXkjO2Zn7*5qU!YZtghbQBx{G);mzBknB69<%;mJ#)|g5~>eNB28s zM7+#qllVS;!{VsrH=a$>-+du_O|y?N#5~SJ_h3sh2fa|){|#Vu9b1xrNq%FN>~-~1 zm{Z$<*s+q+b8mvot&BKap2Lv`(fqW^V~2Y?zBPHEf_v(7JAXR)^Nd(8_xJagG|B6B zLR&OGwR$BHA-ap{=V#-2CYOnSRd9S$T|CA=Rc^{aeCrWRu4Bri_HuTVoXsuQ9hmYe zh_4Mq7xNpe@-3xD!))T~mT;$j6RInpD$jty(mx!_8ta7dxX>m7eyoApVb)Vp7L95(bKNZ5g%7af={->0Yj-I+Bzm`?8}k#JV#rWf;B z7@ExI)FO5VG^g~-XZ6Tt2>Bt)51Oe7+B{qj|qDji!sFj~QFe0r8o-zbNJUbNPO5 zysHk~G2!v7INo`R9_g7gDKoQZde4GD&EV#>*K}|R}Xgli065N%%t{LaC&hf^ThZ0d-X?k@A?CE;ru}QSb1}!g&F5I zx^Pq{iz!-8qBWdA2YJ^%eJpzZmW5dM&ZORte1fkxqG)V=@uBL`Xr3p%V$A7Q;YDO( zCb~_Wxt1u;r9x($g;FE$EyLD8v`gYg|LOQz-F@&?wUB+P)svzd_UW`hWYT+D@;*!Vmgny$zn zoy9$4v5b@FrCrLv)oE<`Si3be+pH1`jd2HO;8eP|#G}xqzu0;^@)G!9T8Ob}XT**E3A@1WCy1WqY z#XuX@2G3xcbvEZV%|O%jo=mLG&lR=ld(@C`&HQQZY0BmXPU6YVVqgmgyk7^?GI|2% zN~}1!ql9+ii+6omtnqzn`aDwuwfMRDxN0k!#w+f*bZ(?a?T#M&-Y1WCS4MDh;3zg* zYKoW zBt0e`sAS#K=`0hj)y zgI+na>x-Y#RQxL)>hnTXtAj;)7>pXp_BSTs_zI+1JC6>+fCpM}9AJOC5;M^q}LbM79Je_61k)P4uLl zewGqd-j1x1H`TM==Jbh}%yJ7u7WEpztk85?h;AcaJdWS~OGmGRJuRlsz~pik&$gEf ztD-hOc~Z;Gmb~ESW4U?NlvTs*(dnDRfHXH6)(*f;NBD}b4gXiy4j7lhh>$X@0<&c$?k0~*{{c|R_wO(TZ!DOJ4L z!)%C*w_{AKoWX~WMyIW4*^Z>r+N?kBs+3>W<7sR^iyC_MFuHMH)%nkiy1ypTJk1o# z(e@15nuA_*XI?HF&9IKK3|MNyKo6Nq+fQf4(Q<5_d{uSI&#TR%U(xqTp?5Pq&P^P_ z;VC(cz2VA`g<(vy$Y8Ua^;|DjacW#1*}}_j_oFfM$NjC&G_eq!*)+y=HYTpGEoW!t zQn$Sw54ZVp`?%~!btGpiri@O7S=8SxoT12XDq`|2^;L3ulZ^s#I%dTCYSBA|XVF>C z63bG8Y4s_QnbHscc%hJIZ_`+wTtt`pHR!YKrLyd9!kTe`)aYc&OK*FAMrYGtwF4*a zhH$J!98Z@C$K_}tJ49V|B&JB?_3D^5z^TDC)qlJ>-nNq|tTe(-_)dmF6QusIy@f?)VAR>#*ZKbSnX-=CVuT|o|1KAhg&8z@30-o7(AS#`>>yFfPkj&o& z6EP6J+xLxfo*N}OaHEPy%Mfj=`4zQ)jS;IYNAok?l5%;ktUDs_ux3s;^a-Sq)Q$Bw znsDobu$=ZxB`&3aC)1k}fB3yJb{oVt=?~c78;(yk0o6E=)-3X<+Q)_<^P?| zw?sLg_4ug_=e$-=MvI=IekeKZg(H8$g_&ow=(0unnZJVYkaOMGO%}AtFXhw6bPh?K z`e*tNH7fm;@@cO}`;yV5em5ZSi3|OQWUyz2C!;EVD+ 
zd08DTGGn4Vw>uk*3H;_t=|kae-4V{d&jfV7NY3eETfTQFG)6@xxHvu|E5Y0Is7-7AhQ-D9OT9LZm83W>iWHF!m?#`k%4nDlo$ z$$peIg!dChVs^_6t!GxWNEEKE)c@_;cwuETfd$V+XH}9*!^kQ4&do>D`_y#31%EY` zY=Ih_OdE`jvny8OYo5JO{1T#h8G|$aMs5W4jX~Scj6m_t>z9w|sA^ z51DdW%ZA^#$o`S&h?jjZSp~6}UN9zmt8l5>rtpXGd(O6LLc>QZRP}2M z;$NoHS@xy#&XS$jA&c}{Zqh#&{zSr5HgE1t?vXs^YzQN9b1CQ7R;$8J=hZE5$q}}i z!e|{6OwK#uFTcM{i@d10bsTp)q+xtQpT*)|7;^N+lE+QZ^-sI(H3; z7QOF_0hvtEaYFy4_#cKPpygr3pIyso*;+V(^Mos~y*dU1bbI$>eL_UuHa55VeVAOw_XfX?U^tBm5dunj(l{rm3A**Uw_Jw`XJ@SH+d~U)0{Ugs%0jp$Uel?Lo5bi> zBY8Gnd`koJNUmAT$sTW%*S97N9%0Jl=B~W#JA|{PUM!Nn>BOF3KVSlRi$ZY@lx$mT z@tMvnl-yV8r~GWg#ARpHfdCUWbd6fhU9fc+GY+^AFr5@`ZT7=)!Z|ZKd z$11N{pG}$We0`%Zuj@ouqHx9TdT{nbg{f<}%}{bufjDtHl~$H*me^-R!@q zx~4-!I~gd9GIOc(L`US4iC?sw3!ZqeVplBfu2~Y6TgvyQsqE@oMrc$`W~XVfbn*aZ zy*DHJ$8i2U?m(V?HhY^p651e!w2R{jo8ds3%)tl6W%9jYE^Xplal-zqQYQwnAjORW znN?|b7siw1Q+^gtV=FgLa`VOsC)!f-YzmnDeX5+f3+UXoAurzDQ=9D#=osKfhv%kz zk@nn^7)u_h)u)ylHt5Qd)j;Go|y9yr_a zQRZsxZo9L=WDNDY#LH~NkS^j;e|v2reVR%B=FA!lia(+H3^3=Lx$p+gn9!w{JEJ0V z>Cn~#1M4uneoNzGAMthmEF-}?iz|WU#Ql9)85&<#uifMvp%cgX|BPtez?J+X;^X-2 zM5~>m)gO|I*InV9WLGd>{Mgr?m(yuWD>^K_tj5UsTDwgYL0g3T<7UUVR>Dz?a-_V- zjeF-3Io{G7!%Zb@le2c^ty0E^w-TS^Bh~lIaLFu+z$V^=8A(pO$jGOb^amfz8^wd% zWO*MCi*8C@=hdmwbI_;V5a9&rX0xT2@V<6SW0R>Nd7$%f2#h zZZ>(MXUNX^qVCnLiN$W=V>C+Ryy-C3#7i%#hiHjCyqU5rnO{1oL=W?zxOy%bhfCNv zU=~R(4TV|tSpBm@m-bPfa`qU?!$S@%n3_dSxEudU7TgVCK!mTeV4=)M&Uc!M@9;t@ z#3H+RI!0XknoZ2jPzXwFMxh)l!Ig;CbqKcQIQOFrQQ{(IIpSrDn zUi?=rt!*hfo+PF%8^U}qJDHgXulkE{HBACpRyl=n;uqShQ^9KO1dfK!V14Zd1mrzX zZ^sy8XC&Ws2LrqeJ((_io6XN$co6Ty!hK0ZueW4T#0<&6%H(!(C7Q06v$qcI+gw+k z7xel4-e|ULm*3eKTXc42F#naz_*aTv(Q`7>oJBKIH;-MyzR$ZM?+G926WJbCu}95m zkv5rjpY%ETQ9R@)vYBn;&Lw|e?EehH0sVDLXlUl^vdJt6i`V2ejn6}{`XXmL*}&Z8pQ6l z7L3jnkKm#l`h4=DvO^%BZYN{vHH15+^8Pf9qbf=2ISuc#;&Kh1)O)UacQQb`(-RkhPyV5<%(*0-XVEne59e?f;gm`~ zN%COPZVq(ik5{AkBDGDJ_|@l`iEnyBBF(Og@9fhq72Ru((u*HWo%-2ql=ozA}dmd{;pSy-c3fs*>|!W0K3SsGKLlOBgkokLRQw-|IkUog5+~yGCxy3>-2c=#g8>%14;aCG(B%mVS<%C&Q)XZ 
z*gr*bYNa;VTT0*w;Wm89)%dw+`uVIIZ&#aMJyHwqSn}@sL@FYOFj81Ozx1c$5hlL6 zqRE^%lT6d^!=*PkgTLc4aOz!2_4D3b-EdLOe>aSS>!;(ZBYKCGwp?gj$l70y+(>h$ zdR-j*zFE=!Y9$*#Wf5;SgN5z(s_#QDs>`m!37V5j!VW`LNgdy8UnbeoXR5Pm3b8NK z7`0LKD6i(Qt=n{*{+9nXf26#;pDCTTdKjPfM{E^T|KY$>saN*Tv%y^a0N3kB$sX^< z7rSB_Y{*9Y-$KUh`>d|iIIddC@8NQnC_d_#5_ZUu2QqhZlfK={U4eKTrb{huKt@3k zS$YYqUMv0b4xLF+A5^&iP@Wx*A@iOwuX=m&q9BJU!5&=u?m}DL>6E=0&gbnj+2EJO z#62_k9BQd_)qEA}9?pTqi-?VpyhRZu)wN9I>vt!1W>4esxm5N{)T2YE-{^aJ0`V^v zv&Lh)N;b*|~QjC39sT8&JYMFJVoTN`q_sy`+gGrL&=JL*_a2S$>w`0XSrdPw0~NPPyKY3-Yw(G*xDo| zy;mpy624&O1nzb+q3(58CdTE^?xq`cvV(ZkWGXba+=fCq*V;U?XYIl$!s}0z>~qn1oG)O&y6Ke5b-5$IsQKl0mGc$@-p?P)gO0Fmooll(?6UHoYJzXeN$86|%Q{bV zn2#k7ZHu>LS@{rSkxEKuJ?e#)667}-|1OnGE0*g*KBz^O`s@(S#g+TQHCbhg#UDld zOmO0t!d*hDg55c43#i?o&n|`)0Hma7lIFZ74JTVBrOrvAKmcUZ*nY`N0W4 zsjZ$YNT9(%L%MdB^Hsko#Eh5u*lqEz?7N}H%RENsX_WMIjF~@uG&}lDXOET4{~mes z@)8a)#Tzm$LuHH?&tHT+b>&@~_ISkzBjZ@Gz*P48>k(Ynne$$fd*b5B+_qB);mA*SWKR1)I^B(2wTA8Xats`NBz6+NSZlMCTLd%AG=v|DsTpC zKV)%O&18$PHTK-DNvjWXt_~8eU#_-j$RgPAO8Pg`MzX7*>ClD8nu8LKMMHg)qEzU zeOI|Y*VGvCX-{*TKyR7jb-N^dw|UvTx$nu-svwLO(a#z>=3FJS{4JWVN_QV-M314LZ!S-M6Mb60SxmIbB5K)e7MZ?M9qzwY zZsUzvRU-n|oB`K0Adip%#1}T!n-DzA~(;(Db@>Uusd+>a{8jGvf8} z2zI?RWqVsUdW_HEb+{LUo(D1JSt=Pj8Q&Q9Gqr^=szX48El>MN zme+mB{p+1UmCUQ&E*-_k%Q3>KG$VOI0k=!y`L9go)5AMp8GJ{TSQ>Nk@OU1~H{<0f zH?H0hUF<$|C zV|VwlyTuk;jAIAf^Y?!V`@#LlG0qsA#b$qFU2Dzxne9THpiEu)C~AuKtS?8%<6aoD z>WRo4t_ka&%z_o#IR4%a^*6KOxW^o0{ll1bC&Y&l4*;$+u`^-nX|bc+zW#0f{o2f_PQ3g5d%2=J=JrV!p!sqC{mcg6Ku`dYJfQQkX@o@5P(`7WsM z#>}Pxw&>X40eGats4Fnlss?NI3*bRshOD1Y)}NF0^HnDFgzD(d=rNLfsnf2wd`p5! 
zuJ*8tD!>mX=BT{&g4@;@bZ|FDuc`F8UE_76SPhE^IsE?GBEI(|A2@=1#G%GG<7&mM znIg=4?|^vUV7SZ2VdG`;wkMV$r#G`=KUU!VmNv0Iu2t;YqK49k-e^js|6k4(+OP9r zujhGor<9Lk>MlKOZyy4=+d3%Xxnm^vB>S~(c!qMp z=QoL%cEteVOch2SWRB5E`kmf%#KF)vV#zQ=46lhpqmwqI@zhgxBbR@x6IN7uqb(~F z+Xm^Nb!;teF$eCr*K8y%?two)&x@ld=4Mvm~Bg=|;DA%Xs(&#vB{sTDi zoKkR;*V)nvIONG;^y;4?+nP+`uHoomNG@urJ^VNEoa5sG*Rj*kt1=xQOw{r1Z519y zq`-7#9nN`n!M;bW;y{)@}1`=wMU+HAdCmjgxIWu@bpUbU|!-S`D#Rc z>4H@~9YssKFmhfXEr#Y=|gs6GVa{5gWMh|GOLR5q^uNs^4g)` z*C}z0z5%~J@#tqS!G@PE@Yq_29$Tysx-fb~z-b33^m3a@Ms_}w2UugQb2ZYQ72q2C!gZ#%#WK^?V%>td zP?^6B!!(#{rlX3{Pv&5qmN6vHouS6FK>RvYd|bPb%+V73iCBRmrJ>k;aR9<6cHlKP z1Z&dFp`q)9PS^7A%ik5F#sxuzKB;|!t?=)kVsutb!^8X3>e%*0*zFf$>}_(a*x$4% zXv45CeKXV@I`C|&cG?9$$7I7R*aG`@&VvoFqvYT9sLyW|vu)di<6;R~UdQm-(#QM; zTYOC|hOsj9nXN4? zD@YI8Kq?=FQZs!R_oS|?IR&mc6=>?+2OqaT7t57QAlE(>b6T`9agQ~=tSm&izAb{i z9O#{hL&ZP3*z&p-hn{7jS5TeI^)A`B34PE1#31%pw~mM5aibxQ45m*ep#*kTZuq1< z14>gf5W}2{`0>?9uVP;lRV#Bn8{e*wwA>Rt`%3U?1Nj9>I(TEn=fn~E4o2A{@r*0x zl&9g^6AjEI$K^*uRUQ2;z|iyqwf-uW7hz7gu!w#*SB>+*2Z&K5V$ zdScKXdhk?CFtVi@hdWb4+O-NHUIS5(|6b_%X`pd@04DXA0^3k$_)Uqy*Jwvfo~aGr z)NJ^1Kh~%_8*w-2JwH`T_D7p2IrvftO%r^x&cSGMorcYJhGxGqXrFV!VS^bcvdF@V zdQ*g~t;f%4rC2|s9@!f^;>+qMBF)+y-z*ET|EfAJEauE*aS7VY93WwaNYuA13|BE= z-rO7vJ6?o-59i?0^DcP2_@XG&CxV* znelu^%43nR7IgP(*np;@SY_Y<#B=jPG}qx@xo^+YL3! 
z%&!`#4V`^K%&suxE{{9OeudnJ@IB4`^G!}F+V1M(?V(CIn57{hlXKTvMTC2v5@~11 zBnrsJ>d}+Q@FR<(umE3oSfF@_C*mw~U^0hH5c);0?JvQ3)hdk8Qo_{9Uqp`^)Rq77 zg=sT$#;)0-Uw`Vp4({ zPJf?@Rdy!0H^vqNbNPO6?~DSw031D*g5JZ8;GI>D_hbIISF6OxX^Oc0woL>s(7>tY zKpZ?pKCQAl>gE+-%X!H<1t|Afx??{c%)bVC;t)_P&@ zZxgH>=Zve|rO3UtLHeOM?0d*PRwp;~8CZc;&&ps?P>PV&UU>QIj+m9tyok&|6umZr z;X-F5>lL9a*qog8>A0Gji1qY1Kk=@_lirn3qUKrFdi!g;F<1JrP&z9?E9)JcLH=oh zC3dz+@pP3P(x zQHpCdcF4~O;Q5i)zzpiF$oFbFSPUQf)vjOqE%Nhki`cX3D76W|*vH10)t~o|eIXWb zuam~vyvxa%DB;et=eIJ>FN?^?ppPZ3Kh*Sh2=|-DIPRB%ikAWtGHo#1h&_M28P<5l zVbywaqSkw1!xVB<*&EC|QYLe~7vuJdOJ-|C&x3phZlcC-k{)L7wnmOdAx0OnH~yN0 zzT0MD65pFmpK8&Avx#9>DsXVvCo$~)VX^Lr2|`CEqL-p6o{YCe2tA2KmbMuEAPrmS z9lE7y3&TJuO8S?fcOm{Uj*=Q5$vYKr6tX3+#_V`Z!-lIF*u(#Hh4^qZ$FBx`YTF#g8QK}XJEC-Hvj<=iH& zXnqzmuIRvKydVBrGbfZgY}>BIhzudWu+wzNWksWZkuhS|m!UWz4-w?K)K+#xVcIvL z??(Saus7salec=<8Ya}Uzf-e8<6Lj}R>WZ8AR|0KSB?vhG7taW{&|Oe~hUo?hN_(f{BM@jcTI{hOJI zo}kT)QA6%9%b`Q;U-$QCNeOW0_g_K%4x3%Ph z{x*Q~HujI*sg?gseVuX$PDiFd?<@B&P0UGdR|=cmvt_O)>-X2aYcDK0c|jx}*5hn6 zm0koLw0E&WBz4{IoGh_@xI6X?OT^46#u!1}TFYAQ$4*p1*1xB$^(J1rEH*qpA(S!_ zv2wsX{7{~N93^|S3#!1~QOp&BD<)VJ;X&WwIAgL557!n#fBkYuQU~I!=6CU!I>xTv z2~f5-!ud!$tlv)d3eN$X=2rKdjlvc50q>tp*0^lnpSGVoQjBj3Lvk># z^vlHLV)EU*IYXRV0Ji{JByRVENnkuqn@Nztz9r;k4xW6g#P?8VMLXrUMT7wTMhL4OG^9WB6eMyt(aw;pE8< zr;g0bGz_}^lMs2y0z3UH;OItwPH;KQSGPw=2RXQ$nheb`&e&XQfJy(;g_Y&uzd{Er zyc3L}p$SNP2rTrgz{c1-#8s8cT<=PJ117k{^T!SEXp zi-&d2aPeeLHZ{0N7G$v3|w0P{{$-x4{$}83E!P1Cdl|y zh4CeM^Z{4NT<=^#N8IeRSIppjZhI*V`ggSOpv?lB&3Ra+=YTsC-0*CFHl~cxL{ni2 z8Z8pxKDQjB8`|St@kcREqJ=BofoM5Iol5|@S#R>eBt^{V$ItvQ0pW{HnCZ)WSbl$t z$e&my=?E)h?&GdYkkcUI zqR!~pdSC3X;eM<>l017OwCfFoaZV)6ED@<600r|X96x1@7G~|H(U|H`}M8_MTQ;rlL=)E|;i}`;te!jAe zWVd#BFU~MmwqoxLn7=Z>znoild0B)N?5PK?34y9YDju~rM$G7Hm{SjY-dpy+i>#kd z*6%^q&nN5eDQi82ZQT$!@wsT6qlP}80r=z!xE;_fB zB47%Vn;o zkktp5mwyna*65>H8p&Cp5ytLd{>o!AF&Z7=-aHNGM$AI46Lm9}s_8RgE@Eh{%)h6s z^*#>xAr|$1CRX{VlXDf0^`$zv-b8K5ojmxL+oEk{AoSM; 
zm2lB|yQow$fY#G&th~oP?pkvg9VDMG%myosU9s!GRJ3!_q_$*=qZ#(*k=;iB`>xZ}2c7DRGrPMYMpq9; zdfZXyY9Z|x{eO4!F{#ZNpMBVoP?um%UQUhK99&*Wt&c^$%=Ij&g+65WT*!Sf#?|0h z`e?N{i!w){4)rf@Y_N0g3^-&Z;2eD#yEs?sPfegtel6~f9Dw_qzKRxGUCfgN!$g^R zsF@y6?U4n~V)|{!ka;{f4VCoh*>$gj{K5jPJ4!#EjP*v3l1Iji*J9RY>Z`K+V6U$O z-F3`Sp?)N}fqjg{G?cZZVX%i5dRJE9mjd~9oKFSa?tzG_KZH_y4Oj#OW4@vR93I)C zpeu8+IFG({+7thhuW_MKf}uvtZ8XV(uSpHwrO9JNk7q&>tcS~eBQbG3=6YEjdf~!#1ynlep?`0C+%3^Y{|(M4p?~G; zKpVuj_k_cPXv9@pC4xJJM?hE+LWX4;(Hm2Tm!M^RKnBn4vT`$6MOsxoC zsM|i|FbAg&7GZIh2CQ-J3s=LN!qZ0&|K3f&$+zlwZtDQ=&>Y@FQi*JXH?s9!$d7;jVC@-f`9ddl(I;KYjsodf!il&xQm9tTRUGw+eiimH~Ai za_<)Q$H(HAV$n-O9Q>Gq;j>Y3 z|CflYXcD^vH880>g1%=1tSfQE8S-t4O&qbIaXQY%q$0{u;JTj_iI>QUaFb#O&ko*h zFT~R<<}*jcFelmw&vsiuGl+Y)PY&d!hoR^Jb2&R2K%4&edA)Oi+0`=FlQoBU+4f;% zS{@ej(hL#akcV~D9iF0&Vt{=yR8*vc?i zC+?L$6sG2UFVv+XtUY-;X_nYzP>N4)?cwecjf3qHacH0(=5(!r`5&G=@7F=a?W5Q= z@rLLYqK966Vo^(7$@X_P%vj*Lvb!_pF9<`&pbRue=;3Q?C2qx}WAQ)LGV3_U^mr%S zKQ)QfCVCh$C>C1|XrQ4F&mHWCyJS0HPk1Pds6V?f%@8^BcpkIJ!?n4|t#FcItKdn1Hb@9<`zz_Z3O*i6X8W}Y8qeV%hhqg+-ju`lQ?JC_ zdL5jPi^26H+EDoB%$ajOCST;)G}s>_lQQwA#SqnNYLKx%7Xh5pZ((1jyt!3eQW=k? 
zWz0&wXNWiFY|Ti~m0($2o&;5CVTip~F3adH$vFXw63dtP%w0J`(pGUx}Ii8Zamdrx#xxAITwb zR>(qMYBTp|2lG4?$1D;(+zpk&D=QV9lB6)`(*cn$_X{t3OZ1IqmXhftcsn^D^>P8e zH`^e$rw1zN$8$}P;LG_sG|>n4Eut0{27g77UbArVVvgol58U=KMAv9*^k@I#xEm0B zC$@EpgvBj8>}^+q_=`pGqW?@*9qx^0IlL))E0)}yf~X-p2ZWm;BG3i1jEb;eupMq) zzMDW&YnO`bEFqumTJWF^jv8bzSn%Ji-~dtGv<2F&<4H zsE;^RfT`-y*nGJJqqeq3;)t)}Mu`?STR9<$ewnQC?vT9Vd8U;6Ljzw(u^@x%u0t3A={Z!9iw9^lf6tlyAy z$Um!q&C_<&Ybs#pF?C#h&--qz5oTmE>u&`2jr&~SM}2vuLmbMk=wS?Jlw;e{@a3lz zO~>V-edNCQFi#5w2BC0TXN2GL$!FM6fGg(oVCqjJ+cF6{vo&z*b14k=#N$$|RA&8) zto3Bw%b3LXM#HtoBKm?6zUwC9)9opUtaHbWae0tiL_am(5rMV2Xm1L5i+XBM3laZs z4f8-{3Xm4vuR} zv9L82A)op0_BJ8OxGJtSYa_xcj9y;}+UDEf8qd?4$wf-t5((w>1iXD^iCzcF5am;d z(DS8`wVtfp!aaXF6OH~ylzC~AHRXh{?8_ai9Jv!9|E=4Z@kTibPaU){c3vf< z9x0emM?ZdbQ`!m2zH;cn zxqh0RBW~B4pq^RNAN!K)aKr(=9m4Tpb`l1y2ZnKea@3ie5AtSY<(tY{?^f0rNR9iT ztLadv9~gn>?@|z7${e`m^$4QQDtQ}sy{(bZew2u&iv`GVsY3oeW{^0xh)uuv-TzWU zmwyc4>7k8ZJ6vF6mIujX2iT-fN0o6hCSIa%uAcdlM)_nYmdc!0FwCT`{`(IU~O!5CirWLt7YQ{0Q=5*ze6C-*nok4)iY^5?Zr$VE!=+x5$ARrQ(ce zb$X3^I7418821MA3|4A}cVlX>JBE2SBWuXsyDzq8{1K13nZR2i1v`#w;4pJBj7y8~ z_^cyT?Za^HLLxIaCu7_3I{f`N1B2hp!A0?1G#!5|45#bj!hHIL4+`usHZ*R zjxB28$mmCJ={C*}hceGbB?}Sn>zIA_N!&X2S-AWj53@|_;;M|1LOxO5Ql2+=IALN- z6gGTieomYznSd2ogyk z<8UDR=Z$q(!hF1i&U0|MtruQxdMXZ7s$st=JqzSCRG&3RHqXuB6}IHdSYcq_Sg7P^ z;OT;L#8yS)>!V71c6}|9-#il@VOkj3KM4EY^6%Z%8TYNor@iltt&8Ik-k1T!2y?6( zNZ)5e4koOwLQDT%uuXX&Zfey2KVb|;6{g&FUZ5h%b$%Qt&?={rO2$t>uZ0VVsT@V=4_^}kX) zGwY3l!CkN$Q&1maL)NGvy!w;pK%V8%5(lL8^un2~(agX!#V;2rc74i3V2l)Z0=mHI z&@J(v{lJRh(>Ze+53T)9IAN3k`IkV(Pcl~YlCb`)2~@~yYJHgjl|=d(JlbJK#cPpm zz+6M6nV7{{#)xcNTrRJ`xqHsYQcFP=b9egRv%rXS`e&~dVvR5Tw~zZ`rQ2PhZo@o~ z3pr>vb`mrz>=EIUgH=Zzv5Wd*2cC&6j;LYRf;mvWoQxR52KXtQ5RLa%2q!Ne)IKi2 zY6m+6=8_-trkvN4DO&53aXT{^@~`w^dAkDL>htl9`|Y>u{)iCIuVVVsDL70GY`390 zIQqa9n-}F`WvV00`vqd*gjpE9OW;pT4TB7Fur!z7yJtV#{Q5>*A8mpUXH(GrryewJ zTB2}b33Li=aQdPPE`6Pjf`z&mSHrxdK6yCRPzR44gJ5s@UJNguh|F{+d}qG+Xo)Re z(8s?~&I(0crXo*aCaOl+&~sOYF8}63X*D(UPCfCf^_Ot8RfmZ#^Jg!b;H1{WaM)qOmrFh$u 
z5kjsQIfDE7-zU-=pl%d{zG@X%|563r(r<~~68fLxng6?17in|tp*Dy6@CEjGs&0p` zJ;={`&i>N64t5`N@vwItQWWK(me?KNJB-GZE6m^|OLOZM>I+;8p|sZ#9|~hJ_%!De zH3Fvlig+(m=O0*#GSLkN(f7sjMH*yD&48`41YytYVP91UA2WOW_4OwUCJpV!30!3U zpEN%o<2Wa(KH3gN)Inr&{uwqn7CXt?v6$cmJ7%}P5;nLsmYmXb>f?)Du<39Oq~&}E zJ}gJ~i#^CPyDE z$ym?h4E-H@+l1XMO&p95gw+#WgwYqCH7Flhd|xl@>W}M!p3$+q9}ZJf8DEGo1EjD! zFNc#&@~GdehqIG>p}0&Neak!%$@lmVYQ16}Mv!M7&&&?a+!g6Bre~&_TFo^Lf5lw+ z_ri<3_gg7`Xwo!-jh6*(zoRd5svSPHke9bO1{NnIxIMHC^Oy~mb+!W6l->%z%OAu& z`V_qbnJe3IGGZT1gb(|vM&JYS{dm1+5~<%b$E4(+S5(+H~xRF_uyntzQ1k? zbtipDsh>2~*1-0@URa%$hcR~?aHY`$kB;V{T}Mk?n>8CV8;anX&AeS1Ki{ny=8~~2(>?7 z#7BK?JdX^-c6V(Y_{;suA@Ym|J44#d7n9eJcVDE3&qJkH@F5Lu^{mQas;wK-7+H5-Kjt z)H_Q(fU^!tcb;#a5AU%x8iSn@nh z0{;aBK(%KYbcWiasB|{Qz2fe^QHnv|xx+gyhsRZ#NT2M5veyPU^vVgYEzGZ4YmX<5 z5zywGw%DARa( z0obTVrd~Kk)1%hjn7%;&D%4vQW0^ZyE${9LjW-X4eZ4v!e437p%m?aTXovgE)YLDu zM#Yr`JZVe9#L?s|NUCx9U>58ms_<04FYc+fis)VwurrsL!VZSqh1(+}B!{zVI}~>D zAiHrU2B`^*tu4hHr3{Q8S&FK|qjBTvOR?MA6lp;bNNzJk>umw=?xo0IWe)ib4!Ax! z2Kn3{8LuZ#lRCcug=*xR4M69)mqbOA1VQ$3u&dTYpJH=Fk>gU+!5Y2zPRhBJg5Aq` zu3Sld@~KRCzpX|@eK#2S{}#vQOu;7V65W~K(oRZV+P55>UrSx=15dp8nTSO4iB_f8 z!K_0L6h_wKnOj%vjO_#opP$KO_{Eb`G)Bz_Ez^ooRmt4DecsrXn1iP75{Q8Lm@|Mp z>eTs&y{v@LUGK%i!|K>#&-@GM@j2s!&bj$`6yuBz&)jg)eHJsD4UoUO9{LNi@N#cG zOf*}Bw2qv0xhbg4aD$P96^@y@BCS^uj`LrIdOlm)=iqBDy|Jd`GW^Lw8~3+|P37R3 z|5cboPC?D`KwMAI!;)ABlswAEmyRx26cK>>ezR~TT_2ZTNYPI}6RLc#^q$t6?Ad-8 zJXZ}n<^ef@CQxg3gYL{iTtDrEKUaCCSR9QW7It{*SB-w-$<->7LcvTPi*zoC`-T!s z|C5D}*Yt4grxkw9Dn@{@19bNU!tvQGl-|^bQWAYm7xJLGxfU)NeNp57K^SeFgnv%E zkm;xkP4aNu=cQv(mNjC>IO2s~9Fkql5Ux>LcewS=%?(4l$Hf>BhUDlW4+AvcE@&sS;`|Z^Oy$QPWWSd zg9Ov~KFJ=={TRIt=Ueem43+T~!auEIA{k zw{3GTd-QT}$KrXZ@Z3eLH95KkD|7KWzY@<9d!S9PJ?>~|;>8+2tOq%l8V;CGAMHX< zTeukeVaeMVOj`oXdtD7ra^&;YRmq%VBx^lc&+m#~hG5pUi(=VR>cjn0aNR%$A$-oC z*qVl;4%TqrV1?cEP@Mi?3%}d+)5+!I7oTggK3`<5_on2g=)39`dl*wxPtC@!GY071 z%Lr2PZ9Mq?ys!=i zwT^J0KJt!=3-sedxc|<;E9$7NlPd5hG?&lVO1yMbLDrV1qW?hV7v)Yx8?TpM+SHHj 
z%*A=?D(~`~5;8O%J^9|eeCB`Ga~^!c%FxoaA38n%AsR+Z#H>_bjQgmL9``xVSRReH z8~JmZhG;l315w=DYt%|1Nr-}idnF=L74UHQL*ZzpkJDEov8z@S^SU@-+WmZNO>)6+ zUe{xJE$4He|C(MA18PNrzSPK^>#RS!6MhW7FK$m}u1;4nX_K_@UEUgUXXx!RbjI05 zF0eLCL;SUg2=QYscWOG$oSTDVT5Y1M!d3DrwXobF3Ki`P@!tR&B(^ah!Jd87^I&W} zlL{Yi^0c|X-FPk?FXhOyzR?5Id%h3>)N8CB5sKdG`gr+JU}UEv>dUOrq09rjH^!ms zFCFw;U4hHjGMP(A-M5VUep%}+v~Q2)KQ4<^2PF7DG7>2UlhNAhjFXC4=*T&dYAiF$ zt<%x`&J5r7NKsC{mA)-=s|R(1+THV_?-gq#X^~r0pof*r`!FA0j+?ya25=`aVP`7- z%<(n>oCq}}7{(wsfUeMW^3!!O6Ut}P3_oZRk zME2?mHR#3NVTTcADEJ_ccVl0RFmlHKs7K?#5eX#0UQilYh*QxHXjF~Ghu!Irb|N2u z^UKK#$Y?)F?pnnc(d_>~xKvMpUbPq5nB+5bx5tFqJiI06=~8$E~PAG;o5MVJH?~ zOT|}eg$lZr!HE5t34IW<)?0OAV%aS zL3NBf&i_}5)UIV%C?UIuoErbxd&LMfT^zDWgC}zwn;zL?eta(KOLa^3_Ceym=ft{)I!Ngj1-)^aWKGit7RLRp$Og{i^tt0FkW7U zb2HMhm3-PIBYQ!o=Vjs2W`NQeNsu1WfTnQ4jlQ{P>}L=4=}t%?uVd0xGo1WMf9bS5 z+*@9a+k?8IUE@`9SOvaLP9f8TekTt{9J*D4Nq?;owJZ=reKW{dvSLPC9eP@GHW*SP za~_ITL1#qS-V*=Q_i9E&|KHqg=?rV?KMJt?fGy0@rs88j3f?{8UXc6jnu08>Rjz_k zoFcS0{}%i6w9&af6uW=xlH=+Ev(fo@mt=!q`}}d??o1q*&E4Lq8gi+)A7VehU-_bl zI@%(14={h(X(p`GCc$)>BQh-WP`}a^;a8?ZIWH021{=WcY%Ml?VjhSId04)u#oEtT z#bwU;|6uZ2p zU`CKFQnr}jC^h!GP>6Np1xswAFi1q;#(Vnd?-xTFkPo{n#WHicQ&y`$!u+^Ka=jCi z{jl4ObC#KI81y6`w#=PS{?`Q?(=(76!3<y_GjN?{*uMf;5n)xIPDwT`=Hy6xw&U#GT|ygv`jnCc|pAw>l5&fdzsdI)wn=j@sBapGS`cr-2onJpNZH#T8Js}Lk^#Z79E}O zvo()97ILXO1)({07Q!c3;WN1m*QXX@`TjDQ>m^$#K{fQR$n7=-g9e8~vDp~r@h*tu zezW)wUpI{zm-BJr1dp84eR z{~A+@q+#h;Z%JL|$0tJF=dg%S(ng+RI1cJfL8iV1PU{qq)oq8@KmDLrl!Y-NlW}rR z8SZnoHM=Ld;EQ|Wg6VrvdTTrshd83Jje7?6MvEsGBk(G{KzqVqH75~|=P_e=P6^a9 zbNCD)qesSib>_-YjK3?M{xpPcb~dV%)S=Vf0c$33A28GrF|MXKcQ=hYbZu-HKL?X@ zQ_yYt9Bdr*QDjvsU{QBXsC>6T5_wQ9?wk)*GlyfL6LW0>u(NkOZokxo|MF5~A{k$| zRmgmPm$lx75xsFIsT;bhjlq9K+E^Q40h5o;%;ze^pPTMT3kipzPZAtXTj6;b&m5!k zaD6z>fiix+j4?{oWqlF*w`oKEzhDeMZGdh^tgt4c5F^4IVWZ-~I?0?lX~X5U6uq6& z@qAs4%zNhHDxJ{1>WcVLBf*~Kp;*Itk*cC48b0RZ>QZwo<=My5U?vg@jA7$bP8NPH z{@p^xjf{KkN#{qPpZX86!;Jptoz8f9(-<#$+QW+3C9NCi2|wqHAv>AZs^SU{X(itA 
zouWool&pJEi+T*J>|6Ehrr@Y>L$F#tUOcqG07ox;4Pf@f0z1sfsDpP?3EXy8 zV*cgu zXwthV*2QYEuMWrk)AUtlm_j3f`3PBLX>RvG_f)dFM@isST?)^NEQB8}!|}bh1g4Nj zcEb?Twan}y|HZP@6OA46@ctC9>)dEuj3)!vM!+m`4h|hDz=io%wK5yqwh+ak&>n#CQ5==FmUDeb)KT z=18+FLhMF+L?5t2$H56$Tta`@Eb36QlX2=P_wV#UcQ0xa4|7K{cfk^mn$6(Be1pB& z`M9~;9xF>j5xI!^iAOeg{BIe&e8>q@Dub-~D-LmYg~^UbBI=MPI_pMYBsmi?!U{h&xC)mh%cIB47vlPP4dk_V!=LwtSeRmm#2fU{ zF0sWSYH1deE#jPJ1^4wO=)RtN^yww2Y*2#QrW0b>M$S)eXJhnRUDzzOfHwJA|Lh`X z{gOLWs*=&f`)k$``V)0>vE#Gs`LMF;tfWJ_ph)^wl$_8&L>e>ZPUxajiv?Lk`Ouqc zgKytGab!p|9#!aJnolL1G%|3Cb1mQQy|8xUGw}kNI5ag9zF#DGamBx(U=+*b{H%xfj7Rdle`Ldue<7&6=dXJ(Td%DzXd$qA;W z0eCYq5kt9W9r~*r`>!ywaDf!M4V^G0=9;K-=kKW!0hMdIFv;h<)hQPjjQR8KKKSf5 z6P|UZ*txSD?JBadd15&}?t3H>Z`>8$t|qv}jGI|=j4(o9kQ-43rMcFq)0~BYyJN{p zGW@?>+27#>6rE}oBRJ>VTs?zKDlPmcX9peTz`n8}hpE96hyTRW z!)c5uIrP)bAnV(-O6I-2NoIST+qF|z@_GH4yU+&)5`=65eyuJ;Mj>;-4uvD^0MAtw z5@rWg!j61`p7hl{A2tHk;;$HeKov^Qyf8-J82eQ?FR05$%nwKCQM+bnpM>d4O|Wxm zH74}Rz@C5U!?f;(cdZwNcL4oWUkmU+Srv}*zSugCI;G8a(4(&|DmEX*x2@rCJrDDK zlp@Zj7FG{iM8(nPVz;g#mId%x@>v_-lmC}*PzdJ`XN=t%fC-<|pi->Ce8mb3T$GAk zKdWTUo0qlT%hLl9Kj@yg^gHm$eSDbK0Y|4C6~31w*fNrH1RWie8o1*b^GWVrbH(ZH0T^Lj2-Pm8NHv**$`=Lr zv%Ve&cU%(_+un(jtHxtvq$_gd?eTS-1Kf%dF@*1n$emeG9XS)@jOpE%k}DN96P;aU z@8{PTcZ0&JOX9!_2~?ZX&{dgw$9{jx%{yBdAjyK!2pH zey;s@6wz<)Z&A~rg?{Cpc+y^i3x};SGL>Gq6kE)7^kMExEIxHL!9Qy&;KQ7s3Xe*e z^Au(6>txN>mbIR&dQ4gC$(qA>YiK*{A8=M^@%i_zC>qC`jj*lN3SN?8#Ot`hAjcn% zx@18zntAK&=hjS5Lhr;Xyt&s2>Pfdm-YacH1=VBB6hO+uh1GaR+-lS8aD8mpfy0Lh0To0j*mbm@57+3q+V2dC3?+Gb* z5NV9a_)3&S=Al!x6z?45@&3ePaU)NQ8e~6o`boW1gcGnAXDD;;R{{xTm6#|^fzuK$Hr00zO|<-QmGTzdE6Px%BEram<;Tl zYC;bN_r2uFofuswGq+yW@1ciBH{vDeX}+WdUj^!2H1u)unim{6AG~L1jv>#&5y>-B zx#~3JPOieC_w;@?70LX)s~_uw{K@Y{;oFJKw6;Wgn=URDa2C-y1M}M3ATH1v&5vhb zRHzP4)D+?5^BA<6mB>6-bRXC%TA!a6U70h5js@@|uexi79oekR=CpA@$FOjmqUL+p zJLCW3llFHhK%yS`gEH!3Wz|E!YW^T**54FgUg*JHJr0YN$)Q_ljiN_9SMvVrye}GC zIX}Czhd!L6^xdYU;+IVgbl)h!e_yjW#raSweF%#pjPSVL3OiErp|;q9Ip=H 
z)v1J8Kn|9TmZIHec^FDNAccF;&nEuJIcEUfU9MQ+#N5D_hFD{EsoJYl-o`DrH6Y%G_J6d+lf@BN#ABuKJQO?8j;nca1cm6gg2Vui1 z5x-gihwI)5XU?k>jLFJk{?Q})zedvU+_jSWiNS%?%%$OKZ!>GgA&Z7?qczk2z1$-hfThY@aQlPzAvh|d#uI7 zQ*!vy@JPU4mpac7%+!%U|A0A;k!N+|iw*82gktN9Bx)G>e&n2C>)9NvQ>aAp+YV4J z{2^X$)xpC9({OgNggLfOD2OhE$_WSNx`(2CUox`k7p_7CUG?`FC< zaurz&0UBt|v*N6=1nWjJH{v~WF;*rcgmWjW=j4=B^8fcw4fK^1k+kKP7;U4D!F@e( zppt&yj~=ix%;vt%7V9DcvFu59L&p!z$UdYax19Ccv4K>FTeNkMKaful`AHzuJXOX53wftt+@DE z3uU<>@au1Y%NfiW>0XG(+g#B0A{35|sZj1C;XY7`WYtW3zrbrvMjhw({+-FNY!*#f zlkvsVml;VCnEBEtwIv@m3gjv63&JGLWM<6(U)svCSDFXQd*w3wBCqu81-UObg=t?S z=9W?;MvdydW9INWBt>y4`Rui^SSx0t!v;(I(5-^Ddnq1HtAV}rz4-R~iSTJKhGKCF z;-+iiT%{cuF(vRKKPLKK7-H^ap!SlX&T&iuyV)bTE`) zzP1C@_om>alOwE0nsc9$ga?VHXt*TBDKc)~{YRg^=mmwB_{20fLiZ8NS#GNY=1Ijex5s9K(EYSUBt0f-K!A ztm$oxp9AO@9Y^0zb2XUK8Nc;3o5F(Lo7AJOOG=2aL>ZBO=U9I zdv$y$Tn_##n$)R(Ss4v8R|(cT*ki>PW|8_@ATQeuo0^iKsbh!qdMUo`ETmq$oaZei zY?}73_>^RdrctGMvS2dSPq&9T{W5`f?U4DLyO}L%Xo=FmxX1>mZp)x|twCl!(a9Zh z2wVM1_*((A|%faUJHCU0fPJBr% z5T8f-V{p_GG~5bA#!L%zJ6(#bXj|lNs6Y-qurUXmF>z}nUV7zX;_Ri6wVvy9}C+AyMpT?*zPesY! 
zt74nG3BG6L!O6se*?#|zx3g@o>iWJm1b25W?htobH*q&0BqW3+ScnrN?rsE^0tE_% zwpb~}-QC>@ZoxJ1%-{17&Wr!6URQgau6<7S-fPV{$GC?r{t@{cvK-5R4wJ-to+9(< zLAa?>evX_%GrKt&uczsJQ&VoCra6A$DUBG?qdCo6^`~EnG5#-2h)R&Wh(E<|^45)* z2IGW}5s9s>C4aUo0N2BlEEh z!YN%a)`!|rF>IP4xp_xQ=$)HNXtm~ZAWieYG_CtJ<)bWI_C>vTP_6FDd$(xpI1()N zxSZ!8eJ#ngw3f5@8hGw zkAssNzdxN!#^OX=nj7%>Tm?~%nXDZ$3xic{ncU)pYS+bpZnI=Ad`;f3qQjkcPFTZM z!V~=JK;utIOc#zq#GZ2AuTJG$jreB9H04;$GqqoQyIxP@S$y4y$`+&X6`f9YqzhY8tT@@n2^aB!-3@o-eYX%^j*Di> z?+(nA-}sP47OyACSy{upYe$DVSSGwz3!}A}6*rL#8#5A*IkP7=o3f^!>{%E{tyYPw zlH8M5*F-}PM99Vo94@yW~*qHe(KB>!XwAqsC&uHPK#F~q@S!Rd^Qyq9S&<_j8srY^|!nt1| z&7UM<^G`9IYBk{PPsuQ@HIk`+NZq=`2(3H%Z2nO|xMa+CULVNo>%u$2jOW!wG~b*< zuO21TyVQ^m7tScL332FC3SqVN*e&Sc=0|gRk!Ov&mJfEP!tuZu&&Flc56R-ut1@~X z)WW-7EfOy1G4^`^9WBIFoHRrUEBmyga6Aj#G5i`xr+SHu*kvSr?+T7rr(z`eRhrf- zWN;tc*=8hAPC)Y(`g{}?d>1J$sd}5m(tQ<+6g%7)Qr;)W^xbz!}nDsUD|M$N&T~E{ej_flJ z)cNBzYOJ-Hun0pXuUi{4YadP@&OkrJi3g$K+}}EbqJi@MygiE~IX^G{ql~&njc{yq zRSlYAg12y%3TNtL`-^bF>~e6Hf4)ib#XUEgqAg~;6`w{&(K+~ZC_~frKJ5Lfde^Bf zna5^0x1PqMk-DTxF3rLi;gr90V{WjYXw{<$a?+!sO7b_m2pdU!7n=6@tb6@Xdh@E) zld<}2Z5T{WCj+A8JwN+&7Tq_y;`}oZcjI`*Z#1QSekq?mrn6t}>*wl=$~&(`YONtm zlsvD!*UZ=vAUQENv$)pO1)m>N>C`Kn!DZ&OjxWY&ZyMcqiaz;VExKO1qxP-P<8#=bL zkUm7y{q~F(pTpn;N4``!@_b7euh5qq?U~$?e7CTnl^U-Xa6k*+q0iM)6X8952;f^i zGrkCiZrM-Kdw+4}^x9zRzmCRumpK3jT^1tPt&?TsYiP<{&}d}M8}>~ z5y@le2boEpje&5Ut-m0l8$ zmNv^xu?w_j_GR(y#<{Zcvkk>h!%65m91{cSlh2z%slN0XG~8D-&3n=G9C9%GtJ?JR zkt(tqO4Cpe_BAu3vv_J&w-x@`5NATCPUQDD)9HEDgukRORk%Nq%x?wc_c^Be7oJq9 zr6%0+Powe`B;?qUE_~(x%~cb{%ak!Y5vvxqTz*zbEFTJxgGnD z>`^Z~Wo})Zh*Xk1+eh(mjwyRvOTNhdbc!#OYrJ0gg&*p_W^dH%ZNr$i z#gl|qrhF9LpvQ+y8eDUwPU3j#-izUBl+3Z#=krajvuI2an(D|3U9PH!BMz%s_arxA zUKl~OM32)?a(=F7qczxx7PqC>e_FH@U-Y>DURY22lW5+pOyl(yd}+yvPVZIYNduYF z#D@0iqVH)celvfWPYicr<$8D0M#eBF(}EWrCDZv~Jnxs~X}q4M`FMLxwMZEDSk18? 
z!qg951l~90OB*|eOMQ`Z$`LAps9YV%;BCUE%gQ7FgIs@?0-~-pX4RUf>SVqJuI-}v zcybsmMI#*8DU;$vIZu2Y%^BI#_b#>J{hzb=I46${;zNo$-GGI2Z>dY7A$lbquStz{ zsAVX+y7}U_baCdF7-7ctkE1G4o5;O0`Ri#aD}H9rLHE>Yi}s=H*K7)EHM6FLaya zihDHN`0CuDZEepSMLp zk|(7{k{|q)A^IQTG(`O^z085(!WP%TT=cYur57KcCj8wmb+~rvq1u*Y&X^_f_~#mO zd9@R(#ouzyPq_T;0;T7Y4DC#~I#TkS2WD|#p3jH}2 zWgwZYZrnbfPM;hHM)vo@LUbklL<`l?u9Su<6YqTCu6Z=z=7Fc`?be}$wDe(}i75}T zCZJgSsV5ydb;6g8Z^LOLzLvRN@>#Jqp6|yC(e%CBdek%J9REXgjgUF=Wj_{49_q+K z;r)nD<>+%aVYN&|Z`5>N`%6DSxbF9^CJ|Pvob#<}^JLXe)lIUMSK5xoCCP{etL-VA zmWiqGZPrZ-!m`H{%r;t4^{|jHH5ptEmVAp5AC%YjyUJGTvA8-xyo@yC@ewz4mgW#q z{$Jr{_mIzO63h>fCI}4=K@jJx%@E8Ee`wto}~L0#4}I^Ncyj-)MAP*&mYJ~CrGWSEXHe!`gm1z(b*JTHm3L1^SY2*>eWNg`=SWDr zaJuifbN16ThDqK_bUQ0r$?Q^VUy_`&By&BqE+tKB8*9QBdVot+|ek9IDI$H#Cx;( zSTbg_44GC(IJ!eqsj6M6v0iaV)(+Ywe7D`R(8;&v`MmMW zoHC6;(l0shSxCdyskqK8Leup$tv5B@H#Ob=G|fdk)4u_Ke7>TrhFCG^SOK$_S@A>q zFuJK_l9S`jT+x<)5-;n#CN9kDwurZjO6fLyfySH#*QcE+zW7j``d9SR3nN%1eYtB# z#HX=LxPM>mxa*_{Xc@tJsT2PFUFzE*3HjPTQ!~TQ1U01lFmcoGRXhETo z3tg{fGXAt98%~Z#d$jbmy&QyvUMRUnS=f0MaJqeS8b4a4?p2!-`zl5BrBX*Ynd2jK zpmZ%);mr%*J5HFpoBEP_P_!TGLx|s0%D4}0=@q<1{Sj+QQd}NSPU@p2J>8roS(vr9 zXU}HgBz{k2^>!CNyr08?L-|~6RKewX-&Nd*hTQ#q02NM>uhh^ET6;5YU^Z<{g%{nGfJ3cKXp=Z4{>B+Q4GgU z&7j41$rFA%i$lV0cI+#9ISs$xwC$hOlK%J9=ck@I0dk;Xc*__RC9ebFp6D8P?{U1p9|diM{M zjOZA)b+P5h_goqZi}&^o@vmseDXn_che2)Yr30xsP0+md1E6Xi6UDgN>MHV^04+h1oJw zGMyGqrM5#7o1cp=QBSyO!pnIh&x5A?>&xTo$c*8=I{uF#CYe*Y)76lA*IbB<%Aw;8 zSL$X5@~C+N$UI}e^q=>LH*K^IcC_K)om-0SZXd-+KQ|U_zzvtZOe6HpRZ(FkM zyZ1_An}ao9e<|VI^-PjnN;$W?DWPYoR8c$OgYB6r^?{u4wppSp{DyUz5L`M+`XYgR z{VuwulZCPeC(-eZ_>kItRx5YiP~*(B@i`Vu)iguWKibp&M;1BqzG&Y{vLIheAM?90 zOV&vLWPOVCRZG~tM~lzdwMe!x#v^GWbJiR4S$r}LC*(1Ef#hbb4Cb`!RDye%{eKSm ziThbBPn3Ch-L|YBeoEawE%_wE25Ba_kUb;Z_%=9)%m3J8SUmXQ=e#hwLbY&tp;L#elqHW0r>%OK%_hmj{*pOp@|)S8=srDU3Es*`K^y$K0{ zT70=Of}lxutes$q?`dy*46`u*EPcg4!WsQ*IA&8s*Wg|xexD-z(sR)C+-g0k7T-R+ zQlDDtp(}moF5N|U=^^@7Ek*`shmD!gxNk&1b)r8}HWpvF=SKwV${Vsy!9G 
z^vM&=BugoQU%hl0oG!W3b+c#~V~^Lwu{fs1k@C@kq5;wu{4ROVmxKeU;d*}#_^I-C zK2}P{f{%7-^l5BJk4otewi5=}anb)8OU9dZ3Xf)460~|2UxdB1X6h`QGiuT2_b)2d zPWm8CCeqwgc)0oEBl;_wJ!8G`=;uqT;u*ZQ7{N#Jt=ygzkEQ4#G<~0GI!Dly-xK21 zlhg%O>a*wr4m6m-&%;J|c5ozZZYFK&xzI8R7Nx|pLF#}>fwM547f-q4Y_xJ)aLR6z zDsEwn?tRgiUe{y5>XDL*mrX&4JuA#T`1f4`wOtK3Hn>!@iYc7yTF%Nnjksu6Pja0H zF*`?cpkyET*42?{{S4kebs;dv7w4N11P?Kz;JKV5!s5A+U&ym9J(y^9SJ`~f=Uv`- zVq+~N^V^ODzdWul5tiTdG1U4I&Djzgy15n6(KeeK7De0~r$uDZ5w-b+4ykwKdsv$C zZG)W03v+2;?vC+q!Mu|B^bI339%Ynq_e~m4B4rMA?zx)M@sn!rB7UXSaZGkFB=V;) zP{n_A?uRomS0|B}8-`0CecBtBv*~@hFtBH$>3guc!)LXq^#iqX>sG@?^gA@%{u>~35vnQRS6Evr#~{g8YFZ4ds-FeO!HlPxb~ zF;)8CV_ycL>dg?2%t$tDC?RmGnEwCkYQ;%!RLZc;ahK4ey~6XX60 zWZ11J7L{9bNIad*hYMTg7sx0f zLG*4N$2ey_vW69j4=#Z{TTAFz=Ysn0zhkQTe|mITGL3_0%z6G%adB`aeb%ekHn#hfmb$WJ7vo1#sdQJsQh>=JK!9{kcw9O!H%TYBa6J z7~*%Nkb^sBkd|4D$=tSl?Ql-Ly()ddzbD9kW6EUFGj|ug^fZ0RtIzjh!>dFrg=?D` zQiP{XA_GlIG+r-jPi^Mke5m}R2XgS7Gp$w|WBk%-W_xFXVlhu?v!bP2bv#94P8&f^A)AK5j^%v1oz<&iY}sArk9z*4z~S)$dJ( zi?%<9?cViSD*nbH@p@eNBZ@b|OWF|NLe;=rGMt5{wR${}eo2%T8q4=A7ZzYTd*9Eb z)80>Nh3|KDB60*%2l=qMoh3UDI5GL3T*~a-cr{3}3-`ogtgARC{^t#WnN%;AdNa8Z ziD6IGf{7z&y~Cfud-cR0=RnJP>3s7Lf7VZL+9gELp_VCO;?b+pN?}^VA~K3!sm}9m zsfa6<*xE!Bl4iy}>6u&^UC5%TvhJGcxDActmrJ(%x=M14e#>L*<#IM>H=t_kZPoL! 
zfz)S#j2@xSb@MS&`^fd&a^#UuFv*?M_?+%Wy+-2OJ)cj=nF5X1>$AHShkHI&jqV$g z)@&Nr+8OY1t>m!Z&0=SoJstHX2wyXtKdLO4TECQmzhvPc{zxb3yX6}GR9QdGI5T}R z_4f^@!+|j}7Z*RCoQ)3@j~7jHIGaM<>A0bi9+AZu#upKAy*}qZyi*xX425SV`od6s z{%qyV?tp9>uJq*iV}B+bOQlG1>^3Z(MV0iOo;<1K)jy5peE3v#4bYK1xUqbc`>D2< zaC?OB^XF92;okJ&ufQ1YG&Uhoa!q!+B-3C_F~?RnCS?30b^X(Ds`dQ|h|ouSz4$Mi zW)e{5Om+2W*&pIr^Hq4Fi6wNuor<&MKHl5dNBp82m2I{iSA-{O{g)xvL|3_KW*){e z$2%VBz#U<|71&ziZ8@L8hPm?G&1ZD;XX?GpMb*pK1fS3(N<=65N%BIDR*S#d+noUa z$yBDKGoZE^?d&S)YAo;7OOsGHc@~4&9qd zv)(g=i!1%bgEM)Qn#GUd70js=4nd=T)WKtlf`^&d&DUq;K36{0$z$RZTMi_INY5{Y zG+`h9ky^pWN7Bn3BQ%zqka%J|^M7&TM zUZFhd^;;tI>sIwj@+RhQ6b<#MOinkF`L(qj%{JyS<%l_6EyGDZKb0zLx)H%nx$J*!Kv=&~Bt~R0TYN~rHE|?XKY@X}B==n*S<$ zd^^eUTR9f35EHtH@8wZxCWjih(l*bZ6LS9iOZK^4qNltgp4v?=B`lfyN;U8QT5W2p z&y#7Rnf4HRhuM(2To{ob9SN1QrP<3!yd6g}R=9)1gy)gEwU~GPTC-`?QRV5cL%^14 z#2ypAhWLZ63^JH2Iit_Mxe(tcoJ{LZiOJ}B^lzH*Y!PE?pxyorX zx|Z1U@?8cU`nzyylq(mH#BoG&$y&>~XRqw_0rI(^LgCGP-=pG{DJ?V7Y5qY^m~wVp zZkEGv;Z|ff4-hWfbhfxyGU`n!gSKQdUH<;Aw)HXXREHDYq(3;t0{f2;ImM2Wnk?Q2 zyRn%d_UDIV<6(x8aCAcZ#EBLxU*q+vKYdk|JG7|VaRAdhjKpD_=+pjn#vw9Oa;ZI- z_GJW;7?jjuCwNDKB^-BPdT81OtYl1bxqBEV{2FiTDxuuXA=brdoYVAznlRn6yFhYEV(&zE$mP>bi zXP*2qhUHa>%>80O8#%w6?<%~iQI+I0{i3Yr+*Y+->IrLN3I-`dI5NqPz6TPhxaCP% z4Y@ z)$3R~ZW+Sz`qKAo701@%{-8SU7>G`+2j_|n$jWwQ!!r-kJAzE?qq24d>!$p+!~g;us=5(ZJiq zc=nMD=d=RKbUM;(@?Evq$b{%OiG0yB;Qn0~oNi~K_0b8vKCZ;2CUT(09EXTXrgfEj zE^{WuogS;7LoTRM!WaITAHzF89l}#3Cq{UA&3j1B^7E9v(y65;F>=|M^Rs9NIN<#nvv6z`$4*rAz+9SjkJ;mBe=81?^nj5UuN% z)awXs*3BKq(^Ez`9CfAZmJHfmupv#fWuYgcsrAX3bE^xPIyHymS96)Ys1|9?Z&mm8 z!h8QSi2SX_te7a=iBDO?pRwbG3TAQBDKzgX{>Ar2yj_}sS*7R>?|xBbyrG{+M z5>Mg^sk`#UC-osu{I9;mSWaN)ge2C_llOAZ0v1$EWzd^){B~8Vo*|#qiCwz5Pn=47 z;ZIsLlD}J}aQ$RYN&Ig-i%&)o+f5JiE2Z>)og~`3a+g_q;#bw0!`qJYdH!K8U(ZYK@vw`k@f<^}(k9}v$e1Jl$i5GNi#eDRAq;^jKpgxulmW&F?t19n<#am}O-)G`b@Cvt_nS5I=N>#t~TA zS+W10eDXzyT6?JUYim7FZJ#_-Aq!+5y6w*z*^d@4cV&oG8iSJ@Fnlzbeybue&$FU( zYALWtwk#feU;W&tP3&x6nejLiw!l^LFiTjx){%b)CetP?fm3OI%u6p84Q43@ 
zuM0K4PQ!g2SQ}HVT-Oa|{yTR}W`?j>=5eiFma}MzHCyF3e6l}+E&Y?FPL#adfpf?m zSHLifx*R(4LtRYQ!E}Yx=(kPSGT)uuCYjv4?7)wg^8WOa92H@dWfT>#vWuK+mgO?S z=9Zc={Hk&{k>@CI3N5djO5fR&zEbmz95j-C=E;O5C$eYUXq@gb1@wi)DQRS^I6 zwaS|COl934M$DK%;>G*7OJDq0aXFF`?9BH6;>CX)gQb@pqu!P& zs1c5f+0UWOndDDfGvQ;4)_zs1On#A^ia*DDaNSvSc{bKW{w{f|!_u&ndTf$ybA0x` zS2~9!k2ZK5M+Td-^?|*dVY69KXwRm!0Qx3Rqfy*Qc9s^C@+t!(=@)3ao~FOcA?>!3 z=~7GB`NIkA;7h+abME>%aCBiNS!9879Mjpw^}kXy@2&UQzU0RpQd3g$oTV_s0>{ggy6nX0my`%rEYaV#ZeKlSqDO z+i@k>iLRpdh*B=EYs0t^d(|(?4H;-9oJh&vY`x2qA#YQdS?5xL&G$c5shN88*&Kw^3LTj}yAdpXVSQbD{yX7G*BjDv*#M#Q%8AO)LT{DS zx7xKhb@aAc?ke|}@E*rCGiFn%9WC8*dEdoZeA>d$8x@E5b`$1ZEM{y%1`#JqdGU8` zo^f40uP~(OeV8x<4RF2aNvW3Ga!h(Z^G!WW6sXXcjn_N!yhfEc zy;FscwfUo=Ki!1y_jZ*#N9;4;H#cSt3y_&a63->?&Fpw7M<1l}*}as{4O`PF@whU( zq$hsJ(UNhmkCD7Xi;FT43>^(>+-UhnEC!~Q=oI9!FCmF`t@H5e)``OImzBHdU;Y(O zTi4zCJTvfMRFr&wnfoVR_T~@as>C&Q<4AkqI=spgy+b~GTfJ2sp1oEv*G}YVzewg7=<@H`Y_!6|c@a~f@p>I*N#^*o2WsjQZCtmHhC&@a zu9G~D)hV1BE*g~gA)-^6%;QFiWtREuIFiPs%v>6oHxxG64OR7C_%;dAT#!1h?n`^J zF6NMXz@CtU=#pg%v*L(A~}$Nq+d; zHum)G=grL($<$qGEgazrUJB3mZd=W-bHKeF>Xq9{wPmFbEeZ<>c&#`gT8R_Vi`e-{ zFrw>jb)Xgiu;Bb>z(1Ael?X@Th?)iS`AomHYOY zQIW>$#SG~z9N1GzyZtb>51UGd*Wy1e9ZAf|OnUkVSD?s+x(gzisU`ZJFXF=!ZT_IC zl7pk+b!xhvwrPF(U%#PRJN9F8f}d~|Ve1=P7Rx;mf6$4feN*VzJDU6cRt(lDApKnm zu2w}9{TzZxn}1Zp`=T{#Jdal$t!UB2nZnc@e8;(=cG!|7XAcuM@kriS%!srsc1-z| zV;A}g`{}s~4ID1npc4ptW6l7{a$2-Uo}Em|`K>dO)|Cm&?`KWW=5pN637ch^@V5JZ zSF5&MRi|(1QsFzE^^ZsLM{Ah_k;}k$o^-h!%+HN67z!u1f8AU@*ho!&SGXmg?<%eL zyH)WZ$#c6iL3;2e9KSJ=C%=k^a=kN;c88F$jbeuQV%FR#q(Jo11u}+t$+zECW6C!B7%Dm& z(YCi6FR#e^Ws3v0pMBVJGmg5#6Hk}^`k-@i?rvN`!Nx}P464nE8$;+Xzhh2;6?%sp z=x3BomkAztJP07wFPa^bP54E$S$Dol#;RR0^Uc(I5HxWkp6ycvGeNo2Z$9w-W0Kd*wj9zX_O^ZBMrMPg?ZW6WaBI)?A z32QqQQU8==_O>b2_;Z)4TcY#V4`n-KfcUWFI;GE)^u|%L$kLg++<}JqlEcv}imnzT z*;|rFK{v_ldzH(NgYEDydZT{7+8fi!=1jhCPSOB{wSmkE_XzI^FCIouqH2&C!FlpN z^o-@l{cQfP*V`V_jG9h$DJ}0u#5K5QE`EedPS{swuqDO;(`ez%+zjJkKRZh0InX(i z&c-wO9B>Y0atE>$5uecsRMT zN9w*+G!P%9YVr&=1^_h}~A{^G7_xIJ6+q!g|F4QW> 
zV%dHVd?EvQ+$({u*Nqv!sDwd(CsSTlMyOe1?tiLQ{be7spBF57*k&}{=Y-+-9HJV! zao91C5YKqgvdVq^dnut4GdTL?|9(B!n0nki`%DcuqRZ~pKJ-{=CiTroo(m`BKVQia z?-_uBV+1C1Ob9uVN8PL>#@ou@L&M)i)AjE5m3?~r1GTNE9s#>++M8^q z?apV9xgmt#UVY?=E zO=&~68eUh&F6r>&XfQ8)3^-HUg&V>nb4>Oi?5>Y^7c%*GsxjB=l@c5&&(X~?_Q&=_ zE9#}Xv1J4ujixfDN*A94lD{c+b4Y?C=DkNU=6M{=-&pZ^NhN~=Q#n1g0<%61h+6(Y zg)}vyq1F^^+v^Zf>P}|!G+xI#(qf{_YDA0YbyTrEMsl+6$vophF?GH@R;S#rsN@m@ zy7)xVtED?VX4)}!narewU-0x!CYCWV+@Iwn*>I)2?~zH>x^hmO=ppaPRm!%PEx#O@ z$@v!sSRV9~Y}GV^Z9Uj<*a81n1uW0>W=7DjcsI|7QS-46y01E}yRNq8n2@9!i^*Nd zVQJycqpg_?I3)Vfu&IPDNu*(>6$b~-l-Z_mEM>m3E=h~ZpgrpCQ)5hSMWg%Fi2vyT zC-~%YE!qQ%N8`wClqrl7UGBFk{{I=llg|n7S;O@-{oI3!ujH-yD_2c{%XQ>@pvY- z%j3-r4~Cad!P9Cw1uIR2n^uAI=}d0)tEBzuwhYnzrd}=_PGx`x_177(eK{Bgr%Nx( zj(cyCP%*-6fi#=v|3%_u)_#uzn$P}%l z_)#8A;>WZIMjW!>7h!y4yb}L|=#4eKPEFTq)9{lTed3wACFf+@2SE%%AW zzTWg=P^$<+Z>6DcV8L?fukU-G!FZ1fp4HT4c$>Rw%@R{CxI}ZU!W6wtuDF>Mp}XD_ z%TJ+P9hXJ>Kr7L~&E$BKJcek^Leur?9(bTskMGK7h~%{>VfoscE7zNpw)N!HT z-6&qYiRS%YbBd};=s!0Fx7;#~ue0mUy0moprUnUba>N5aB2JjGMCvQ^j2xa{c0;R3 zd^Y{#_$>RPcfAssuceb^Dn1(xuT#_Y`ri7ax~IQV6aE@b%t(J>mg#ZC!i)PI((pg$ z$l0YKY*-Y_u2fszpDm@Co8)YH7cpo-bDqaOQ=57W7EiqBa6j8qTl)H6cjd7}om~kEkESDPaUyXD4R1R0LC#Gd^IW+$IZ5X2?o>bY z#L9mwMO!rdN!rq97-7#|dB1#pBYx)M z39M`uh1MIxA`> zV7|G8Vf4U)AkklMsTQ7Lx#T-7d9S=OFR5Om4Vc(vn#>*s@FdxtVI^@i6i!3nVt33w zB~sbYfM*G%WFJW&HKv$le>Otz(th=Qx)~R%;wVTkBB+@Q&NVsMyV^4-HH5N>sq8xI zz}rA8%0IX5A)+Y{BZ_CUNoJCS=(ya~6}Qu(`OSWRThBts^xllome#ebi*p;fohEbb7=2}^4p4wXLE ziY%H%7fBDL6$duoQf&u}VAOKyv1KSe{AbVbYSA}|ZaldEcmj{dVDMDY`)Ls^dI%S8 zaS6Vs-lz>8U)1sKI{X#lg4udA{wlEH>b)HH{NqU9f2QGT6V56h3+kTCqxaP`biNnR zp}H-u4{K4jNSD2*MsdBC1?hG+G@UJdB01M3XZWEd=ONz-ie{gSs9c)KjvC2F)NsAn z9v{@C%Qsa-xE{}1gyA*SjG>|{@feiLtwXMyx*Wz=(Te^%Lv-h|kG(U=;Jf5M)U>G0 z$$FR6+a`m#S?EE|Npl`2+i}Dsn`)d%PMC<3O$?s(ELmfaOTKLqNj>t>bUo9lb$J)~ zP(``vqI=4p$-?)!ywsYqdXn2+=18V-04H4|aIFx%zUXrvODX5fSHkA=HwKcen@(%0zfL4srC{P^$5Ints2SjUQFw zrq}9KnJzzq0_hoQC@c>r8oOqT@yH#6OTk<@oj|g&5m#21kS(=miJX_h(tC(U=ey$C zFg~|-C%jtvf1E${WOD|6x2Ud)`# 
zgNsI-m44!14}~*3P|n7GdGon(5_@HT3;Iw>bN6&!NYA`u@1|6py{+!`(_z*70CJ_i z{af^{omXdZ+}Z|jlW|mw|1bQuBEg`LPdCIDzrTR{#?REfE=@&~+n3EXx-6P(P4#dO zR9l$`3>d}CJjs%{5KrGmR>Bl3Vn<>ax<=w((Qv(CE8C&{`j%={NAin$gwTJ)Z~~^f zV;K~V|9eX|oXjHPjMzOIsa?A&Ht|0wyz5kqi1AUT}_Y;gQx!$P-g zX34qU=czEwvtuZ=v7q~QVbm;%rKw{+e|4%)#=u+Zakz}CZuqCvaa~ON{M=$fXn&{_N=H;oj<=)J47$_)hAFiSEiJ27GC87$sdax#ment z7*{)9v~vULvqy66{X>ZfE+g*mm+Jn>%c@5MnNRNzCO*TE=)V=WKIJfLlQpA~L+N@e zoWN<)uX7YG$Jh*}{a&Q;dhP0cP*-C=syFi7TG>f%iufk3tajq}n;EpXb(TGIg6MLi zIKRLIC&yxRoTWEXBKZ#%ZF#Zct?KDHjGaT=`OnFmcV|T(c{+=5(QUM8=7(SSbnM#M z(B)~N^g)F~Aaka(sclFb+mKAVhUA}=neQ+~lRK{Lv(Mt{O%Fb`52b2OEaRt39`qm5 z_l{3u!w=yT{~xb+y`mjo4PL1=AI!0keWzMmpO;7qH)oFJ3zg!h&7~#cvlwGUM4I^Hq%T+FBK6#~G5Ed=r>)E%0!9~c z=Up0qyB6XS-k8qXPt;}6ccwgY=Y*l;PmBIVOY{uO;_RrucPv+@MY5sXO1PH!WE@Ck z=kh#kKh_ifMU`^>Igp-jJn)rVbuBvw4vfp7MTPMFzWZVJC>DEr6S6jo?sHp==pYL< z{@m#u-;{aZ52~^F4bzv7WmO#`Ue0!9(}4^+&9j&Lb1H$mBJr2+y5meS-Hv8sHcs>P zDzDYy?CpmtR=!`QgOBK6q@D?K1^z1!6;wJojaQ%XRl(8wravE zzmF=(YZ!IAjFCB|39rxEvhsNbri0{n`Qk&YRw#xG&G;nmmlV$=?9LW4`$9c>9loO8 zH!@n*LDSxG{k`8 zO`g+%6p~Ul-|!NonpE!Zii#`Q{`N(&EcaFL`a?UDa4j@ z{u#7Rx5HRw)g}AGnZHAF=SG#&{PGO^R+RIp;+=Xl_q#HenT+oo$#!TW`s6F#1YF2v zro89=R6_hRR%jrz?Ts z0sLDX!I?sR+|3I~bV^{uuf?oB@lDlSlYZnH$?40ULjNE=dZgOpzAuX|)^0d9^c9X} zim)DaICnsDUVo8Z_OdcISvMd)t`2LD4&h3q9kZtx3;*7gU+-p6x5xp^CgA*A82&+0 zH_AEi$J$KJ{#`(u*v8cM{-NA+22<|mD)*f+{n|S7>UaiPXC#B+l|TApBiYwuByXk_ z;S`o38nHr+*Xw5eQ&}DRsCGzyqUrB`^y#RFPaij~zRn=U)&&Q%@mzW?napxdKDexe zrPni(tV5sYmsL;E5m(Kh!mcOMpAsLDkJLG{bX{;?9w2!;DdP1oCRp^>uiK@` z+^baM>x}=s6`{?}s+)gH{)kmFovMfP`JlUG8Hvtnl?zX#2Jv$eKhqIY{@ySPhw16) z=v0cg?t!|y^S+8-ZbIXqsiH-*V$K{-9K&z_6M$1AtbWwaP!SY;O5xr?+>2*s_`tFxV?29BPB~I$N2C3XgE7N$ry!Cff z==HN|cRM{6HVmh`z1+W!)>H&!F}I^V-=9QFR&O{l;v)_yOWiiLquz)N?)Ngn_dh3$Obbb>@4~j5WASxN!m6CyC)(SaWa%Moy5dOxzW$sEjAy|+d;0C4#i}a>l7n2%PMoLS%H=*-)$?;y2P2p%K+BTQ$N#E(D>RO}Mrby-s!nJ!-sPTGdA9Uoi_|Y#i zg7=Gj#2;aZarQ{sE=}k3dK)I+bs)njRCGedET>R>%L(*rP=xctUpO5k%!FyfX`JK3 
z8F|k7zqjY)1<|pJzx?Du(Pjolb5H*LxJdD{j!z^fwhaHc27GS%uga@3q2T{8+ZLs}?vw5iK`}r~P!Iu0>F%(*+imx@yL+3c*xlXldXD!$m`{Fjjy1!~ z?~b*u>%6*65}lL*`>)yate0fNTRXF)X@GcTWWCNXlT6!u9=NA+dU^pKjsL=N*(vo* z=1S(;p?FEZyvs%D{r{CE+3ZSmO2Ycm8qfC-@gvARZd2PdW)00JckNg8D&w7sd1XSk z_G9?E!ALay&Z4o;Bx9gA3+4=G|Ct1qPSM47RU!4X@$X;HRr{;`@jf<$ zGoDub^T3{*`59Do65q^3nU5Wxg!>qC_Qe+j+xpTz z(}q#PAp7r!up(Lx!=WOKEp9U3kFe))y)vAeihi7OG<~0%){A)BoC#Oosfp*bIq}+q z#x;a1eh=|J#_({HBX>MSCwwH7s%s^`{*&aH*9qb4m0W7}ug~*MFVwU?h71iJhUHCd zzU4WJFFOIx7#G$jiazH^ER8cHA1_gSpKB%&BKiqU>-9eJNj*%df=+{e^m|~#tlRS3 z$~odh1JQlGk@@y3$qk$_kr{tkb64&gkA;g`<)0jl->0Va27ReX!w27#ZIa}9^&5vy zh6(;PorxAbSnzar`OOc;ddNg(XBpFYVliW8#It&MDXCkV$sSmt92aWS_Fv&L^w$?1 z{6HRzk0kNBqx4F}6Eq_R9nq*qim#yac=1h$=Cm}kIl<=-sXXDqHRvqQr0A3X1iN9~ zKNELL4@UpvMg3Z-++Jgb>5+2EW~VTG>{N~GP4a0#wU^&jm6Hy%TocLpYR2>w{dA;X zE?w(8vT$gStP>OQFR>?HXDZ1zb7|eEjOz}qh)izJvp02db?!&oI+8mt_r9ppa!=^& z!_nB`TpuX+?D{s`>r;S5N<4ad3i$f|y*lGogFR;qIQvfkkNTRj@{0J8`(>~#)kB`o z(R>*&QO=*HqUA4OuK0`A4HW*lhV>#lx1@U8qiV3svsb>Jgwa^hfhae+JV>MC7Z>!L zyg4u-fsZq+nKZYUc%x)$$vn18z3O!T`Jd{)L-c$rLiplo%(|xXeIHHbmbM#?u7Qkx z9mC%JqDSpq$Wrk|r(@x7Nfk>M%tV8?aj_c>?gw)tFPh#1 zr2qLWi`8cnY5Ojld!2qM)#eSal>q5!)|(W~Q1Lxx-OQ)? 
z-N~X&D$w|Km6lrUDSV(Ty6CekWE>|%f0#AcMLaPRh#%v@m+yMwd5Gi6iQfFEEFtXi z2-b;TF{R--RoK5Ow%>YFGvt4ISaUL$N#2}A4E4(8nRAMuc*7X(w}JA{a-N8aWapGT z&R+PeuFpH8CKpOJq?_nsd)QF)1h%!$qivQmZMuguFfxjF<(9m*&PG@IyKcRNC#Ye) zO&=Q3cSLo@Wp~4RwK=1A+lUt2o^|hKEjr>z`SYP{em#x=cZg5TCZQshwbCydRlN~Q zzC2UmYjs#NFcjAU16D5b;h4E_Z6A9u&u0K@>LzjZs}jjpW+{War--(+jOlkjsk1@1REzUQ z;*E*KZm}`xi=43=n#s*TPwaNd%zAbVW!;SUCrxx_q4IIhB3cj@1T)tx$i(*zNC9G z>xAe$-b;UQUMdc9zguv000yq5B#+PjAB>VU$7+z>zZwq)=uuE8-Yv=T8=UDuOlb;s zC!DFaZ3wp>PTOIr=!%i=ero@LM~$vEo@8FO%E} zZ#Nhlqj2Jkjk!5p{522cPTH~zzxvHt;PFI_Y}TJqtz_?crpL`aGDp7_jqP>G6Z>k% zG5;_w$ecD>`V%dWN0R?CPtIPgnQHMtHI38bpLgP?oNLIL*21}enJRib7!BYBqa z$XqOlKUYHf<7B$ODyGZp#(4ac*_xI%HJbZ!sE0LkU%Jv$<^fheJlOFqkmJKAvGl&= ztM(9{qx4tSxD-5C6y5GPCx2^wNcrEp`)7uyS~Y%KF5!-N}Uj`7$W-ZV&* z*+4EyULBb>v_iQwmOP$@6EGJps#kX>#-B~c#MG6d^G=-G7=vrPDcaRacJ3OkOuR2`yQ-gBK5ync+D8S}6Lt#+kgyjAGpAEXprERSR34Q8q12 zsGc9jo^U-9oSkXiCxf}VZp;+FMU!U2JgZ~O*Z2Z{xg?2Sq>v41jrn}+wpxF!FNZP* zFxgx37CUn|+zJ$e+WF zt;yQ#ghAabE|j^lO6E^DYfHZ_S=Lm!$60JiMSp6(#`WeKepYkzcB%S@jKs&D$jSfu z5*F))!{0Jz_}7uH68dxLd;)2^rSEvTh)ME$xYr|}IMbT^srgtn7-Pf$(HL%g>BChE zch()t=U&z@e5+4l!J<^&&#@E^Q4ZTJqKL~c=HKP5+4;|5)qAH2)3RddTiuX$Rs8tT zHkr&@?u?BR7TTl??p&04|HrAEJfFr;mr{*i7r4A8=Y(I-$yb+iTYbn$vZ9yxp?k$= z(=S$dItHN(%$rE2c-oU|=CIr@gUFa1YCG4%SX(%4GFK~jKa%aI_3_&;n(3)&obDpu zdrM!EJH)ZQttsO|i}>s#+_&%I^+>eS&Bw4DrfPebU1V zsoFk{80`|=XFgGO0l(GpseR}@RkYc@W;o^f&>LmIR66wi8|DK{VPvCaqU%XsaO2N_qbexJX+4e^@ zUUEQX&($N~%UC{lFy{3yJG$njF=~q)y>f!Md{c7Z8d@{Dx6ExXCNr;5j>h$xMmD65 z*JYKIC;W(_U>Y_P{aQ;`rhk{rN#P9*UNV5J_((1n*l?#x0hxD`_&q8QO<(u1adT=M zx}n;I8qoOA1j=`cZuF`f3vE-m_SB7emn8e+Qz8ZqrUX4G5k6LeFnmf#lC`tbw&$vR zTz}E51<^`!+2#a`pSg7^M?QP7#>yAH_mipp)QHP{3+YrYtk1bcbh_Mu&{1_r)Tzap z#TKIVx8!=D=-^u?G4rGsOIHu#0+Orn$edxza*4@`I6 z?1hb%hKG(50m(x-6g8e2SFO0UGK=kI!jaF+#`)%R)jj5?^0e$jL@n_Fjk03Sd>3L= zDzBSJUSvu**F~$-_l_;~Jah1wmx#Ii%kHKO+xS!UU)dLBF_^`>EZ8qSvx^sGf3N9| zU5F1oSA=opfC&-O&p-J-nj*;~(zM?FKiAbJ)7NUfpEZG^yPtbio6qMYKj3aMy`9Xt 
zVku0Od!zZj&6|S%BnR_m0sqKZO4EAJ`?MB+-z(KnzV8~3Mo?L<%a&~p!c$2VHi{kE zRh(EW{kk5aFHhQ9gyH%m=I$)i`0t|W=hO6iIQP5`ji>0U-u$Ny>85gFT#JU7RP)S%|D-}e@{){r>6D%s)-MJ;XM^y-(GZSsdPCg ze#RI-#+)sn@TKUl6VrHHoQl~qPi#$QaBg`4HuI+u(p8Jc7I#$PbeS6k4aNSt9&t_W zSeBeB8Zyz#)paGcStQqA>C=9jutaA^acrIN!FJSS$d#&0o~?dSkt!!M zX(s2aX6_?+8y&{M66w?SF5pl^3cval(Ru%O)w0$HRr6eb(yb-OLhfCQr5FEOe4p)? zxG-*esQ3@Uxzo~y_^tVj_#!&FFyZ!Ad{r9{|5j_HkNnVkIE{tJ)~=)UzHBpacN5=r z!9+gy9nZT>F1*}a$jKhrq=e)%Zb>bc+^)f?b2`klaH3+L6+wMGvD%t}oA{O{wjIav zKjQEAbt1<*pVy6r!|j?+zw6b>zH?hSm+15Nw7OD6+9Hr0h;7E@i;;Ybd=GF?)cr_OOlJHL- zZxR0LlSJxRm1Ug3A~bzn;)L6(-jx&TyuRe3yF_5r+KfX#oW&QA!P-g}PDmz3`&02aC0GmB zUbrFhcc~%zaLtEaOgUGjDdMce5g!NtCYL?sq5O0*&; zLWSq}z9CPvUaQ}|Ot7yOCY}RbzRUi1!7`0V>EB#!AhV=TQ4Bb0z{fzztLu?KW{WcR zE~?9uL3dS019PT7i@?UbKkZ|EG2fd^^QF#wn==sI7YXRv*wd{^Ik%lN2?;CGxZdZ# zggQr>6CK zrPb%_gDRXErH^&%!ECK<#x~1=nC(tux$M)Urj6msKM`b)^Wm`gWKJjM5j!A{8IQiI z9izUhB@g-&HDWO1HY@I&lpNZH*_cSa)anfJmDY)6;cGjth#$!KMmmp|i!bD9Q&#+V zshU<5E=oYK_^VA=QE5-#72*%<=EkEtE?j>gJk@r3H0fAK&n}T1D=*f#o~G}!wrPFh zFMU(L%H&BMOBz;?quiipM-@)nW)&ge&%;Eu48H{Rz2`d@L;Cp;2vi%)YO- zR5f9eF!-Ox8wsOXc+2q_Oqt}x+;o4oRwfeSZ-n{9B3czD@Smx?zK`fQhkjFiv&2s_ zGk};fODcqCn>H+i((5j)+v~^3m0^4uEIFV{3eXl`Ydl5x)_AE7ow}^HcNCq#k_lYB zsVnQKhp;}ylT+bBQR6X0NbdMQJEY$;Tv!SE>3IGWUBkqB(jS)G&PE1|dl^iE>;t7+ z{pcjTmI(u;NBdFo3YH~EkKT!)2Bmx`6aDI$0{&Y3N3B_OTfLhq`$KLh)9;A?Q^%cQ zvQLkdbz<|tVA92By6L>N^fG0BC%Hk<{}r&zu{tjnS17}p#zZAd;=xoSe0q8j;Fih5 z^FD+J1u|(!8sB%BF?d!fhS{mC*;U4v?$ud7<)WHZY|5=w(agA_$K9=N#D9@>V!bE* z8w|!#dP?J~3!fsXM6|L=bUj+CalL9@(#P!iKxsYfFY#c$oEu=uz-{iBY)Ivg+yiWE zhVW)*6c_6Yvt~{n9XqG9>{d3v?Ek2gmuJ<&%_c-0oXEuPk~_P_kx{~r>-5cqzfKHi z^lI_BZ87EP_5!A#n!<{1qSw%{-rsGjuwETh(>z3XJv~P92#qLv<%DaOOgjJYW>Log zt_O>D#YcD@sfCgwGMOu0rJVHqtQK=$^|++XFcU8pd@!d{xF{DB)A^O}#qRGx^t~O& zP045J-Zz^r^6YPp%EP`(8?LDPD*sDALQQ?JSEdZmbs^zW3a@uNGH;_d>0hH5($J14 zEpq9!E*blOvKim5t@I$P^1E6s9IxuJAztA(-JOt)X@u2sq@;WlYeeIkxk_fTcQa@x zf9K1hqiH;-7A^e0s{YFi=utO-fZK+Azv00t(f_=D=gJ=Ghs-aFCVGj&tyvKhJW@%y 
zRKOLp##F!XRn5%nN0au>?A~t3;w|2M6g~LmFngSP4#^&NHaPi@gSs*8pBPBP&l*_|&_`7N}pH3BUVdym# z)AW)Wdf1Rl+hgc_R=C8XqaCy-m1s{_`t6CJZ)P0#{#entf0;0;5(yTLonfRF{c}Dl zJLzNoS~>_N=h~j)=PI&E!&!dcJuVDn-GV53M3|E(dBT&VCvsBV> zd6Aiz@clw|$r@&o%5eERIkX(i+U}yA8feCt1~d4XAYAO@)6le@%cBl*XE~@AdfJnp zSd7OGV?M~)|C~-Xy=7l{>^}hAw&D>jcIH^Mxw1~?65uyWdJ8|5UajiG1lCl`h&)c9x0BiO@){KmvH%oJ-u#eF+uyPGwbbFwbxZRrNhS(s%yy&`O$N)y_3vBP+4 zAtMiqE?u~`CMMx*94x(arV5*)kd-}!Z=+$o{S{5vSR+;iPm01Td^J~PrrNxLXxA#U zcxx?}?-|-$I2wbK_?E9XTp%pRbb9D7r2U+Z+&|WuWB*mf^uJEjue4-!1AmHi;~1Fj z#fa-esd0QFh!(iaAcvG6;_vU9g{JiyH)@R2zR&9REa6pMn8cP4LuMTDqr33DoNavx zZs|vYFjP|ut=U~=24BV3Q{~n)f?e8S(DIA&b`gHrgQ0St&=>8w2ig&$$6VyVrARk6 zj1q7DZ(A%)OQ=ed-_}TL;5zQ29jATzIoF{;CniQtrZTOa2Q3}+;BM$ zT5(*~l{-tONUt-C-i@oE*YvmgF1}=+Zew^}DgKGOo|vA_q>Hl;&36W~RXohq>q3@FkL@D05!= zN^V)P^!Xl^U^=9+Shy!rOE93>Oo0g#lp_fq{{a*+N*?_y~FTo zUCw;pIvg#&t)9O!wSAnJIy{5T*229{9mIeMQDkhl#MHBpmetZQd0oIC z@qumFRE1?Oy|5C$*dr|)5;jXt;k-23Rr<(`Xqf0uqZ#wmoI#R#b+uXyIimaRliU_N z=a=f^h5q8>7(}9Q^k;8zBB? zKC8oj8w;~am}N^XF!;-jzkX$7^TLM)qk`%AB9YHc&G{{ygh`WRUh}Ywi&sCXGnVhw z?Ka{cn5AWp9YT*bS z$tU!c99QvmnYxqxUVJ@O+*rHGhs@=%OzvPopE2TN*GZt|zXiBWdaZucf2@kT>9Krg z06&E1Xfx7*RmU=fQ6m1oCF99(375P|3tlhG!9e(YZ{!|7C$2gfZNI95!TotKX)rqr z4JGf=o%qpH(DC))d?SBC+e~JFnW=DE^SSO9OT|dxutrp+`+v{X7x5qc-tH$kW+s%y zxHC_9fQt{iG0=LXtO@d+xhP&9&cnZ1s(7eHC$3W+hZFDAlodu;iGJc(C&}Ww>d&-q zS#-DZW5%NK9Q96PMjZ#yzfWaNU>2QfPvzL%Z)!}k76E&EF;n`;F0YJ9{3+hVvq>Dc z5w7&Mk;0n}Bg4m<8=LZ(6PUzm#Ey!&BNqKZMmJBz^U&Eg6G6!h=BcDUA zKEgo?!FGBQRpeZkuBNipK8G0ZX}Ile#5LgpoITW!uo1(_&M~EMoeNcerjgad9o<|% z>=wjOD*b}D21U$qPh>`yVpybQt>!kf~N0N(|QBm{Z>9*s#20}jJ3`P0=rofAgl>H@#k7Bb|<+{D63jW zu)VGo4dpyet907?l+b9}Q`KO~Z>5!OOYg^1*dx4zdL?c=_%{oe+AcgjG!~28iNd}! 
zXX4*7?~pySn)WpIO?acm4f|Jhd~40f@mZ{2qsx;Dd)kQZd3dNd?QZ$=c11E4Hr?5_ zqnO98(O8a{D%mkF)E)ahYG9EKy*{V&YP>Ole{6C8A^bC&!Mw;Bz&_CsnPs-){``E_ zv>JtP+*EFkt4iAFyXxi0erQ(?pl`GZXJl@;VXW{*WsW!R`UsB6ne(8#9a=5(FsYS7 zewAD{rd8*c>s9q-kUrmPk6~Pi5&K8F65dMsC=0|Zw?=$mY0|F?wxfq>K1)+G=yFu@ zmM%6G-_d6kpQnR^`d~a{ohletzj)`O@O2$p&DC)JzBUSQ* zn|%MFECLL0jP_(#4_%tBa;IBHDhZ+&sIZXSGU@5|EH>e?=ou`YB+$jZK;wG4S89_m z{v3dT}@MY;Ah8Y@f9SFkX-eFj<{{eV&ncaTv9Ul{O=!G=Rc`I zzx9L}=ZKMwvE&Q5Q?fFRk)BR+mm1BDKH?`?=}0wMBQ9u3u8v+7n%3L3{HyA_}& zNO%D46X^6&^y0$q>?3+wyE>jsG#n{xs3hzoE!q62lxO}K>`E@hYDE?9-+7|0Wf-vW z?I~Dpe_EZiI$wYT%Io~a6p|Af`-7d0Z{-Ff^k$ZTR z5?8JXH|}M)C#5sRul+Wj&|rO{HkY%dS}J!Pr)gYI)6b{r`_!~vr+JN7-~6R&a8T~W z-a}c?OrI<5-T7&nNS7&&=+*L)?7?tiN7>M-QuNSIrqFMY%xj%H;M(Mt+P&YH*nWu^ zCCm5N#|OPHQ)rm$Ci+G@;+rHQsD4^^na=uX$-&7jXIVgXIt;17F81T$N^?&A z2$@fiXFja{ri0ca_cWdegCNT%E@quBwKANsei>uzGGuK21$e4vorSrN0Y3 z&4Nieo{dj#0?hmD#zU1ci5)Hd}d(Sp^!)eMmCbSd( zv^}`BFJzFHF#Yxw5Lox6Y9YP$7eTUb8pxWOFKop`7yc1{>H+bw+_^rE%c3Lm6kbqE z(W$nQJ58INl7Af4jEV_&RQ66CDsBkRJ>C+3M>nD_r!!pi8ZQqHlYHtJ$=PzFt3fUo zM5m%_nXPfX{6VdGA)4qGTcvNa&Yw8(59|(f;OWLx=8YFG-E=o{ZcLEuJ~KSN%3P*N zEQkIn)cAdBTCd`5O$KaytLBOR?9;kI_`fyeMxo3=x29sW#g&-Wf%M!JOKJ@VJf)w} zsZjIKVpL7ST22(h9A6n+A4|_9pyJvRn9x2GYZ|Chm~VBUY`A_ zGEVho_f5%{oT{kx)g9CK8N@dB=HA!wD zZofz=Laiwba~FQL=pf$vF!Dwqf6HBCb-E?1BD1M~aWXe{=Q8fx8?`&&mkO@0&0Cq* zR}|RNVwgPdztXww;!TSV(cGU9i$#b7mcsFv(KQ*rZSqWO_&VDjO_8?%dD=K zDOTrXjgh%#V39Km%0^OgJd`yalPQwYSfeaN^xp0bef3vZjv?La$c(P zD}!PqPiD^wVfbBPA9yJG)s`HG>ZyF#lP9?iH90c#gu0N~Ul_>4`1-*ZwceV;i$$OP zO|kR#VDA1N$IqGOq&~~wcVaw^A7#=evl$9R(69=r23u6JEYj>DN>iYRT~={pg=-b&VMXUQ{2=gk5)&OaJX zm!b&#PuY+YpTpATNfZcYOw-S|)1?MacRf&v8}xA4;Ek8B2?K81vb;x{WG>3x%xfs? z4u$eH$clQ+BoAYQ%uE(!XbYV&TR6|DjvT!) 
zoQgw{%>5}E(vn=Zze!_SogA7j|Dk#XTvbCC8F6dlaI~!rXmP-sZY|Pr5FPy4g(LAb zND=Lh9nsYb`6o+sp@I2mTF<}veYNY{EoFbLzvTJ`lD0<}&wn}dC?SonqMJIK7Q?;M zaX60{K-in^@rTG6AN{++9NlafO z>wx$aSG`T3@%2J}pLwW$xA~!Rf0%HrR|pX!h2M%dlPfYfH_H>_@-Ql@$6}RZ#lD7R z{O6s)x3p69GHYQr?7OinPxa4dpvqv?lN?TGR_9&Tdo|{s zK98~nNKS|;he!Ab??`xTdbR}C4rBE>;SY85W4>-awVm?GzA3#64eP}ud{>qaKdZL> zI@m7u;mm$(ZufCy?z{}N%iMV!HjXNnCeqK}LiWWR>PaTt_5$VtRsc}pnCG)WpC5-Qu#D~dc^f;`={}@GjM%uLc z6hKmf;S;8Nw|pOb%gQ ztR-EC%JWbs3!jNHpW8NuZf(Rb6K#&=i$WSkBy%IVMCRtN)UeJs)!ldtK^unBwud>J z&0IMqGrMxRcf36n%`w^Y?EiHkyZC`ig{v{kzm2CdMEc$@ov5=ngLoS+ z9^Lj6{s!^G7QO#~590|iP;$~V=yPG5XB6D!N?MCLI zaSS*$nY$5^)jpwsl96fRjV|K5t?d!#r`LYM! zN}-;KuSks9Io>ll*q)&8%0Q$ccZqpp`E9qx4(ZZ9B>xD_TJ(^Qm zRy;mbf}!M{WUely&(nIS_?K$QAY{b@?LP zR-cyRsq(r^<9e|N>k<6%gSygEpO=S%hz}5E`b>A$G*9D>+*9$7*^+xo))d#y_4dp}v?N6)*a^!V(=n2W;s>fkJQ#bN9^9Er&;8?2-+Y;KW8 z%#}ReEs@Nm@|WuTFmt9v2FpCuh`q_)g!^Z)+tiH<-(?M)mB>cX;AhV&=9Fl77seGb zJib!3>~u{1J}q4;+m&-Y-RcqL{HaA&Z1!KNEVv8vddJG*kR-xh^wlU!=85#C`?bz!dc zr2AewA}(0byp`kuZ%^e?h8M1%2VwX<0z?nBIA1b|W%i-{Z@$L$Tz)j?`d@d{g5A2J zHyA?XWMkax`|@g4DmKDp2`wIk_0kj$HFBfd54rEVX5xD@Pvd$`uQwyH#})OkzAhK- z2eHLYa(k{ilC>=c!#ZwUZ*POq2=O6~He}|i zHUoLsDVaJAeK4IKNL5S8h}>W;_X)`_+B1bz>7Q;jZI0)|k81CQUQ9~%6rZ0F9Rggr zZaV^(CBi}8q|1g{acu5x#(mMDl-vxUNt+@X9Ihc5-gndm(WTFE8jiO-w^}at{8KxN zSAQIMeqjW$N5cp{Yeh_NImgR$TeoVy#;=?6`HXt*D;(sM#(a}=PnLT+ z0YhBqpBq4NN-VMYhS*KWCZPRm1IO8E4 zy+46wy)Ah~0k`D&YG6>ngobs|TKHDQ%Y5wYvSB>=*8p944*SO@b1}tw2if?1ZU#wxhMm1|Q{?sG2O1Nnw{~5uDp+@xT;UHOCsT`Q%!ktuKhFzb? 
zg&Bs7@10L&d<@%O7NF_t_8hIj3)_<_MrQk?ABE#rwJ#TrS)=n#JfbrkLFPD%$|Tp> zs4JZ|9W1EW*WjnHAM zb-CzYljw1D8m^OS5d5qfC-zD{!dKyO=1Jf9ngh!Vq>tj}$$Q~zgf|K0qoZ*6bc+}u zjGW7&)6%qFL(dm#(To==zn1}pa(@^XYfa5XlAAmu3wzP+Z`GHa^IGG1c+!d|&cZR9 zDcR543TR#Zvf8oZq-rSZbh`!PIda~T7oQbZ`lQiq1K4biCSBNDjn*n$0`i5KE}pPI za_;q!x%T&~>Z+Lu%cBArB7B=oT^)sOB>c184w8T3$;Xgr@l6XSDL9`aqY`-7E}!(* zlCAydsXCWvOqcZGobGHuy#Pntw@H5@%N48fLs*hDi6;Kyn>(Jzk$>Z*2Pb*ghSiC) zzo~}bl01-B=_|5r$}0lvgCAeKCOjI6=qgQ`>3z#Ld6pm z`9PPp$A__5_T#vQPE4F9+PIUR?2zYZpkK5w#Pt|uk;hT1C}Pqi4?sg+LTiuj>e7NA zDsz_+!BIn5t!GWlO9%GYX0u831%>BFGb1Vj4}sNKyqd(j zL_?Xu3NxW@CN*so36nh~?=zh8{{2`JC;6iJvDiG7{D?Mf&{}s_P3v#KkL-yY(h>cj z_++kaPA2c19b1+P`{2g}=4jh9E1`tzC&jP!PUh$u*86NJ`|YhOs?(Rgv_9(3avxI; zd=Sn_b}})Zc33+M_azZZ=HhH7YEUi`to$%ME>0_`D63)sHT(3Uy`$SqxUa$QvbTzrYoHEuOoTd zz@EPkJJKdJkNr;vlCn3159{MODZbE{y1ByYiY4ez5gk^xCO+eZ3cuW&ls&!-on=D3 zIsP2c75>dvHxAo+a$(vO9DB%2eo+aVg8@^VBxq4K4;7)VliEp0a z#PnK{iy?E-W@#a4t&gVacvDWc%i+Y}IO@mdXI=U>IP{wO=2tZNpuDDhP#xu*+|ywQQ{0Vd zbI^f*WcIm#swdNBF8cXM6f;NaaPGVG7*Z$mduO4>_i%&G^%&ako$_1I8;?p~&iWX$ zu(>M{A5sVw-BV4`LsuF_v#+^1emeQA6NYbx>%uo3)dJtdYii71T^2PQM2+6UiJI%m z<0+!oyzNT*d~f21C$c8ZhD(E_pS*Son^hjCn$?y(&T3S*`kNoMP58J?@(w1t;;>!j zYW?hq^cacXh;bx{_q*rrOwwwnQP4h}(<2+wF6N=?F6(WdHp6Ir+K{QycFg-b9mBg0 zw7cNV@?x0>yfmeq=uqyhOk|m8M>MThYk6HVv-YWhv8HSc7XR%*OY+9MNzW^n9a_Ew z&I_jVx(u={ElAv6O!5Tr@XGy9(|Vh0HKQQliVEFjz|s<76v~|aj;$?y$7j%gx+72J z-oI5?5{o;T5&A-ScQ=y6LtDi3@>=YxyG0#2smOhlhT|+lmL2rqbM?kVs zP9b@hv+zEJ%kWt4I=!XecdZ7|mmjNmUwwk=jNydzDOS!C9rd0pW~KYk>H8>#y2@HA zoNBj+MO<2%%EphPyO4YMq94cA^?^pL87}#wlIxwb&Wdn8V&^*Z$ zpG?V?9zTT*$wl-XqQ#-__mzEFU%H?4W9S3%p^g?_m+0h_uRF6|Nyc#7$y|++KKj61 zR!1iAYgj&-)*I5Z9u*lU)slSiOW4QKZk7QN(XQyoJm*k|Bf+VI8QMCQH##=V`=1Ve zNjgo|6=_`0t$RC;O}M3wcGsnSQV_kxA8^b;ekXFSH8PR8c5^GqEQpl5mpM;!3Ta|8 ziM5h*?jKwo?OxB6iJVWm-j)16cS{^XoVjY9MduNo)HNKwU0Ks5jTT{ctQQY;XZv({osBRuONrPi`p3J%^Ss-d z&$Fs9>u67+cL;M+IA>4ugjKXoo_En`Y;EDjefP1nsA9?Ew!*=E7scU;xnz#`rQUD- zq+0*m3%zJ>E*2P(p5ej9)yZ6|?!w5DkvP7YC|YOX`9I1de0VAY>*vz^lNOC)E7b9Z 
z;tT07cf@YGtWR;}@10W^GFo)$;sYy4ju&Q!3Ar`oyd!;tj#mrW-0PPb{@;1kdaL+P zcZ75Bm@XDq9e5R$#?-;CFP zj$w_N9T_bOY4O{UpwLKWhDYE%-<|~NpY)f{SfiihM$~IhGvV(Zkn`1A8@b1Jus|?( zjPH=e&e3+{efJ`M+*rD3Td-(m5$^X>gppCKalJc-Ym;43sZOpmo-u}_&j*TC6 z9;Q<8&5ejA!-e%Ug*;0~F1(Yz{GUu(ypw!i4L_fzTmVh$X?njMe!M>Wgr%F|+>K>H zNM;y(886wqGRIqAA-WC8F#7LjC{5RSQ&?AWFg>#wBmRO*pBvMA^jmebPaol}xDYYJ zlDnn$xF@9J+QymAu7jDsI~*%JE17|0vn)S>%fheIw4SCqB%{8y*_m}j-M+6+uI)tT zbWtn`lR0?JT=JDW?H5Q!g=9m=COUBMW&xLv$oxU>0qt7Wr~ZWx%4N0=Iei1MJ0tU) zlMakAOU3<`6XQ~R81+VSUdxTiGcVxS!dRZxmz-=3>rGi%pNCz4DK~3v{OSy%)*>VAc^!_V*~H%74jteJjwop4N=+G=E!%F14!R@7b5l(jWXT zx}b?!@hnYrV*cL2-CY?F0v*1%~R*W2k@gOHpL%6(lw+VmD3`xz7NuXo_<648Af zaAixk0Lkc`$oh=`&GWOEA{k~=ALihAsX01pRwz5+`P#eYiYH$<$Rq8jwus z5IyE|7ixLP`chrk47((^T*G?Z&9zuP?}qAn(2)LdvSIybxBJo<5{?H za;}zYT<@bxOZ;MPsk@^rgqxR1pta;}({RzyFOfC;hT8|`Rudpa$jA%Bwh-n51G?M%O zcIz&z>;FRi=%y{aIUn4@tl0Fg7ypXSw9Wzt=9)_W=I#j5gAHKJg?vIgWl`BLTjP3_ z-Kw&F*=v;&;=qi~F|?hoOMav$|7D0~<&z`74v*%Pe;l#9?P&C-oEVuw)O#Xxu7Dq^ z?d*4Iv-rlUbqZp%n<;g7+fp?q6D{GiFYz2jyU}5s`e{MKX?b|8O{M6k_~)P3C*i-x zYR^3b$}dK-UHUhXhunD4NpdDgmewilR19#zx2V>J!yxmRg*wJSy>EmRUIjL0RdMQg${-a*jdZyAl3FoB42*Ny# z*_Q6ai6`l__4MLnbO3f!;zXCKPct3id`+LsyJ9)Fx?ff)kN>DDqVwoKOgKHKEQlQJ zOYi0BobTsL^EZ>R5H`z`7;F0a%^>(e5{dI?$o;4xuM=*o&+m14-g7t|Wv(`*Qgm{2 zMJFz~WxijA;+r&?x~{IwF)3tDawdBHG|%PMwBC-YPt`vW_f(rLHtcF1$x!cpgv&Xr ztZN$Y)``!pOAxX65W?oCBge?s{TlicI&HTlyoR`WX=dKD_BmUH3LukKHm!-X(nR6d*qdJ9Bx3*b3+&>obS&?4!p0MLG&Cs zPww*NVHfE)j*&GuOmxj0U{;9a^E$7|lhAfMIp$xS8(pUVflM+qny}$+AF)2)nbEefJKN2ra zq*tOfEC0#k^mF0Db5qyaOAOvE?dgl95$Z?HOz_0=5tdew`LZV|%#2bb<8 zT)Zj2_k<#i>xIWw#joR2rC!K=+G-RtMp$rlsvFfO2m^Yvr*L4$;x;s%+Hb5em|4i^ zaOorbD$=-bdbe$JmMvSX`faf$>y^pet~?_n~}h8off-6$C-D@CUnT~j#L zZ4%h}wus1-+B^)stH%BjFS3qkYc3lxv!^RRYo_B|?82d6GQa&9%l*dItQY^o?1QQ7 zTvW)qtBrVY|A(?Q*5|+3L;2Csl*dbiTYoDJ!^OhY+2PB*zsB=Sad^fBj9;`2xGXt?CR*Y%{vDmjb4aKLDepEl!|wy70)R z*2n8grD}fC8YA(ITr<<-dIG{P#VcOUR z9P-apJ&Gsd{P%p(hWEfP*nwx|8T9)GwtFqYA 
zQYG4wYahtS5yH)u{rUOg6rQeh05Zbj=-r+Kk6=!_bK_|<#HdWT0fXNvs0-m{gQdti&)k@j((j=c>bvoHwIl( zYtI{U_!ud)OU6b%b()T8?TlB)3<~QzHSx2%0 z|5IqTK8VCpIHTCbVR7tj_ z&sWi58@VtndJG1}kl1DTJox zao)T+n}uD3op1@JU@rd6+09(p-gpju|IJ7BnvSMOQf0c1p7`HCM0S$b8Q8{RTQ(5AhtM_sQ_Z+mT)zE@# zFVg60mY$P5Vn`>Cz{Ca#YRw(~qVdGbxyvL!2<=b~r4g zN^KA9?=2uXa|&mKNTO(C>{DX)$TcjDv}PC1nc^P&YUgy1U% zT+4}Mw1U=5){Q7xAvY%(WkBp4(FBQyo=tMXk>;k!)$B`*Ds)E!=|(^ey=tkhK+l(dB5I}SRG*vN9I!P*I>?Q4voo0?_cGh*qF4Pcx|t*c zitlEVy*7b4T6r4RYcckN8a4Bk>Jz5-f1I6VSC!k_#T5|{4D9X>klb{vNq2*Qq=-m| zv~+jb-QArWJFvSuL2N7xp80=1!hUteIA?fnx!HTIE9U%74A?yd**)~IEy@O+wR6$e z+ZmT7K~Vjch+g?x$huvOI`VMyPLcmf|E$apucB1EDy+Ih)z%Jf2<0xR&#u7HAGysJ8IH$@$ZZSevg{T z%qM1nXV@dSLnUmx79vHp442P$zzv_rLbtmv_SgDCVTch@x>-Zdrx0G($pN(u!JzmQ zT+e0>)%jxFy_kvdyQt$#>xOq{--)<%4Vmbaf}J4}T~o?=$9pI*a-6)O-K2gF;n4Mtrk{+t(>L^DY55j~if4 z|62U`l8fMFHDtN`6C+m0!n{@$cGo@N@Pvo-6?ddA;XKyU3*FuXaAuf+=#I>rcu!rj zLK=Bk)#$aQKY41mMPnahY@xI8+?%mUAwj59G)@G4Uu(=S*;Zg&p0)G<>u*%zi^QTP*X2xsc31INvVTrG1- zq0p}he9z9 zlk8s#oqWz|*OWjbN)OfHj!-?GhARhL*k9^0k8Cz#Gniv#wgm0(Ct}OZB`BYwjK%&Z zh3!NG9G{v(|DrnPY_ovwk8Dij_ppADBQ7wX5W^CuFI$*Ol4=g7W4235(h)3iwH{D)Kaijdp*P$ZvI z!EA4DcroL8^ad-`QU8K!E8Gm4fvjiIxJs^A!Ylfu;&Wm0wg`)lbi}_qoe)di#KAX~ zm@-=z{d@S}-?%J%TQ`*+2OlJ^BEOBFzYL#BXeK3NL|_#bbyGy>$7kZ@E&9h!cw#zp zw%rTe(SLs`s_wFeu=m8<6=d0ZG8^nk8CG(C5i?1vs?U)76#JtKfxYmB6Y^jsnGvdfIsC5m{a~w*g8(c8hckb(GPpDJ9+0Rx!4};f<}Wt)5ZH8R6!kjHRiz&) zq`DTD(}R${uoik-263l(OSB%-#BJ+ocvYg0pPA-3)|7(=V{-5RbAlh|bz01TIT2Kf zbEeeQkCA>4bls*0tb7iO^*?p7QY{6)9#6oG274q?6Oy59f#Q|SmDWzhuEzpXcUPm+ z+j!i1$n*Z1B2PY*v#9}$m3@ANSuzYM(=rE%{| zKUg;f%=r3G#BC)fc*<0~_`tkE4QG6xnTz~K^f|r?fT?35Mh64NcPbI&nv0zm>3?xN zD)MEwiGIU;kiVrACj5TxiLk`1hC)=$;m#;O3XeHEKjAHbjNg2OPtJ$)^?69~QO4b+ z)R-?;hwjAbxL_~g3%4d8i#c1}EfF%@1J}+b;8CF|+I20*rs8a7B~rKHA{{?+NBd3) zPFpX|zMqC4^z$wPc|$U0SkYFF?7adP^{IColZE*UxubbI7jm&>=sj{Sxy(vfyYi1X zvs?w6&(lwJh4<$c8{XAfDEecCN!-5==@5lSE&>~_#XCKd>Z#ws1|5L zE5REZ`{|*xxiyB;=hrKX_jK|!X7DDWEQUD%CMEbenYv1H2wDzzhg|O?0^~rRiX$83 
zE9Wo{PT0~l7bjz#F@gQ*#U!$gPwH~6P>14W={T!B2cP6|8bPqgImWtpxyCV7}=+ z1m6}|!)#tQ&Nf(KANv5O$8l)k{c!sZ=d({|<4RaLLOghOUiu=seb#{MabN6+)Z~!I z1!3*UmwLgx`6Ir#yCxm$xXZ1hUUYm`CU*TP$IZ}Q)U!Pi{S_oQK0F)~_Gt3|Z3mT$ z^zS^kq|TDHPt+{DK4pUO&nmIMGx@(u^WZbIG8-ya*fG{}XM;eu>P-5;#@)kz32Wq8E<1Urzl5b%e`r zg)!rrS!v{o_B&UO*2nqiz&S-tr=f_7cqHnhG~sq31kN@(m@|=nR`TZNrcpnr%xtjn z$;?@2cHPfPY#ovf2<%h7cdMo}0XyS%TFh+jTLKNpc8|f#RKgSUx*cV@X zlnj@HM$DnEfZy9Jq+F*yGmg6T)!f58=l9wZ@h z2M|8L3b94G&{|i{noGLp`42fHB2VuS3cDgObZ;G*X(~ubwSe-+5`0)^g$GW-=vPNi z+|IFZcUy#4^zrGYEoRnad-i6ZM27+D)bmfpq$(o}Ftmr)qda^PR%r5#reA~J&lk)$ z;cTLIWFa#-ieVS?S0pWdDo%Qk*S19gV|8mGRMI`0c5SklG$j3sA|@I zXY%o^Hec%J$wqWSXT4T2wu>6Zc)4T5YeW24ORgewCa-ai)Q~w1r?1B%E}oesGdZ8| z$VKJP5~=$fsq3>Bypj*M1HR?rTa!7=ReU(Bj)3QKD~#A& z&RRMjucwzvU7tzo)7V38i06!bV#=*BV{_3_YcZsK-ei*=*faf$m=vlG>v`^QsnXGQ;u7z*rA!xsVx~Gxem}!v-`#d|W%pf1-@N77~vBx5{YSg+E zU?B6ErG4HItr1Y{@k%5zPcM@-;6Y^rq|PwIwY9NWzt$R8eyAZtCmglZVQK8HK>F=z zP&Oyui1k_ZifdwAf)#dO&Vg^CI+E5K;a^H2UQok!_>>0@jSI)r8hSB?u-{miizBI3 zkaiy@?eiua`67-S{VvS=>q6IV8Wa-9-$=57BDn`ke%s=Re8Q=rMq|!jrD_ivjJ3{+?ng0!#q1vc>Ik)1NmT@VJ*Jh9A|#EmcblTaf{EWosO#hkHO%G@MTwK;c9Ywv!$EBdi#b zv46z6eRsv>N6ZcI3BdL@X4uff0%0h?_^+-Q$j?XH&UkVvG%!}S0N*UwTa)XkZXk!T z%s1>|I|eg2!w6gh9QaIj4A1hUQ+7x@9)^A+WPRml+?0M=BkA%}t+#&@ z7o0u|6`GO^9m7&rRXLGm#jYoFIb&Y>3+ zo<0{7H>u&f^>k#rv44DKgVlDqWR+5HTH=X?D`vrw??sLN+vLaDxP84G%5PW`sLG>X zg9;vhHph>#Q|B5Zri>Kp_TR3_fYf~#%j2_JS`S!9DsndGFdPzH&KOT$C zM!?A-ChXV9T~5wH*hBVZYE$SfiN?2L6V$jA;bJf5(H)>4tTTVU>8r$HVI8&7`m^+q(as;T5sBy$PyGP>YIQq{(80R~DyQY~BY(K?~VWf?&Qx9}N%8@OEh~ zMsByq>w3n&`lLwHby_;Qlq< z4z|yO@y<5^floDHNPV&zHFMv$QMWO;56Tz37N^cIw>r%Y`bDNN)g%AKClBN9aIPrh zgA)&9@GFY*kUd3+3C}?NnPNDP9)*@u`$aD+E7Vkx>#C%OeeQu+?VpH?@|Ngv)ES>o zF$?!X6l!!9;n&z|SjARy*Chv`)-03@CgDzRU(D)ngt*@}s9%wTu}>V)w49kw#R>TD z3ptkeN-+9UGGcy|N$r>Uo2mr;SIjqm6W&l(iAAci zJ~lonL9uHFis`%F+1vqLa%A9hdK~(wSYr}%s5W)7Lj!l*mjj$|U~M2S-;2YQugswz zR)QSk6wGU7E>!bJ+i~Z9<>dgE3Vxv+rzRBofdRYnN z6jRZxR0e53FAWO4pnTz)m}9OB``CEw?x}?a4;#)w$gN1Tg+H0rlS)&Oa9$5XsE3VX 
zF3m^xYQ(kch!bn>3(ri>SC&PPe`JiNc{XrmUGUf68FP1#f88k!2ROg(n@wNc_bgoX ztAez9lKN~pgpT?uN~pUYGnU!izQ%B@x5ePO`4}?L2`MTe7~PPBv`wZscE1=oBgjE+ zC`I>xzoPHOU!wOm=C;(&KstRIUlv&+i1T@S_G6dBLh-2Kw4~ z;3jq76aPA3!jn80<~YHiYb=hL@y}TZ$P6vV(FhM+@cQ2j&pA*`T9-kx+1^jxw!LK6aD7;;VR!t zX^9zL<>jI)EFtSTg`Tn~X1^N4WnnQA4`yJ^r4sD8(2?xYc32#!3U6}Y2j!b0#MuY8 zeitIPj~^@_M$zwAfM>B5c&%6in;v;sh+0VdJpGp)&}h*ldR*py%y=q1sZX2t$`ZDN z@~MyEOrP`3rPL?SxM_eI@&p!g&UZtH@3p))4qUk^-tE#tS_XIQza+@)O@Bu)`-HjF zv5auUspMD~UZIa+XDv<{&ql$qTAV8x3Y)V(MUSkL-Ic6laL#;%Um8wi5;-%9PhChE6fSggO7?MmOjmfY^e?X2tlx48415& zOSCK^*K|`3u5K+x^G!v3R(d0jyJ@4_+(1N~kzj3&1-|fouU$tT`C|`ge~pFaULAOJ zB`>BZoijw~^-`a%bi(Azk4649OXT)C{XxsblXSM}++`LdzXftTG}0`ZkboF%ZE8)A9E>;2f8SittQ0>E)x* zeh3mRUWicg%QD`^BI>9%VlF#j{Ny~;AECc8&4IZiDTo_jhG}POnI)D7v*>E6pLagI zQCLs-Cx*YBj8#KCu=Kh-;yI`*U7?hg`t$U+wW_ zB)>;HwJ|$3octx`|@VyGsR>ftcE_6H{Gzw_m22* zWHJ;R$$qysL_N>Vp&I;7saqj&s}E*z&w7md-Y21@c;J_X4r9xtu3M$m+c?H|#KWsi zV%%9x)UTg{p=%6*m1cN1C=dI6Y)~-F9}?SW=+D(h$b~}4=B6RQvPA0p)@ivSb~$_$ zGOZI}`NbK|FO6_yi32*bKho1@uGd{(?EFd%(E=mps4&lsitp;8GT6qP7jhjtzg;!8K?RvpN}xYL~5M9xGw^->D_T%Q_WgZk`TI2Kf4?vrogQs4v8`-%ihi{fEU z4n^(?&Yn46yU+U5{9rI9EKFkWB*E643N-zjjeze}Qa?}HoWhb3irAn0QG9RKME8~u zZmZM~7U6{w^9=I$d{A`Fj=g9B`owF(Yaaa)o0$J_sSax%%0g>)J6L;g?wv@l?MOpp zM_QviCm#;|Y@qXDI=UIO0h&jQp#YH7e$jOHy=%XIK z&ag(?FfvT1I$-YzKSXRyKofNf9=FPH!D}{-Kjq$EW*FYdwFw>0tG2pLMcgb)w2~k1 zf3XxVuF?Ns5)4=O7|gPvZsTz!e%KdbC#3fuNvneoAFRNvocrQ(p&EvL4Zs6UV_ey6 zg$s@OnBRxKpy=rk=?RFXo~qxKV*JaE$_s+#=edvY> zylcZBWkOqGj8$_ucWWVk)}q zr;+*fn+%kn?a1^#BwVx1@ZS=0ZaZkG8AVJs9Ndi)$X4IPzQ_T^82iyHYk3 zQ|G~?W0UY*-XuoP(?t9EaX5EQ8v)dtHcVl5Vs{tZE|`JtIVo_YPVi`PHLk|f$Nskl zr;c}l`T2w5Tqg+*$fP1YLkD~O$c>cC#}^$3gdCUxcX~5+6*2EQsSdRSC({zKHUF zB$VzL)Bm1;%(vQbE-=T>&SZF-0KHebAlNz#1B*It z+0!!-M%N{nI?@Jrw8?eXcEq;r?y#y&N0+}kSa`J(_Nf`Td$$^I1Ad9sgTILhz^!x9(d zlSlK0I>od4+*iRK>B-Y!XAuwAjwY=83s7W9&ic?IJpIA^xIOKWO}@=G>aqHhGe2mm zEBA|8knvt~i$mo9C+)EvtkTkZS82+Y&V znSmEKS-)_uwB;4|u;b~k+TxC%ifK4zX@uZ!73k`mg=WsMW4m?6{hTMF(^DPD-HD=S 
zQyKI(Rv!y`Q$gC0d8Fv1YiY)fG3&8*n9rEpXp3ad2@qLIEh1C?+%57)zpYrK<( z79g`ha4yd5kS^r%&ZO>ed?|Jg$iafCrDO^!!|3g4aUdEPyfFi%-PCa<#vCto=?_?C zg6uYORc?f%S{P%*2kNYh$TvM+j=vw((4*AIqF_5xBn&=l9+eUXf?SZyf^+-5!QBkF@b>jTz9Ahd&c+5x6-F%~nb1 zGmLtuCT3{nCqe048Lk`s5vN}=Q$tM)@~47Ox>*-@U-O<`o`)mDZQ<%V9o;^&XZU4| znJJ}ceUpu&ie>mWv=c^Hv_sSmRV;pChYqJXi*#~=V|pPvX_6D&pEE^^MAV1SYo1Yx z^M`rvl$A>TJbWL3i7%cD?;!f+2PWYy`At_@d&v41VDn+}X;)K|a(Wgndm16vr50Ny zIWVuNg|t3eY4s={s${U!^_?jCrh<>pe6jYFCSE&`kEopq&15_DP4PinV*+;eGlqvx zIWAtEjVY|lrR81BA!}yDnLDDTlP->#&crZp37*d5dCm82I>-qhGyR}?J{xkJYyUl7 zfm!>qPfzMiqy%CPNW{F z6h=RXqnEfV?t1`omHAM*qzAb~D>R&Eew?2jHq$qD#~}t;)L&_Rti!Hz>1Y;nrGDPq z%YTJ>9`%YD8kq0w2P;`!R4Z5^mwWx*d#n)ZI}KyL#X#X5S^K<$%gLdcS51%AeR;%0 z?-IF+cX74tS2;AT$?O#GMwC3)&0_EXc|D;l%OvD`=f(s%D9Otmh9-jO!Zswe}caB@ZF z2BTrB9+tbi1*Q$Dg~yRxgtS&l-ET_kTQY0c2TybFig7jc*~mm<_B_tF z=ac=am4^iK`8?)YBX3F=KFw5xMN|noJH_J{Idp@T^oDI-qp<&^jx*vZ6BHpS48uar(QiwQmS3xJ{Y** zUu7<$db;CklLwZaN`tPfKAi8@qH9(vRG(4jbiXSiQr?R(>kV*5tubIAd2J zJrozy;1H<~pTpJcwKE`7Py=as9@26d3%5TJ;rFkK`rhQQC`4h1wFyqynxoo37xVgA zGeaQ`C*MaQ{*DR4xo=$kJr#3DNY8Ad!spw+V*Dx|ybP!r=hfcxt@tt?T zese#F!_09nHpIT$l~_V{b2aK?AV>P+`dFal1L|XptfzPet>4!Vw#50AlhLq~oJ#g26K=WTdyOwz z4^h_~Ga1|L=VL_o94z{?5Yp( z)WOy>wLsl$Y9#8U)? 
z%)-q_73^^pvHJKk@$akzH)=zeStY>`JzH4mFh9M?5_5lelMxe#k(UKRgUaz(jyV9$ zrI7Z1m-cxnr(cWr%J)QlM?GksAX3?73!$*^)rA56E5HVNK6WJ_e4r#Peys*msB;n^;r4jOTm#orhD= z%$wY&0Beg5nBSxhc@s;FVNSjGSR2@^%t5;aHW*8-(5jO>SN}3sgL#pMlrr(zuNc*m zlR|0lG0`%}7(u5}A-O#g!8aYDM0Uh}b9;QU@kh^3SulE}hEzjpE|}rcC#(vce*G1J zU%Oz&Ts8dQd?Ii!br-B#Ct2lVW|Rx&^qYxn)}M#CKRMuEhM;Fzs8~V{=oe))tobE2 zy;Va~kOvAqB{012jB3tzuQ^(ACg+8dk{H&KHW*DF$6J0cH-#5Np}Hfsd%qMLqb8td zsRInXjL^=Ae7^3vyg!+T^E?>$N@ih#n?P)10XAMr$AHg;Qa?}H{h#XbKcaGei`X|z z3&!$Ou#$aoS85j(`sHG=x&s`ur{WKHVxO;SxMeX1XGxjV^=`+>N@&{oPRyY$ zXlk$@jP9%8SBVu~XQaU?$rjtvEwJHKGL-Yw5tL3I(xy1p+tho%l||428K_LufLxX* zqPb5$cZGdiHFL-3+d}@UA6#oAq50JaWz@@z=Uk%$^=r~{jZ8Z|6~k>_i9f5gu$eOm zU-D$HhH$Ryu@mfUR53uo5ZcvFNVvd0eYh)5C;Q{BM+{lIrg%2742G;*HI9@?y}vEg?T8iL z?a;o!0Xw;GUD=sA;=b?giNs_r$<( zEhwLeMC}0$MBlc@ap|^w34d8DV$wkv7;BQ_6zzfqBWVr_u+VUkz`1*Ye$Jr2c$L+gC~3Z+bWV z5<6DD67T5a3M+^}y{0C5Jfa?&oOQKXHn7VLz{sLFME@tDCZB#t`houKAtzHxKSJK; zQ5fRbEJ`^q)mjpTF6^V0-UnRtRwIF$(e?u?@RBoPNnd}2Q$IavKpj$^)?k|o`DX(j zi#sn_AD{5WS9?vQ>AIm=BOUbv>Ah_Ag#8Tq_~aZByNXTjoPdwSh z{-Bo!W`3oPg*(n)5qXG_vqXJP1msr7au;ug_!Wil;C$(3P9X+;>54a9K8SzJ$FY!! 
z!t?IxXbrZ*RU`V`&fDU4XR>C$CBW^p7Fq&ouwpFPoV{zLex6Z$n=o#FUMy|U#<_q< z#4OXt25RLhR}|p*L332xo{38CVFykCZhk2ymoE=xc_r9%MizyWW$?mH17+o2FquMM zcD^H$stWen4(z0JmG9CRVYK!){e*>(JBUwn1JXC=1;9K;hx$a z+FNoFOx9uilxXIFh2tf6k+vgv&UY!mm^b<8q0|wwjeo@NB28E-#-f+D4z3Nh$GXNG zG-o&)%2vLqko?oY{?_@PVb3O->3zDjQ~uzs!LuAptC0zhhc>) zem=-u9D&;kIv6#t2#)S4*m#z@E-Cw}q>ZX=%GmKse>n;^s6uCzOgU81oQLYJl+us zy}Ze-NkdH{_cDrANTP4`4d)>jWcy*IT$AuzsfNE>rXpgDHuR`DZf?m!X1E<*a0je$ zH33HkX=BmKa_HG7V95k}Af(KZwqK}(Ee{pYF<6DX2tB;DG{ERa=BU!&SfX!!ABkM@&&C=Ooz`KP^3>yE_S~ zm$h*7eKijCN`vbBTB++gY5ADaa$}^em!#d}Wd`@6&-j=~ui`vhJq&#>Gv8@|D~?(7 z{*X1pW*;Bi@lS!wOE>iIR*uuGPfnK=aL1+$$3NWRc2R+BFL!2anP71<{RCt3Vf@<> z`>9Dg>mP^J&wTR^!m-sfuou=d@`rv zL_!2)ZyV#m`U33JPeF8kq10RlY5fRGwZDr+8V7~a2?=>f={SFD5~7lwVL56xB!{W{ zP4mU|D{06%WDK7h6^J{LgJ)w);p);IxrMid*LVqZnEtJFZLTF`;a-plk<=?-ijy-&ws6wlE!tLT?uM!=B>J8CJsa9cSU)?;)q zVM;mHBaK?5N=VD~`xmK*uj%BtY?R<2{q~23(;u;%c?DbZaFu$7o{a(6v>*-l3oLL) zwgNHSGjB*Lm)iH45dTHgR-O^^)p`JTg^?xd*hQbPEBEEwTO9B+IRL*0WZ-Fk&c~W7 zp=g+eTN|oiT+#^#{hGw&D|*OzAAka(3(p=l(D<7Vt(DwUl83vMe3ae?=v9^}NATYQ z$V@7gnqMF-hsQjvEAtg!3%wv!v>x^c$We-Eu|Su$G^~@ig}t{eY_(%?_OK>0-;=X| z7=)2~VROGpj7j<v0_D5(Mf$NGL;(7&$X$*#A&L=m#ITdhz$Pk$NubT+T(<;+F>*#Xk~IJw^{te8_Y7oq&EOrEr&R z7AG2Rh(!~)+f9GJ(Lt9iRhaz@vDz@%7!DWBeotn9DBJU#SmIChj zJQdBH^R3g0LHPz9wEydX!DFcZDR-qG-Gi){bS&v8LB~PWkZ&Xt`E?EcTC|C*=1XGv zF8Ys-#~_C1`Mlj$2r13OuwS+qdd?ryHzp!$H*3>CucXowFW1u_xRrTH)2Z{fv4eX>AWWmeacqVG`VA~cz@K#P@;En@ zlAA4UUb5?s0Z^bXD<)9~vvOl`U;#PEtb4*=Qg_JxZ{%wS6tyHHX@e1>S5;&7%q&cp zQH=|qev8E#kA&u0UHqLDgWb=x;TcKZYib_c$RG56=>z$!cwBjGthm0}6p9X+aNVwnb=*T#xfh_BUZu%4!MHg%4)1%Kz&y7S$7=I2;co?` z)y+tIA4!{wc=x?Beg}UO**i4w`LGWTM6qsQ-e<|w928F==gXNp3@hd%$mn6-FlMI} zB{K`H0)^@Waka}65lv>n<>wI?mLq{`s3nT($rB>){@=O=Oqf6c(FnMs)YN*5*{i*d## z18EyLpYZ9Ao;7cTQ^!d-?d%Ev$p-j0h&dG{|J#w;;6#cSR!&dEtr50p*H(rb2b`O?D77HHRP2NF#f3?bnlcgM}p^I*9u5m ze@d&%YxR@IiZMS$_Xu6s)_EgiryZ7!u_AY-jJZow(90|WW+zghbwPqJcrds7BBn$5*~Iur>A;D^^yb+`3~mQ<{(th8HRpSv40_V{85^C$objpvx!)pR3Wv0 
z>_xy+F?33cDDzUsqs?Ug&osfnYUU1nr@w!p1^jO%A+LWFn#=4FGPD$figWNbw3s|s z>Gb;2KF@L18!;vRuaNwwf-klKh?HS=Rc{9bS~71b$r0w)l9A(@h`gT`2zp$Fz1g!d zj5T>kX#rU8D?#l$OEmC)@I64^prseS+9qPuUw!o7UWV@9 zXXDNBa!7j)O50BlX|F`t@*gpE8}(z2t{6pr?V*p>csq&yO*0Gh>JfnnJ2z!H5G zN-?oG7n;$5w37mgr)1oF%q%58Z8|ISSi6FeWk*cLoWhu)iID zA{(0=xUc;DPq>a~kE&c9n5YFIV6+jxL)K(S6vA*N`|*k4*w8PQ+2h9O$~pplnK|dm zzG_`NJb(N~+{x3zj!z+YN`Kk74CaJ<&qsD4v%E*pQ>H;bMR(@H)7Ls`O*SSdR7kxC zlJ!tBI6TwdS=o!Tfo?2ruOB4gBdt-&qsqkiR7OU(h1 zcAh0Ick;-`kubA-D=g2B#jiLkbp2!um1$(XC}beZ$qfAz|Hl@Oz*#RVjOkE>us*3! z%qXD$Ogi6utl3M^ea}Bpt|GyEi5EIos-ypE^4TtAWBvha^o$6{KlTBiF4KE@r5xUQ z`RLP*x_c?P6-jBW!sz99F@SZv*-BSj{LciBCR$bhIZFupz37^xu4{s zzDz+xeJP}!d*=^Q#*ZcK;Wb+uyORCjU7(4fvaX0YorS4Y%!l3ThArxe_;mq@bfu4o z-y?g~N~wJ|()wYg)pcoS$k03UUc?@tKe084S`vMfzq7`TXXI+iI$-(BKs?Z34tSaY zW@MD1;n{42Nh%<1E~2z~n5}>VWbgS}^l-#_UG&BYBRUxtN1l?(ECyq3K^l-7@p8uRE6^ zm^mEte92{;qYTL~MLG}1;K3;}lqRd7iFMDE0aMVwp9j6>!!cxU4CXu;g$h6Z{`lYe zIaOgpv^*{;eH7K?RX+Lb3zuDbXwtR9gwc6;anue|oTg&Hn0SnkH^9QZrR)t@7tATg z?dQyzFlZH53-vL9ewpavPPn(s7u9)=aHjuVzJ4Tbv`fX!wh7cD%!RG<3@kcFJyBRU z*ik=G`bQ7l-^XLz9z&R_Si!-zfPJttJ{@p{yc4-;4eE%cuKdurBxbVKB1ND3vDrVx z&zJ1~I!wjwH5SxeJ0WR%5e((%b6b>%==Z6xb+kp)#|r4sF`}!4BO4jkT$$2wil_x=Z-UK zIvO*Z+zR(P6rd~TOeG6LvA=H`J-H@0Q(b_$^0U#hqzEs^&@&{NV znf!8{$!25|dSQ%j7+LuGk_rEzV!0{P|qhd=^W!+u_Ww;aEjBta~~0;=EkZ zb$lv@f3bzKUlP4_^v5N+V)mj6+)$=|;tBnmQoa{yf4-k<6`(M#3t7LKc=6K)S2pRP zC6)89r##Q^IpEYxUwDxt?^b7kuB*yWOuojpFXb5UxE*l)i|}gF!oHEyah`o;z)xE& zEYC%#aKxhVL2w@(kHfF%KP@iDsGLlMf1q#MuqXN*I3!jyk*A$ig19;@3{$X2{A=pV zPXkN#xnrf?EKFCl#8vk?%yBHD$GZx-=871x@2&83G{B07;po<+hhbZo9ZY@sHs%+< z9~^|ow`W1l*bJ$!s85dJ|1X0+PMr?ueXC7$ove!sM}sirq9KOWT0yd*0I|#emn$7g z=0+4`hMQmzI7hjWi(eZnkgDGY>n_V-!+Ujn&2@)chQa@HhUSLmVG!Bli~qRe=ELbI zO*TTGSG_bEG1cx-Kub>|9$|>$)Jsg=q=!45Y(z%;q?$ER{ zEIpi#XZOmGag^uT>PFG8k19HL@jytCK0QxPh^DXJV6Q8Bp7F!^f(*Q~Fh=|Z>S#Zw zqjEUEhgR>zDCMUjY`rR`%W)-JWMyR!`8ux*pb71{yF;m+7)BPWBTJV zxmUYDF8`r7qG#h|jCk&ie_QoYY-o=1R_4GtaXz2#g&*-TXr^w`(}TGR?B$zoluG^c 
z`T6xFd+(H(bygD-E=xZ@Mjym<`Ub!D3qo^{0S=ERkNQq7-d?eTnQ<7l4~oI$<%VeOTZ%R7GBE5Q=M=v> zV5svwF(pD1HMU{sKiU{itht-on@>ipJ(_ca@KiG%d2Kqd=}8Wj3U^no)ZreN$47^E z!f%)gs^UDMN!`T`H#_v%k%cdu7j)g}iP-7XT6|@Gms1JaS;U!i^4pRU3kwN(+?HFn0w~G>5`{8Kj!CI>{By8Ey;32f-o82LL6UtgBv{-$3dn{$-A^q=fF;D(*?Wi(+8*#tyxC4}HH-fKdLN{bEKy>t(&T_|yaix2YjwF4WdyXX**Fpp<8W zp(`xmMz5+{wF80!=HW(3K2D#j#jzosQQ!7jlupsWD#=W2TWo;ajW$qOnunW7PRN-v z1ykaZU=^stZ2Jn-UrNP+<5fuP(-FGeTg3g#CQvqv!_g>hbn5I2%?ISIophw`awcK| zvoZRuBeq1&Bjdgtq4WzE>h!_di)xtJa?7iaWj5U}f*_|{~AAk$=WJT)=;2$_e^3vfP^dFdy;F~l~V{=UhW(7qh5 zj_GLKRfSVsyW(e`bHbhHXJtVU%Gs}~-8I8p`Vpq@x5c})ZkW`TjA*`RuZUu#pGm@F ze&0jWhGD4wM`6EI4G$iAV)bjDcRA$E%*=;%Co3HBbwLaFfz5R2sD8>P|i~}%RD+!9%_0VowDK0mrWAoB79DUFi zlJ-xOiQ?(}IkJC~!JTFChmDo~u#7dbf(MB4)s zG_H=r;&#mUm3Kku-9oHfZGoQ|0kH8&!_g0}C}F<+A)bX7w^YD5_P%KO{Xq=cN3QKY zezymaYdp;YT2)!l7;BG4yI_nSoPeI}E1z^E7qd$YWXLg4lkbki{3qh7vMy@Qhrq{A z7aa~-!(thAC4C(5xy>CTSi{KermxJLIq86>5^0MM{`ur% z*&??t1PYvOXx=r&^jQ^9>qE}}>I$i!H{jJPkumhOXtB^lNeDA7Ea)pwWA6CYESycT zLC3n8==vrGUndCcXeh%Dt!$iJSqlBU?ufVgAc~j^yu{U=XSf!;zgb|ZdnRo7`^e-j zuy%U{CLc9Gi8}di&r(=l6{9Jp9rkp3FG^I@F!i=K_Ok8?)3hQNA_te2+2U^BAY^7n zWA6(CMDHv{S7wKd(JaND+P-jG@*2Hl#Dk8MCuzijh!q`9ANvKiTY>k6aGZ7|Z zjN;eD*q@dLWwJ`647y^O{44R=k{rj_AVi0xpg)$AVX7P&$!|gX79@W{)gFb^a4J zUraG{0B4&QOt5LIExsgFU|1>Z!rf%Lhh}2WaWkynSA)#0MQA)+hhvfQe9!NN%T)se z`vu_NbXDA)>4?4GGkK?2WBo!mn319SZ#nZY>60;7lY`+ZWfNl9U{BA<1sk|mz4F8cDn*y;CmwzynO{?3&MtUsxT z*zVRCMUMBK4+1B-TR0<$gcg~+HM;DJXB9#tsSJ6WI^y`i*TOMc2lF+lfvsgeC3Rqf z2Ggs)(;laKhT>KIER?Gn~9NyET z_u0bJD;;h|MtWSuKPr6n^N&7}W(O8dM8F9u^;{YUZ2ZzAk!9k4@N2d#T7kmj6% zC)DSCcx8g5ozYl!Ll?gn7h{<+xi$GE(A-I1a;GoifR_qp(1$$Q)(D$Jow1+3pP&pI zw9OA>mT>|~wH(obeA;^M|K?6Cq@Gt9zWTp}QOsm)Rx-!u6hpLUJ@_Ug2f>`f-2N1Z zt#jkh#(!5FT#RGK@{ziZHFUsdF(UArNE$H-
6qN)L&f49SL>cKlHTO+&hjg&_y79XfrW|PjdYh>o9t{m8XkYt}W)oH^$md%t#d?F*r4FN^oC?wFvegLj;JZW)k;kqa#F zC))+pFXAwrnHMGv#d!B4gS-TCn>+YEJ9?h0!!XQH8^D@Y0rxnUzCr9TRED8K1aO$Vq`CCteaXOMb1V29 zu)=uH1l;y1K{KFXy($@n;|3a9zN#+jK|GsghGrn(@@k9y2B z*253-up=TFitAZJ|L%i#w)@29Mn$}-Ny7CHYVf8$r}T3c6ukMpk8^}}XeJJptHH~# z6n0aS;dP?C!{=2>kA(Z!w<5e$2j$y>v8KN&NRa^Lnq9zDw=a0UZ;DHIHLa70+jbvN2Emoo_vfY zqo@#_nk7*)>b1yC)r9i5K-^fP3CC{s=sY9`ZBk}fGtVF4m*R2omo>9g%5j@L@e0nD ztn%NAK8BCQj4Bnx%ekY^MiuP+rjJYY%=J8Ijf9ufm+y{%O4c;2jLJocUjPO=79k^` zCtUk?#a}rEc)GcB|Dp;tOKZ#}|FrWS3)~5EM(4?4P?zU?a{>DeFWx^)$zRI-Ee2}+ z5})R%!mQB;8G77%G+3bP?zTQh({G_sjL*{&`5P%lK;A*ICV!h~ALWEB$wEBxRpaNU zhonBa@a7J6&f*ZfzMp_dRRv_kEkccV2Iq2B_?s?)?nTYw)o3L=-s}tK59+wBK^|i{ zeZ5nyk^apWQpXa|t&9b<$rErg=gPpl@f8+yeYutctOJjj%$UbIft(_@>~FqZx7R z&DBv)PSiw=MA&RBgvO~~LT2Vt;SubQp7M+EMv6M@XO=ieu9{Do5tc5d$K+Zrii*hD zGg^iCPr1;)zZ?OPLoi{<6S47+3=}gQ;mN-8%3Cw6BU@*vqcNVc|I6PQ0j)j;7`w0l z$D>kF^ocsx>wWQ?bN1DoqsyKOLCqf(G^}Hd#OHhHb8{$MbjGA(N%-NbkCo(PEW1H| z8+C`#mfhiUxmAowQH9}t4=k-!L)L9`^k@C1C*y!0aSjOQv+0?vB0jek!G~Iy)6A>4 z3hs;U?>~w2t%_Le>WZP`$v<}_H;Z${?pU+3j+gNCt3;3qyVpwW2KCy0+&>4(y znXBTzZpvss6ODc5a_j?4@SkEdHS3(espz6+a5(hH@jBhN3~S#;;Bf%2)1*QCJ|BtS zn^mx7ejxOpt3mFVHQvzw-pcPUR?`_aX5nbQN{{P=GCZ%(VfIxKPCNF-z~67gj9u&5Q# z{h&$@u^s*$N*qE38D`dqa1i9#el=gFrF(Sy&$o)yg9>EL-AW4hqXuSdf7gciD0M`QO5&XwQT z!a*kq=gi0j_&1llg$Vj43?R+Ditg?#40uu4;rH3m^Y%vm5PvkAM3kZ`hQ+Xl8mkM7 zt41iy&p`?MxC8}%n8`-s6M1UqzviROI0;^6 zgiNdzX83-0KD2Kp;4J5Scb}D^B`OadR|{~!ZU8D4ei9J@Q?P8G5oT!4#-1(oHwH8R zVxkESDLbR`djOV4Xki{bd=7ze@Z>#tSk)^rcE@wE`;i^F}x_k;77usxiu`Sf8{VJECPqKrD`>e=kq#KtA&aGRM8YYQ{n9O{9p zA0fETc}UvRQjF=Hg%q7qnAUZ}^#iTqi?kwtuTGd?u8$otrZ~yH=;rm-sJ{^anSoI- z9jXrpxqS3Ek&21m3bFCl2eFJ?jx`$UWO0QcyN^1bL8iDhBMZ}evCjT)KCW2C(vzr& zI++rj-V=vg>g9N^(}f!ByJ95kc#DHUkl>u&vy(A)SLPrm-vYa0-0-bSDx9CGF{`E+ zafS52JTJwo(VY;T@KyAdP(^%!E0muoKy8^78a}7O@C!M6`y8=tL_CHZ(?U&6F}bX% zDEnK4?8M=Cwd1yEoG%Z{cV6g}q61^jzn@eyk0Hzq0WX|zz9kViml&Yvco82-aqTN($_b$pu^`$wn<~~ z{OjWMXbngQhrq6%G6JRoR-4Gf-DQO4NgU`;3Wpu%_j|h* 
zgd$2MeW7qg1>tSvgLNSv`l^8K8B5H(AB5`7x>zWRk;t64mlq0gDxedzS}zJ$Z)H5- zv+m_rZRqFdL(e(~GRgG1Jf?=MAPnoKGJDxM8v%SD2UN&f)tP{^vmc5r3*?wj7Rnqn zElg3MztklIx|uu8Rt&7h>`V*WbM0P1^IQqZy2LT zawaaCSz*Wmn{M9ynfG?B~q=NEgK zm!@!Yb;fDgXf!X?#NM!C{K!i|g(LMI9XxMF!%OkjsZLDzt%_}t3m^&=v0)oC*j8mC z@Ul5vRQz!Bcszm@YT(+)Qn)3jW4B@{UY_Ygrv7&^^VKXAJfQAKF!zNVi1$rdP?KU7 z+d&`nXP+AXpD|8<$;ZuR){&A0NWb_>=q!FLR&gHQKi3P%^aBsOY=YibbKrE?3brQ# zkY1mN=0&VqQ%W$TYZB7QrRwN;D_#%6RlhG{oQEtfZgs-WJz9u+MD6zs`q;a1N1WmS zJ5?Y0@U-F6qX020l6c(}VE>8k$Y^R6;mS(ranB3tr` zjeFgOv?6RxjfGrlDHhF>q7U(eNUG6*=H(Rp$xz4rK^9Qo!n#4-7C$e|!JCQPsgGpt z$8q{$_ot%1iMdw?Iz#_Jo3OA@LR+;X4!u=F>2(u4<^8}lPQ~4{p~>FkwOl znx*+1+uI42%*@(7S%Fy;jtIQ22lM@AP!7t)#&{cawhM;dxp>Hb<9_J@dGJrtaD{t_ zkJ&x2I=4~WIjMrU8h_M=X;T+&P6k5`n%CIFVv`$&_DDyLH|y>b#q?Asps!{*6yJOh z@<#2#%7Zx!*X?jDNDsfs50vJsoU`u2d z+H}fPMC|?!SiHxycJ)&ybk@gz%J&tL|{)o5m*=;LWz5vrGR7sxrSbWl&+_Uwrb{>t!J~NoAy#>AzBoNVb$JqVvWBh_Gl#{kbTfYDdq*f$iO{nB0eRU z;={yv%-EuVOs8^OADM~?b?lEO{SyA}Euzo!nb;I-fvnyFGY)gkW0Qp-{_f8=`oqp| z0d5Y^g7ZW^qrb=epHH#mZdZ&Oa6;UbX5KWCCo+4Z)Sj< zQ^fs{BFr9Kd-ppI<71QpDGXov#sY(zXaf%O$@WM7HGmwD7( zSeN4d4*pK4cl*HE*?sBD;%~G9BKyt9uqIW^=xPAZfOLrc)Lds-V;bwMT3-$1Jk7)L zj9ADsZ%K))3$>`nLNY`F)|qZt&HlZ)$riHi$tb&Kf&=D(_{d&gy?!oyyJ6mq-ikPOI7h8NC%HEWup;hC}fSeRYW4FYz7Vt$%WP&<{}0c;oh6~ zB3=Em(9zdH>gZ5B3s%6hCPOr}XTi|Y7~ht7VfFi1?nJ3?VIOwn9`|Y&i{O#dDn6b+ zEhbM=!Pz6uI2oWoU$-Hu*+2bTW(FNMC(Inl{Yry8wM03%*DnHlSLb1D%`0IT(jew= zPcm^`Fb)}NVB%5kwbyW75NwLb8KJP!k3oW(4t7fwBl%<|TE>^+ywWIi@AQ-3p8_iV zy~twKKu;$#tpApU&GbP$4zR^y`3USNV2;*?a_IYH;MT%&ewTm6Pm9iQkCnldYBOBw zt_pu0OLDQ3aeJ#Ro|QVH^Q&moJWxST&bzOCj=*G>BDA!2f=$g!QT=lk9=>xy37=yd z`&vPJN+zW0%#pm+ADzphp|!^lir<+pxi13=lM3;q>4j)Xd@QOi%OSqW2j0tPqmXs{ zc>7GKW^(@G6OPK&@kk!R9QOj&$v5JVs9Ofr)K>BN*F)j?MIJv!`67<6`WHRPXOquF zTA(F4?qtG^PJq=3T|AvyjMnjSI3ZUG>Gz+-X!$nLtf~a_AC9P?pD5pgc_D*x(4D^z z&HWKrZWN8*2gn$xD8dH!EZ9-cshjmv^gR4g+znNR^4361=l#Xq-V$A(Wnt?ETimkn z#WB`aBX(<|5A*cq`jdlURu0dzk45)eZ$#-*WoUXcJF!9+F&XB#QIv&e&$$PD6ar&o 
z*1oS)@Q1%272hZva;t##ie^!*)D4q;WO2NU9*XX1VSIOcNY|#L@`o*w4~N2s-u#iD z^%2bPz57G%)bACcrfs(vIOvopd>24QMhcFETH}6-A$Q-!$WryiK<0XxzsZNL@lY(A z#=U5NFWl?70%s3PB6Q|svDH}-BYu0}w6YfRrKqbMl!+6=^vV49gy-Zyto&k(+ROO} z?3;x@lk;%pT_*%c?-A+q$)3SC|W1mq^$b%!Y1bIcmx>P?c2% zoz_7ZYF^J=8aV{b@_?Nu?{{7X7_E|l_&>}H?`4PD&cUczsEuta^O5YDi1ibgADDGV zn5kV5iKjH+G$b80x@x#|iaNrYEY1*3Q0NqifQiwtFd#=zryO~^$Ps*3hV}vdp!VX9 zSkhk!Iui4-hiV?#WBtFt%@F2LvU!8muO6#hx%?<(yUi<$pt^*a>@k5CuP z>&2ZMl*~}-f}YLBq3g-;**FuEVoULwGn}u@WgULqKF6s z(3^YcR_>@~^>D$o!x7lHUJV`x@{vC<2@TXYD_`ga(+MBN{Q_B-o^wN!hbF?$(r0=< z1A`4saCDU?(&B=Va+7Su(t{snj*l!RS#3(()bJLJaJi)!km8_0cX}G?Z)L))?ym@{_$;Qqn1&HF<3K!o z9}P)=&eo_$zqBeDZhecGarjd>xHgMLSsG|!|EEcx@#uVW*5bLSBNw;+j2{-Y$Kz!h zpZ~nSRMI!s6j+YTfB(dCelBY?Rj_iD8nW9qhtvkV=sE`rx`&bXt^?(g0{q#Tij%U; zq4FJsdFBmb%N_-&Xh*>Dj}iv6E->4jinwXi16s|2dP5Wp_zWE!NFUMQ9H>&4XETn& z2-CabIeAXgnDscfMjNqRY?uw2gAo_)vE-C5ydR_>nnjBaZkkHBsGli2t=uL7IFraWAK|Uh#zxPObyn6cBlDJoJ0RZ zA1w?eAM`wPBQ3P_p|~&zE{YSdq9zkY(jJ%;$LCB3&#Tz}Rn+E6;MpBrT+9f-yh1rV zWq;b(hxNo76J~vRAy7IDW%q#T$`ue-bK$q51RqMq!DQqMv7?z@+ivMNR;P~ERkkQS zn1xC;3;1ucfOtwx16eY^tCyhr&s^+zu?PmmKSg=o4-vjn0jYN^d5<6$HOLqeXR`1} z+6Eg(&qs-D7(7&z(DPId$|gl)yjuYZP5+5$;mm5ftAa0_vDMY-VDUyH{4U6*cFG37 zYdrCued^==oI7%!^(-tJ3yw06Y$F2sIRsHp8Gc`hk+E1zP5U1>eK* z$wdRx49c*jF%8SfXP6e+4G;If5W8hmaq$zG2Ap3UqE@DDdKN-C|E^N=L~Uy-wzSMf z9rf(9Y`Du^T?y$K{ctj@LAWPr!>c(RZv7Q8x4{~=A`>d+X6W0_d)$yntlO!L#S0hW zr&lHx_3QZi?C5#TrGv2e;S14Y755Kk7U0JeB}6*dL6ftLIdjdh`Gf=ft>ZBz#{dpT zEAj6E^B<-&-`-yW*2Q(=@+EED`;dYR?mM!%r+hFj8yP=MvG=4eB&auauFd_~yhE^rHT%-MAFCVafP$xV*#X zt#$e=tP4Miol#1#^mM^?IZb4BGR3O}?Dd+h@X|W~x2R3MN}ZgnR1q#kCzGSw@%Oo_ zrZ3)(?TRl8CnJ3bImF7E2zqOZeT$PZZ=?-=Jm73)csN#*zi~y0{R3Hc>vt7F^-8-) z`Sf0#9zKhl4+}I8WiHqf6TDK-!qF?#KsbBjmJfBt{nhb9I}fJXvDkdQ03H4NI(nYY zmOijq`A0lXAZNC2E;8O~!>-mGE16GO^U@j<>z(1fBN~cTI(VsAjK5Jyuxu&8>|;If zYs@b)9cRETN?>Y+9VDKux)V^hTbqixAg`{nGt~_S%R57rI<;@%xNqOQtb7Ec6 z1U~j5N8_xvF9o{Q;q?Vqx*Ikl7oOkT!1@It!?$taifXcqKSputlvx z8h+H8U|qN)F0e)u+=09_;r#MX77oy7)zR~AR`*BNviHKD`6|2X$OCAT!>Y0-G*oVgh{(srt 
z-rfKYKb7I`t2}ghRDvDHE{j7$)5SJ9Hx#q>>c@QTqC*NmelGLlwc&cw17W>5uj?U$ z1IR)95%%(zs6&%#6k~5RishCH(6|_Y0oj`P`@#(N)aWnhXMx1xP@J5|yTBsiA%dZVW-$aL#W%iV<3$iTE=GaNpho z;me!E&2eftxiti*Y}Jvu*aC~+WYQPlfP~%lXuO`t87_IG)QA7{h(bxu zpE`o%G*EOb5B=AwK}#YWlMX~;NV*z^Qde^Sel~)t$}!1s5C#}t6KxlnKQ=NK-4&If zwbv55j@17=CeJK@T7&sLL6+Y^oJ2KQ)VsXQ#nHp1DOE_3-ey9*nw% zVc&E^e5H@bu7S*H*2Aw1=oOK5g>(%wGxoHL;;y%Z$rAEU3__5;P!k7ck&6?MiyyQ2 z9HSPhncVu$)oSRcQ3#(i$#}i01d@9t5iTUqrw{j7_nfiMQXalT=u1{f!Q5K%?oBxR zsg6XdzdGKAldmC7z2fdt{I^94ZENer4pU`p__6?V$RXJJpFOIq($I9o7+N{>Gmnge zDfO~GCgo7umV<#Z{MS zE(g-O5vc8+k1zgR5ZM2T2w$a!z+yi4C!UgsNfz*TF(|b=QDAgzR_b<%#8?7#+$V|XtL#fX>KYC zSCt{>%fBdMd_Tuf&R5Jy6y4 zy)cTAMciP{oX@FZsT(y?6Ug^jZG;2=oZ&Sk3nlS&yeaE>N`l(~^>%AdbDyTW!LVrpSIVrMTG4s+p@%W=0 zH1zzjb*nBWoHM~-*KDNDwnTZXFOH;zA@@HW=H(Q_voRIPTBYdddHdQWaijg4SP(6X z7;@m+_6USD=;488CN?}V#Sm|@oQC;BYq=V%Z%_|p8ij7uy!FZWEp&{Z3p*<{^srq3 zk1HB*n_vWo6!L|)(Pz5L18cJ)P#Vp8!M6y}%i}R}btz_BNFcD`y{J{A-?x(^j=xcb zq@yJiPG#V7qA}i=`(f14a7ccmu33?J+$Pz$dM*#&W(>w1#YaNbPzS%L<2({1j{`c+ zaCn=FVbubA*dJYtpf*mD|972zS7#xP6O02KEta*Y?2WZi%6d|>u#bA`DWNEr zPR5-QBb?vGx$yE_?EF-U$S2*=E%2B~=RWc4r$i(@R)TaNYdDB3XdN|y?jARE-S=9cQ+-XpM-2UiD1Q zmgp{dFI{h_P>93NOc5FkR$?RbP$Kd%gv{B69HTs~Ng3O@_sA`oQj5VfFb! z9A%!oUUDHqZVjW3Nq)Pi@g}W;_($>q3gUuCcFE z_^c+U?f>(6r*C$LaqfHZPeKU`=DWh>9Q#f?8&p?iL0#7ZhFQ%1xfu(C4`xtvE{1!* z9C)pxpSgqYv!mxpzwC>2{{C9QT(x(>a9B$nL7fSdk}{d?#69j1TS)haK;8spoY+u? 
z{Kj}pHLXC`(t1%jdY5Rw6pUwwD`B;97CgGp3#ea+s7|v{vv?jZEr>;k{d7D^TEba) zE)JF~!5aS&(6c%#{&~v6VXp^P_nQsL>$4#hlZ8QdfXBb>>HQ4GH2Mm^apoQVfQ-hL zT)gWgiGNZrMZhmb>K=V@mDgvapE1G}=?{2shU~HQuq}>+68Di0<4O>8Fd2VqOOf#E zqgbr)Qq(yyZ&2S4Z&&C*=BzRN-I)zBf%-!}^GA*g$BB2kIA~mqW%{YuWl%~@x)ci5 zw~96&!C7+{_r)qW5ov)9jk!qHv_S8B^UxAbKG7@suAY)NQ=AQplNC6;uor4|e~Ckv zCc%sIb&39ZI5W`!>+&Gko!OPAF>S3}Emk7mfv~_*0b!$@1P<^NF1G2nA@r zb%T)(_YX5n;HaJf*V{(u?CFf${Xy^!)5GGK%suLtj<3-L2+{9^GxhgH^(b{1^Y3q4 zt%kZ2^wRm}uqWg9cbUFxdKy*wkRQdq!IHY=+XtyLzs-DK|4ZV3`?P60!_l~28|Ca} z!{Rw}q%X*gWHDf+E6#t{g{czt@8Q|# z>B+fkoi{o4ahNqi9}@~ou&9N8k>FA^#rB0nN^dB2n+pA6Q}iXL?O2K}w!h54zCwG* z_4LHUiWqEB&Y%qnX6$S~ObQ68guI-qQwHWY@NFsC{TNlz@%>7o~| zor=Q1Of8i3&qq6ZcNRt{Avu`{nrDfzJ}y`@Iu6?FHR%t{ zhoo#g?)emAgGzt=$hE9Ie}>Wwpd$%9&M#5{^@tn6!!S67_yd|@o&4r-&TTp^ZE zOvawiMX2-c1^@bo;`}pttkZVm9z+ARGmH^8KMgOvEU~`W4#T*=^_!rI*86#Q6&#LQ zEAG*H){|jPCfUEqSngng9}Ucv4VaH_FBjmB#~fyq2SSH6;U&%l20B;4?FTj5i^@BE z-qBm!Rr%Km!xPLMTjYkm+*O>rOda(59NeD8?_H4@IK9XPFkyz>&qCa1{o*yQ5cMj9 zv2^rEN>gajT7k0uP z<|3A6Dq+tpH)z^v;nx!@j2)Vd(bKFjk+b*e(gZ~RG{At0VyOCOBGRiEYbTu*gS5Ni zPQ5HHzqZGU@no5OqK8t29$yJla&c2oV;hCQkh$p7&d)I?29htyV=4V9yc*w#mhoyR z?B^i%9CLs^#|$lUc-R4S?B@ftVe zpHoLky8(IjS=g^=1{VWI_7Y(jK;6ixdg{Sy5^%pa^<9Ajkbml@P@kvX%22DE;~?dvj*;L7X_Hjy>I7kU3f&D>!cpyqpTn&*msy zXo{4jvH07dgxs#2gS?C)KeQOqxBrO|J9}fPq6~_!*rWNO7EFK4WByP&nHa9PHo^;v z?^95xr-^O!p;Xd)&=|20BSsw&hps#nE0=1+*rpJTN6c_zJN;&18Th4Pi}6+IP;1S^ zWLJH>?Ya_!$r*aq{$GdByEvgQcBwXt;q;X!a;|uqb;DzKQ+WML#Ut{9huGV|)*%Y# zdXXz~rw~cni5Qn!h`~YQa5?XS=u5rSE~f=J9IZvIiYXq-WZ`K%;4Wnc^-AiVu36!R zQz;H_%)ubpq7J_<{ZfBST>ex{{3Vb6o9*$5JZG;n)L4wqL`ZjI=zny8_kZLozSJTI zIv1DH$t$78Ecy`l+V_|nXd(;09a`A2T^C!q7h4mRfm2PU(0JmD)dQOHdJyiNPP?U zEAdvnV0x)dxD+X1*eX}ZI%)CxF~PJm%q>l`K>adzJboC3*J0da#S}rYHWll;mEcIv zK2TfNDxQ@oKuX>PwRg49YnmClB$F#xCQ8P>ejyGi><7`gP zIyvRJ7R)-!#N{b=WIp=h(DN8bpVWkeVF4_*#c^lO{J;3l7{2L`h`vsqfPp=f=acjO z)*4POsW6W;z@9eFQ7#9eZxd$+=HwjGlQ$wN2PbmBimHaU;#Pt@rhl`?ub z$1Vfx=p2UTr=cilwu8?szRwv2SUooz2dxKUTTYw!Jxu|J92Q{0YbC7QX@a5jZ~kpD 
z#+`Gf$V(4I{|lL3Ok&)^22$urG5iIx$C>T!$X7$sdXL z#;E_ov0|4dCevTq@-qR~saxyl`|RlV*U|Gj`uBD8eRlLbYwtc#n9v}m0!X3~)FkDoWhYCYgzU6K9hNFdJ<~ z`6x2i#UYC@SlPxRf1@VuQ*WC7C7XMwauixLirnsXB9pu=X{mVh;hxizKIoY`S*Y)2 zjV*72ajP>m0|&HWv%3Np%+k?sSS7;VJ`v{4uf(+-YS^3Ojv23*=~QGyzELiYbl1lq z`rsc>t183ZVDi2~)(SZ|QqBE)2hTgDe^#WP-!7DAOpwpbuc7$5&qoDL)_vDEZyBEj-Qdh0P3q8YF$z4%-0R$o@90|toB>tJau>3 zk0Wqp1oTrzkav^=!W0=z{$9tST^B~VL z2hXMyw|=IeaDN#rR`-SXfkz^X+(NZD9}M)-f_wsVQ{PhmS!auBPhH{jCKb#2=prz) z1d-LLkh3C3P(}({?;RBi8&oh|CYCz^c|>kLa5ty@y&je9;6Z+{@J6wm`#8Z{~-DVY`$rY@Qb2lWHpb zRu!QnwkKLglSMA2fR?ZJuz#*je})Bq?_`bzeH!7Mr5Z%>-s{IbWqAoR51Dm1m%fle z-Ee5K1e~caA7K`VR`&Phd#rKFluU#gJCx)Gpz%^1zMs&;{MK@)KFh>6*~$)oE~%N_ zaQgOBF>Qn_nl`$lySh4D|C(a4eg=X+Tfx4<5gV^?USO$)&+&yQ92<+t$<%66_vEkf zP%J9Y#Es7W=-ea&-3{~L6q<-vTaD1FLB7;#@@D2(pv(Mnw8-UR`6&8BE=ppS^#_rA zKmomi-4NV`JQEiS)E;Dha|H8YuQ-$S6OZ9Db+J#f1OdnCC3{kW%ZIz7PxC#IdPxDl zNBiMPA8NeGb(`Fj4yB<}#TaN;f~$7<)HpQ>-K2Bk*%a~x)5-cg zu7EXr_`D9t!mimCkdF2vpEDWJr!}!XrxZi2s9$|v+Tqu2=-vY-oZE!#EG0DZ@7r)q z9oq8N*zqd^`?W1F^rt8OQ+G6A)8v|G9+I8A!MrCh0GE~_=q{Ucv&5@b{J!2 z1$nU*7I0kW4Wk8-2)mSImK6T{m|ccbwm3Fh<)LXPR}eXSoSH{4+55 zk~JCzyJ7S{=0^C?7x*(9pWPyH?pq#GLxw}S%LmcAQVu4Qcy0Vq#WX%|znx9N`*;)h z{$aMwws6QyBu|E%8UJmu@T7l2A-OktMST*P_hoU;!Vy@eiU?Cn7!M>z;-5MF$xb-B zAsR;wXXD}KV%D>1NR;5ewF8hw4%WUP8GN5$kCGl5nCWhezEjh&NS=IKa+H4j2}e)% z?;HQ*W6!4;Xm%|`?CV!T&%IV`xupih&w=Pf9#rTgeH@O@;k?2e;e!LHCy0l+I(=KC z^08!348Gki!r$4W>D{_2B$ZTPA`^z1N>wC2F@ZX>`lQ#HW2dY=COLh}8x zFuJmm@259%U%eG|A7tR{+F?t$Z#0wmB=D{j)? 
zf4K|R&;2Xj&!WEWzAs+US5jhL5a5<@ru`3w`K8IF;)rRvn$Z~a2%X`$nX8o6N@rF ziLQh7;JhdXQIYfo`B-Du%iO1nK&3xi;DUY88!EKKSfjiRSn==r@HtmgGc z$Q&7HQ5##U4&)B!eTg}D#rn3mca{5a(*=m9Z@1}NE@I`A;lHH-w|xen`=Mu|;Wy__ z|AG)QW;*n=nIShloPt3!OggHKG5b<*k$LNii!0G(Y#jR4mE!S}FT&5LMM(D~N5aY- zsZR{>vD^ylRC3W6 z$;?u*#B%>~R2A5x-6?7}+xwchb4&@+4)xW-`*jvL!Qa^T{Wj2?MNiA6RNVWmjpQ!H2s)XB zQ^!hRwQLj&PJR^qUy4Rm=J)3dT7Lz2Cfti-D7)yqTWFp+`^swe!1^uRpNaMV2 zkB_d8*V+#F|I+ea2*$!w^8%tMhG_o+MbP}oTl zGxN@i=z0y9&hnm$E;7ut4&i z_&b0(!e_iOr>`nBx0+#Bbq17jjUjb=9@K_LU|@hAJonN^8Ib`K>tfE9q_9Ehi|`1M z!#Yz31T53V&QxQJu*yR4Axr%7cf<(J+8TnikjMV$U0ecM6N~8$X1?Kw2C-8O$!8}3E2Vap# z7xdE{UcWOjpqz8_AFi-!j70N0eH2eCh2MVWOsN*5RHYM6pR5yWo+zXCu|M2c_r#>} zb<{^LoneW&oJZDMvOmzC4V_~}=pK@eSEN*X^wbq+L_88}vB>@;;tqiMTBa|*m$A$ey zI8$McL;g|Z1C#eRtr)#^5^#J}F$i_T`G(tK?KBnCJc&fwC}k|}&N-jRz{w=e4XW+v z!KcSwnsb{x)`Qu}SSLz5{JM_*eGXH);QfS7XnQ1soXbX#`NX+YZzHH@W#S0^3={r& z;zv+0@|JN9Ud?5=2U_E3!asG$9AC~Bx3E1qjbEqWI6uh^luaXvNPsrWZi z6OWrqq4AOZ-|2FkAKD#e>u!qG`&F=tev8_hyx;X@zKAR56YT$2$MX3=PSWHtDme9} z2rH>wAFzV{f+xR4iSH}%x0eDwT=T?=*;-hoWr=HhGtq~8`6P&zFE%?6w~`IuI=&EF<5Fm*+ynRBh`J;RU^nE8LoW3W{Dk-D0 zD?O~48nDeXMgaT29rY&A9v2LW;%KDX2&`RIh==P^VM!j@2j6y~H|V5T-J%Jf{bBf0 zt_9;)#`xKigJd`IV4_0dNG{H?amLuPg1O_(+1Nd>0P^{rQ4{n*Og*3i?T=oNV1ag% zb*?-8=>NJ|WBfxeyjFRTc1kj2`aq zOh?NQ1AH>@#&)-0e2`Yf(}Fxm#l<3%{<<0Fy)gW>1P=F7UhrHS5U7;pEfK zq~GR-Jz^Xe;6C-zJwB4>-ZKj61-#F9@Vrm=$6!*A6GD~y^_QGE{hh9k6$X56Zpp#O zW0pvWf~9QsIP&lH6xi*&sbX8gD|9 zkSK49kl2N&Oe{qBui_Z$xMW=hls`Sj3%&0{KvR+p@4knR#~}%#e5!j}O!b zPVZHW_g`YU<1NLnRbAn4^GujHQ-3X(*Wjgs*Eh+WznO(zGwm^|ZXRN9r(;&35~fp^ zFlr^a%hW@!r^ox_*E6E$BNgP&3y0q>>foJC5jT=tF-7tKbz)(sn}XLmWC!#4{@-I} z6|0xQ^u~L!aYVZaoT7l+^-eIkF%yfHk%bz{XV(!^Y^FD40?esUXY*v%-tXIb=a&5u7=ZTs=LU&~$rqL7upFQ9+Bn;)`(5PmTe|$Ru z3iGI^9ylE1-aHUL{%~*jA{eVy@V*|dpg!N%lYJxf73>z-bg z?V@^hPq^64f;a2`n8mYk+tMBttyvi6Iu9QrL$InVcVZ7_<6>hmy39<$>EqPRBzMQE zrfb63RucpBLr}4T+$s$t*s*SH+ir+qgS=5lUD`q)eKgaD@^xbtc9Xj`-u1ii9DYf> zQ8vW3m(=rKQbgE82TU%@MPjfyjCc)&e`XK)!w|U^OL1{j5nlQ&LeKI}SU>-h2-j4E 
z#Y8W9!!=<|9rki^3lry&U+7E5aRvQVeBC{A6&);dG3YQkyr26(x};SsnybP6Z5ZZk zn}q{|UEsGR9yS%mD1K*;vmay8xZMI{E-d5@FAudDMM%Bd4Iysr;>IF*)Q5TEq%nC4 z+!vdT$wVgg-g{QKAe7qTv|8$<>6eJsj)Pj4a$FeJ8G}kY!TpOOV!fTvw38aCQRdjy zIS2O}ZILk050f+^@P_x}GIEpK)KlQqy%e1le~PoN55?g3D)bcu;RgGQULhuA8)PHv zg%v!{dtg3$jpohDxWAYCKj}p1CYEx}ECGp^mxNKRCYF9qz+(q_VByE_!%}6z5&iCZcoKZcO#h0 zwnGj-qhG8sRy|sRA4-Mvm6lopPAk;0!~~%L~JeP(@D7ow4S~o$iRvZDF`)KrU#n z0{os9j~}0yb8(?F8i&6V-{@!7I_`><%QWcqw!}elBr5Ibt7Bch_jw}oN~rT6T1-Ap zI?9fhpn6YNNSo@D1&dBH0WFs z17uWiZH7Bct!3f*hKgso1gw8SV-J2SyU~(5S;Hs3cllrk@=HLt~ zF+r7=HB6MlA^5$VB-g>`ej@ghC)d&Q8aj1_! zmo|N$nxU9?Q3r=u4-RMFfBP|Wkgg9zrtS-I>98hD9y+6MIrBo;7r*zRZssGk<}1nh zdKrcf4d$@wRfJ`Cc&%*9!q!nJHO=>9*>&hjtIt!=`9 zfQpEyh+T+@(%oz5ZbU*6DM7kKy1NW)#m;SQbsKxzf!*DR0XDXxzVp0)!u?%-3*_dy z&UMbrF@`gc=n?~c?pLnfD#M6L*;p~E5+0%Q(6(M9CRCcierh4)N{o=!#;TeLrMO?g zyv3@iXsVrqx;dukXrjMk7CAzTYj9FW7OghQaNni@2M>13^|HXzTTbw!FMVRG8-^^H zj;r)t3@0CD7yaS34Rf%K+>%9I$+%Rg_ypw->#J!>!Ow-4CnPqtLR3y&q(@ z+l*judJ3jBY&|z7X7v%tGfUl?5o&YKLvfPQ_!2e86)V&EIgi$ zy!I+=`=N}OU!7v;*D?G%^@YhSYg|lqh6C?G-|1&psSSqRk|c~#Cx88ZC6dR?ftO4T zeCKs1FQQG9_)dgIVi*oNGQYz2k=2uY1g>?1ymBD@-5KbeWDYsS8v6FL5YfF>;`3}1 zl#%`Tw|KXEBHpi3&wA_E^MuhkyRDS?g~@f3bMy1_L@An?TlmX;n1g6p~QY87uO1G_>20jc@=K; zk;kF$zs1%HW0Z1tbzr+8qSc-8c_p*>2c2;;Hw3F@q{3m0B{oc~!RZYJ(8{TSYx^m7 z@!b{GWmfpsI~`w!P9XQj6GwOCp?n?ZrmHcK{hE$*^ero?&xQCzU#N90wP$(kbNeK= zDrrNR{2b3KPh)C+tS5!UGULOprOQ}fZbF#Xu6vw#RI$v6WWaZytM9>GJ#(iu0 zp%4_kH^9JGPV{yb;Pxpeoc%c!l8PPbr5~X9)%WG zxHQI*yd3`Cm)YSGCPS=dKWrp>0~eKHAA5D&LrOUZcZiwY?+Mij7KreP!5JTY=-y&a z-JlZK{~;GOcn0Dh%)!vrCd{spvrw9k*VI`g{kpF+LG~&eeD@_sSv3#Si|P3e?2YJ#OQL6tDfWdXqEp5U&g_4fz~`sw zPzQYP8G`RwNqBV21}n69AO5=tmmXDP^XOkfq4bqd%xB-y;2EfMoPgqG>?3}V25Tix z)E;n!s}Z|Vn3vh-R|gyB6id}>v3H9iq6|NYeQUIEA)0&Z&-8nrwnej94z4bCz~v3( zrSo2V!i@U>Q9{;CDqhbjh5D=BXnNBwdS-G?dO8JZmFybm!uQFvYOD)l4sd58OzE#X z&d*XR_XWE?mSOp;YWSJ_5RLXHg`2GzjI~No&wNMI0ACnI<_fwl@;CgUAMv~7L zuzenVTGZ9XG7sR~8&VC=#b3enW$lPW;B_4wJ>doac{v!uo`vowU9rH5OfY`V44He6 
zRHW|NOkT5uUpMvI0N%?#3&+NBFb(vA>s~{AUhRU#VVvi~UGUzRdSegfkAlpQbFUIM zi79ZbtAwOq*U0}DbGx%*NekzzDHdZ~?zZ@ItT z8IRTE5Gcg{5dN#=;5&qVJPR*Kac)i7WDAwWB?wzg4q;9>ITkV4KZN&s`n@ga|DVLZ zY(<&D(9D)aj@v|xd_xXgk|kE!kuTd=0O@*Hoc@rOvl9+Ik-7h6Y2x$hm}sk`__SK9d?<0~Nv)^C5F?mlZ zGR&Nq{f$ENiY(mH^+7#-EQwRd+`d?i)U;M{_VjJxIMWo*ZzN;HD18j;;fz5&OW~x* zj#$KCbH6$G7{+}9`){)5=Hp{Q1Kit>ifF@=;`w?q%!U_Wr>Y+EqsV%$sKhvG4l!q< z;2+7}Kxc9hG8%9!q!=0#InQgzLdEf!2&UehOFcDslM(KR2rL>w?#L+4I`5|8dQ&u( zuC>OKK4oy3U5LT`%202wfcD=n#F{6Q@H-{|%iV3z{f!ISxpxe1@x;8mNbWRe!+e-6 zHd8m0f1Lx5$5r^m`*daaL(%&NdH%LuxPI0GgU66J`Mv;)_zcw89**axS>&P#XsDE7 zYeXiB^UASjZ*}H28%~N^UC+CD4>K+)M%~XxsuXcA>@a~`=}C`bG4YkaN`9YG+Y2%DVHuR( zD!~VB;^-zF_ZJdA&aj>Co?Ae2Pp9j>quW3V;{@Zc8qOkl1d!cd_v9;e* z5f`hC_ev3{wPUWp(hX9(bCF9wQYgIHLzv1Q5i@>HYH&Tnzp!tRTx+EJspE(!xQ}R)LcOE(y{So&+z7m01^t=7b&$WsX zc9FICnEQEU2lCSlr()N}sc5}OE7th#z>#d}Xqq$3 z`L{f>BbB*Z3pDhtM%$PyeBgca_J0cOPiPkR%gs=&pMhg*N8(V48|>3&;zf!rZhXgPCBagI`h|q&UCHc;&|j~Fyn!}7lY#g;+yE8{ez?;u3$8ugpnTL1 zYnNvttB(WzK3IbS&P$1$>mup7xL@2R?saJ6_QC16%lomuzcba``9~QEwFm4J4Jj#NO zw-Zh?r~g&67*@>XU0B!?cgN-nZ}}vQ-PeTHI~u6q{rgyD5%$k#_qP&xjJ--BLw|_% z{3d|>9`$uAB>uXmW4lB4m=sK@hYr{=8TF~;v~J}*zN3&`4USm+Bapt_Xza4Lp#EHm zxztzA`Bh=}KV;&Bw+N?c+ML0r;THLs4~(7gpAmJ#ChE*r{jei911r}V;+ktE4D`~N zsi{WGar*qNlu-Y5ES_I7!cBQg4BPJnyVOE-$dIEvkG+AT$vGw?{P_5CL@5@qhqqkf zuT%0?U?1vRF=_#G-r;`GB?s$uu^aAEpAH-CgSW>*aGT85Mc>U4e}lbHOXomNvj!)I zNFj0G8FAgJ#@q8{A_O5^8X zY4rVL#65Kq!Z&kIUFCsC=Zes^+6xm#PJ>QvDzf*QVBOETc$kxi&sXM2d|vAvB{;u) zF2?rJ!_BvG_{Q%?^?@roiVBd&c_bj*7jnN-U`c=Py_a=3OP@z{OdYmxr;&T8Q#AI~ zgDIb5uf~%{8pHnKC&ifdiadc=Q5cZHJl8fWEIrJ;#J3#ej%BtyRvv*rE{S2x8O|9T ziR`{6C_Ksc-xlr*QXDb!wm&)=67bJs@{Z?~;i+F5&IMLV{B?(Jbc#n;4vD+z<}m8c z-_smxzK7V!KAKv=5O1igjK9Zzn$^RlJ{GF-H8Im6I@Es^*b52@}&}=r>>}ki`O2CP~IEV&7F`)UwhRiYlIc& zAZ3`Kuht!7`piU#9{q@pWY=v^LtJE$#NVf+=S}NW!1MZbVub)16=fLHO`o$7KVKh8 z;Zx0=RzF|N`ll2JN9dusWiHw`=HS}H1uzNi3hQCl#rc0UkXRLf1Ap`WIL-yDpBCWp zP!GIq4951g+1NeD0^e_!!`NdsKBX}CySxv*@ykVC9COKMOYwMzG5u`h@iSAHXU}_8 
z^%NL=DuF6}kUv@*$f_-ZoasEou6iM&L%xW-@<~YDLw;E;dw~;}eO;4-HO_9Bo*ad5 zsp+`k$h>k7_BYngAsd!+y}LZ-E;u2K1{xqdp4nbkE3`iMg>qX7K5ex_cY4*2duCzl zNos@ZsxXXRxdHU>_%7;>VxOm?2u8RfVoUM?plzY`+_Yb0KwE6H?@6{k7d3Ut8SCpk-fHsRw#`#lRzY4sN;F!Q8PNdCR$1oKcR#QJrF| z(F>s(s)LP3{IINxF{0>0QLin;xOJ|0Y!Qk3A2QJSo}8o^XXYRGM2aiUeI z$Qna$Pa=kFw8rLK7gW`jAgJC2Y4fv?bUq!U=Gfxihk9sp4>oHudz~lr#NH2=#qmLU zI2#rQ`88(HRVCB^VIgvN112W|G1)B|p3}$_nOTbFwj7*@E0g&CmwZVDwT_3y5Ayj| z-$_C*>SS`(&bT@u|Nr;q(@uJjSxV0OXe+!nslk}I960*&eS1g=S9f$l!NQ5iKg<5o zZ4=12354{~nfRE(4vrNz(CCqlt?@Q6?_LMneHjQkRgRZ~--!KTZ^ZmUL;M^Sj-bB0 z=Z^8ireFCux!whT519o6wRF^xH5^u1hY@5L#8H>a{7Vk$r^#s>L2mhl>8Rv=&Tu4o za)soHpJA_a^fWY+`;(q*jtL<(=?IH4^s0} zVP9s42V%a5A^Bw<%(HCp@AU?p?<|6iMLmipC?iewj)}Ns z!kzu{6zKIQ%e8Mg!lQFxajXK(e_n}SuJ?s&UvrGNNrL|0T|2~^cP-+q`6T?O%IB>g^cWZEc9({~1b&&KtNqK9Mo~M!aON=djBc597;IG^$qz*8`nm7;4y~uoOG4tsC zXTY@|z5QD(G2f&PMmqV3q&_O?d2biXqUrW-p~!xK#$)Nc3+Te}ur;z56kvFp!0ns< z*t9GjpQs0`A1Y(EKL@^M%-~4)`%}p8imb#(V#9B3JUHl&-&d$#546MH!a_vsc7ff| zDHuB|70YATGyS#{|JJ8t;Wp|>7rL;wr$gu}v;RCjgtI&Mk&SK$>0gME5gr)3cN)^! 
z&3nAX5?bUOh8N``oLsVGcH<4gZLvPi4DPd%A^(>lO6U_=J)QcWsvqKQ{9)Q_HkREP z4KpqJJYL6Q^MnS8&--Mdh{ivc#Dy(JSP`B?z6<@of#idA7O=a=1F?Y~xLA~hG|r37 z#WlEpmmE6xdin$9aHVUfNSrwV8|X7E@36)h&hraRi}3Qc2PSw%;*4zyERw8|ez+X3 zZe%gjUCB;wDO}ujS%`J|>_Q2H+HzC;v2#cFF!o6M`Qe%}dA|qPp>)9rf2lD)O|OVl zCv~y~J+S)7191q{<3nas<2Jy5%gBl1Y??%$`XYCKYA$4Hc4t0m>0Gol#hBgq^6pH3-bgZth5v73IvSUD)HR4D{hZy6Q7%b z=pRvNoi`povaPX-_ooAj0?r1uXiSPgVSpB{Ew4i3gfx7+&E7Hz&%2xOT!bIFARb3E zH^7WSpt=$IDA=RLpaeSEh!|Ji8!`hCzQhoHspoid&P@I8 zNDh-9)Fvlmle|EgOAYmf0vK~I={WSaaL@ZBrn=}uJ~0G0q6{$W3;o4fh0tH%%HHD{ z(2PvQ*oRh7+E9&A1K4XdtQtm|Bav>Wj0?Z1U+f-_(_b|4;-~}K)F;FFk1O*zqv)TE z!pBqN@sXaA#aCRgXk@9x=hZe1=D9L>v)&Z%mrcR@HP%?N(;aTB%b;Urk6Q<(Bj&~| zl;(J2*T|&hkjzpIj9`V zG_sKMhwLy3f4)9Wib%fNA?#k!uaO*$vh8Mg=uN-It|HEOZg6`ah!jU-1`*kR^Re;*wm%`uO3{CfA(5u}9BeuHGH&KFZyPff7=yY}$rb6D59c|B;vrizu zJBNK5eVz#OL-I(QI0=`3+2Z<2BUnn?;tTyw$9>%3{X7I2^PBC;*c%ZD^64z%tk;BQJgBlOKtPF$A!32yiwm|o7rDVa+#yP44D$8;lPvj@+G9G|L-c39<*k5`ABFeZ}kwVUi2`KFD8C)AzxCBuU6TS?E8wAWJ7 z?{jy}PZ1gNK{)D>dt)65kFCr%dbr@@AI>39z0f=;1XC3=dB@hlB<`>8%V%N^@2OT9 zUquf6`Vl)c@o~U(yk<`G;CRm9x7c6FT(b>1v)hLxKx-xUA>>i7U7tfs$q?l7TG-}cq>VtI<~TGW8UyawBH{q|Ad?EX*Cn?^!snScchH$v{uW`o z*rzu!6SBh$QB=yYUl_>4mj=nMkS9#WVFPT))oGVstG&RdqxAO|o2KtT6TR zEPT6bfbSaKuw6oi-v~cg(et`&0y}Mvm>@!7K1`TbPP1Qt*JHnkO)KQFBYPAX2##29 z?8|45FTF5D_|Cm_Qe**~Bk8GY^MV)k2Zw*j*B?-iw7(T`dDSiP@CExb@28{x4FeSI zvWL$C`tI2;^rPGt@sVj5^^kL7Yb}1q=feGLJrvUWqs^!%z2BNx!ES_m?g9%sJ@Cq{ z2vd*I`=Bu$mJ{PpsN{tb!%7qv7URR{a*6Y|AC078-YAbjKh&wa*&}+Z5t6QWAjvix zXCC`t=wE?QR!K+nA4519R$Xb zL3+WK)n*{4gnhw3=0mHv09i!~Fd#t<`mKk=dCrd|H;a&<&pltcC!|aA(4&YwKTZ5U zGR=Vxdx68IHR4(Y`{@K-0?%g4u_CpH0MfG86WdMCMhBL8ksTsS_oiJ&65w53upkRIg0+QGt5X!zG z@&^xHAUpjixm_z?icO8L#j$W*Y-yN^g07aBdftuxnqma#dm-;d5;pr}kte|Yr*tjS z$m43;!u;(n1$e)GD55UvV!+36q|i_M|EJW;^_M*Beq} z!zq`qLiMaZ&T|)UkY#|#?VRCam39tPss$H4M=bd-9R;(zhG6?`k3DxOiaUUOy;*&PJw+9Zo+cf8Lb53&VOG$?J}mzrKkq zRSjl>nbX~8iE`efP3Rx=-RFqZ_?akJm4Fl1+;M>YTz&5pqmAB?^fYPAPJS*Dym=4M zj>C$zdZ>Qqh?R9k_%ELO>obAyAC-odSB9`2QH!(EdFVF34ug*LfJ=rHx*KWXQ>`&I 
zUFJ#26Z@e?4&NDDEE^t*^qFyZFxef`#+1N~d&;$o3M4*H(ti6{o0RZA=aJa2YK-F7 z>}|=?!;zUDP^Vu%Cc_Qc7ktstnv13!&TUugaC$QR5pT)cwv{4l_O0+XHODi0<`-(| zBWsi=uDBIrm69twJ;P8cpNynlR=C@39&>Od@aH}LS-=;IT?!NBr?yuEzza30_OYjpczmFO|>6Fj3Xb@$rf6ii=NCg#j!ig&g`lo z*M?l%F9|r$eMoUv3(PjC$I5qQhzgkp!x`NXUM2^bh6xBC?2fE==IE8|hQdpQ$XVis zv{_+T@hqPANnj^Aj{P0;F|MN;np=Bd&VzQb?vXY%O&a zlV>iX5AxdDQv5ndKlH~6nD)IV-t_8?=l{!>mbjATW(A$3$r#?h2-9nr*9gdm^sNM> zI{TvQr+T;@D#vGM_FGE0kCgPhv;Dpb|4|QE3U7rm8_1>DW(W^&2aE_TLBCV{zTZs6 z%SZnS&gkd9bSOj%|k5L@<57&UT?>#OUJ@nF-3=-;cP<&-egWL_{$A zd0iXJjEe9;iMtp*GOdJU+*e7@lQbVE>3NsCJr`vMrO?T};)Kn%dWvACv@!Jcgkxc8;s@MSLg6x5)6c7GHU+!KGuAZuHbgr!pENbc%|O8T!?t+B(m z96omgnbUadjf#S~I54#m=eO3N$xRV$=8wgVDg$z=CPVp$z_GVvnMSft*xVcY_fKXQ zUIrq`Us$}W1X4{2SjoL)%ZhH8;_yf)e_{{nIuF?Bo55nZKuaEb0&46qS~VDPt0Hmt zq$Tn>tM3?@h82rSvEtr0v1{FbV$p6r1Sck;u&*&Z*E->fT_HTP9kHlB5>JLQOWsAm zmpbZ2`lxKKu)lbRBKLgK7+R}=lGB!0y~+gI2A&Ae$c4vs7bsW;LDxJHzsK3*N>v%2 zyrM5{K?yV#bjK9$52E&+0S51nML$1%_E$S&{Bm-C$g$os$p<$tC1Y}<5r%cvL7CeB z;IMjT*~a42<2$06J$3(D#Gqp|eRTa@aDjjCYD;|Z+TH=W+A=t?G%nn^k~_R~jxNhP9RCt_b!CAw90 zhvw)up(SU4M(deyex`?E1Ksg`I&(cr?0b3Sk8rPSGQdokJE>!SCm((5>5G+ce{EXS z8_5oDMHzXc%e?7Fr;l^WXFDwSWlng89ea1&Fn4<_w8$;0(5b-JC$oX8uUmdS7ARq8;a#T3@^*XYUww?bmO@d7n#1w-;XUeAIxk+bR)IT!r_Sr7^90ySP!J zhu*2v(e~X8osN!p5LANWzub|%X9oUhO2H5EfZklJL`_-_tk%@v?syrT9`##n=&6sH z-II`HV~mzXF6g$a5POHwTlX#m0o0J(=z~*dPWT`8h>UYz`boMme<&bSbjTX_#3qJ%5?=S0bQeU$B=237w3 zex727i!BBCn>*osg?{+rk^*ZzE8KrgF2j#J^qgFdE9-_pXX|~@o~MthI}td0#|SB^ z{up?>5En{a5d2{>Zt~soq=pRAe;P1iaTy-;uaWpXNqyT#qrZr=y=9>D-3Yw~O~bBo zOB@?ZPB62VCBMl6%t?d$&SYdSb%*ZJc^DkbPV+YU;ZF}lH|;ZGMy4SDy$J95{Cqc) z{Kr?t_`$i(eGW5Wg6x60hHxA;AN}jIsM{}qr01ol{TAsnFN*`5b0g)V*k8mR;(3m! 
za3{0ujVqRj2<*Dael86Q+@t^DJiW?i-d16R={GS_rbRe9>0#+oG7)o(5V_77Kl$7p zu*m~1*%27@FcaIejG|SU1Pi9AGpIAt9hx*WDhVVZ$y zceR!K`A6K{JTyUGa|LRttyni>hXw1!#G7iG9dKhYU?dVhW%6Z)4gi_g^iBli|UQ$|3RTn(LqVtT9Guy;g0 zHoN9Rg&OkO0gZ?;&cnL8Mkt=^j*PliF=3b~8f>yr^VS&e9+Ru$SAy-_)A}U`!uD|% zWDSkba~6G^%kt1xIbUKQr+0f#7`L>GI1^pW_F!M&aw7zOj9TpmUyk z+9%am9nO4sQ608fcf-+=Q{pUpbB7lvWAimbgdSpF8Sn3fQ|+L*FbF5MCF5N>~(2$IFR~vL)Oy93^0iH*c zV{d~LA`i9+*@gNjZ3x0)_B9>q=Y-cic+VMW57Tdfcyfv?-Czs6d|m-V^17;)mP0qM zL&%$xDK|q6D%?%I8$=#dx+}(crQ+yw_T16uI6E{7=hH0U@v97@e#Bx>R5_Y6WH3~Z z9VJD&DAlpXkIm-1ciZ85M*&upx?=Q+5L~(ufodxg*rhWsQJ4yA-%14g_QwPxW$xCb zpsB5nX*ui(Czq)AsVp3-a>T`<1%)8?vFD&QxKGF4YvgHVp0oGI=mLQ5K}7C6F;~f}X3&v6;He)lKa4+a!nZm+OULj4l@a zNW)^DH)T4oUZW6Aezw$agHXi1zHc6RzqiV;Eh!tiYs<0xfE;F~y%YJzb#TRN8lSo5 zDEZ(FwQuBAbEgy(9uCcY=~%tU5=(oMyF85k`T69-4H(Q^_G^(?Yk~T$v1saLh}!pF za3Npg}RO%ri`_=e?%7-L#(G}ZpnPW(hTZc zd~W5~J7IN^FD{-=z~EDsaBr@~V1ojDKVE|oJ3GWD^?$@6HGRm+CgRaMUA$3n#g+6z zb_uvr0}R9bjvU0SGK2TpDp)Vfh3hl=D54dRUvpLbwlje5&s>Diw^*z1g2FQeXkwmy zdMJGYA{93enjyoR{f5)>m=&$Vg`J;8Rq6@R&sGoL|L|{h5%=t0o$x@Hy>oT$FdWa$ zHF8B}@_xR&xEktY|2}Wz@BNl6(pSF~y9Sz&ha8O4D$IY*@q>A2KF)U9U@bW(3)UxL z{cm4-6_`V(Hm$D2ysv~jD_OrGcp36kxSwH<@{UkkT47BVgaZsNmLc? 
z0@-*@Fh5a=+K(l$`MV6K|9d5z2DXXAyccS%n#n#XT{!Xo|Ln&Cuj+?Apm`bM&rvb2RIF@M8wVl z46R~UQ->@rmvn{sV0~O7QzmA^L`2-1iut#=pSa?SANRent1ur+lt7P41C(|1F(;!2 zFRKTlE~6W+#g9gzB71-zTjTmSZ$!x!lH=fwZGD0<<3bw7{br{J^ZD*G^RfR8xe)97 zLABdw@ja7$&lXd0@SYh={&r$FY6X0h*~ifuir|W5c4#@Eq^1_q^gX>eTZ7j4uJF9G zSp;yua*KKFxK3?saCSn}oaL_WuDdtwjm+}8%mOyM{Ae>ES;(4MMemK>m)vyBki!1Q<;Ap6^Ph(TG zKFm4qu5}@w@4O>k9?C%__~u`z_tDwV+uH%^?eVY%zB6 zxqX^Dul|?IF<+Oxx&OIfWO)JV?bKjAXf9-?r9#1D5o&W}(2{da=&dne-#{Ab$&WrT zjhrX)o_vRT!Q*2Xf`{Z{&~!8WSWt`JL3s#RSPz@^FU4PL-iuZIyL~m*4+B+9(4QXT z`{Xn1Jnn^JqgaHvu&1!i9PJybaLqWEzM*Q!Uy;V`XS>BpEi0V*GYezJn80y>K3v&H zTN`hQtW80%m_3VLYc1%-72^4|3|xL*D)Bs@F-V2ICm%(hLwYErRup`~3=2227l?C6 zUo$%d_X)wp_;~!Zb-`q6hRS>&g*H}T!07IQqN)5f; z6L($G(Pya@!seAjyDAGk*$*V?-sZ!;U*h1rYvO4fb^bqbh*UC!T6b4u^e1D2yP?|Z z=}_sNjqPowSil^^Wchsj<6Da%UjGTRUadlg_qPxJb1;Otv-v?z80%X^c7-kGAIgIF zk$AlRV~f=$bJ5_yuJF_Kn7{dt7!vSEBpYgC?T}D>4l-q*vMctK79q2XCpNwghqqf6 z?Ab4~1XWm*kb^C}CrNr<-_~EE+goa6L$qPQ{J}8xnKlNH+PV56a zWQIPkE0BI<4vqv#ex9WIn4~{nSF>*-bNn6AZ?Xw;yo0eR(F7ITm)EQ*!RlO3$lMA? 
zfn`4Qbs?)QIIBMHr_x zU*gZTr2S(~yL+NM@u6@vU}ksQO!5lHV6RmGK0k{^V1G}2$QePKT+BKtygS`sZe& zgL|!g8+&2X&SxTXx-s%Dr<2XAg>MSJoMrNH<)s@^(%m87GaGv3d@NC9&PIhjgwb=+ zkv;~oyZfQ=OBd|DH3a=0r$MgS6|Ga5p_)tn-Hrx$wa3Bzeg>v`mXd8%j?h5ny54s| z`mfL8_Id+M_47rdfq?53?mn7}$(3gf_NSt_H zESJ;Ax8v+)bkV|yMUGgmn~JSPcGzTKiH$0Wu-;;fV?CKqeVv4dQrC7)L@Q_C{c3f1dbfoAvsIWh^0$bb{6@qv2fS=!0v5aL zk!{Kjy_cmpeV2Uiv7s0rk_Ou}_WwMr#TI+^Ni3_E_&iCu6~_V;(5JIaY&)TWi+x;i zaH$t?!vUQuxqIyIj@7GYV>I=s(q`^7c@HaXqR;4Fxy19tjPc*a#?+5u^dKE6-9Y}b zzb&G_q(kX|9g>3B`(82&e~Ps*y1WX{4$i{UuhefdrD58oP53=CgyFx_G3bBy-(GI0 z>MVgc;(>_fDAX;=LU-mnato{BFH?vc7i+MyZxUvqm3Gs5R9ASu6G(m6y@eeCsK1w6LF6A6j8SJ@v2aZ(rgZU# z!LA5Q%_zY9Nane`>d?2_96Yt2C$Z0}<*OW${Vs}N4}CaFMZWP@QXI~c!$FLlpa3oIbg*q@>-a$ zi+>o3cUq|!<7JM1W0~I^mW^{#WeDQ?DCXc7p>fLu&2J-7!abAD3KyLHQHlsNR~Va+ zp(mS;o2Aw;|3SVD_Y8f;*1_sp*YZ~WZw^}76%@xcll4f7cbD=fE5Y1W4qOy0lUmp8HwvdCoo_n+z;dtFl?qC$zm4Y6Y1{l_-n(UA{ zcyzWFYWi}}{ijv@^f&*1{`&SFqd=0=&Khyud#G(s*>vM;?;<`pD5~g~>&5Zd?em?y@jnMMA z9u~6-u!+58;}&*@oX!)mtkM{l28ChBPwErgIUD~?uh~yK?B;V@3P~v2$Z#zj%ME}n{V0!GT%dHK2yYj;LGeluvggv*wb2GPYSmb~HxG}F zR-@WW5fL-*h#JpH$a(F9hw`>ik9Q%Du9!?pJ4E-VmUB1`n~(S)QMnW^mCCSe6T3jK zD&otKR`F}T79Jk+r_MuO%OdV;r?78Phukiwsoa}oVduZLIGS2X#u{}3zF#Emdy(`! 
zN%`ys1Elb@?U#^k(!`Y(au}J9U-``yTQ%8RP2JIVaTI2kq`)}T8ZFfoI7G&>vpKnw z!+WD*?JMyioE(?S^i~}(z~N+9<{C?p{l9$2jmc3Ndw zcdr^;7x`s7#Ctv;A8Tg8`ST=Lb=q@3UjS1}TPUuK!Y4BF_MSI|OMW#rUdTu3tSY?e z-UEMnKNrzIb$MTk!1;%INRD(sqHYlezja3OaC-F`lcC3a^lk3tWBy?i^r2cvdY*f0 z7r4o{2!&!jOlt{()qEp7boF5O3i~jrH|o?xApE~H6!vk(F#0~tTub1rQ!ep&!|0vVlO(o*1mcog58Rri^}nf{(+FBIxKq5J~!UuJC659 zbmV>Ug1sbaA|5tt^%36J9ecT>d(iFzS6Np)E1LtiLQ5QZUyr%la?o_X4tE=R;n|i8 z!YV-r;^8zHwi@CPd8*y4=;s*bibQMnXm*{0zf?`}v!end*|W8bJ`tZAPsHYX%oSvt z!?a%kmZ&fn%e_Rd4Eq>rebD?e3?}PxAlq9H|4J=@-MZN@7`+e^?mZW;r`!>0o$S|4 zj)SPw#X9Cwx2`C}%(E^CGKj=w=0RiTnc%`;_U>foW2bH{Rz%C-dC(Iv_@)7}S5Lv& zTgL1i7cdw}zy3yY{ne&G`OGZbUtxe4`l0WyPD9|Pa*2JvlIo31U-rfM$WLO#H0lpq zreOOn>d^PCp)jEk+H%a?=J>+fFdQ={u-CIIdvBL!LE|R5xbd>|I(0>#>{S#~bur)Fk@;B{qUaJh%Wc9sA3FdlAqfq6AxXby>oEfvrJB^`0-q%%n zzAi+t7gH|`>lSBIYh+LFhguYM$%p#GdZg~_f-yI`V47)jlQh=i^ zPB3{Cfo$^ip9R>%^?eakF6FWlr5Jy#y5RMi4$<;rB7)m};1Fqy!B0K#V;A3Z>{pPT z7X<%>S-9}nj2XBp1lo~1ae1eG&W)`@o`c1|Dl`^yCot6!+f6L7ZM_qwFJ_LR)){i+!lC?63|jQ;@VK%9 z0n&v?cwZs0e_m2wLT&5-EH!;2?w{4h@^Jx}Xln*V1Nzu?3UJKa9(iPtUX4wJ^9gqx zR^+{EG5f{Fm80gmEJi3F7S{(b|Hj?lWbR{^pRnQmu^7gE*+re{kF?GVB%IX6P0kTk zgXW-Lzgnzsr2n()IWf9j7Z^SRXW6UJscMhihYQddoI$DJHw<|WE;Q#~Cg z-72wcvl6cLX%(5!#_1!}d6i7?(UbivXY;Wmz!nE)F~iNdd+8@@Y+6`~zpiFO3@Mek z$3aq`fXC8Sv06_Kqr)}vUl(gs-Jt(k-UkE7E}8q?3)%bQ*?mHe?nfKM_NFdc1#-Y);tz=W%`)+kvkLe>tsi|;KZf^6w;5GyDSWTBNE80u|xHdatwXc5r~{8a)^rI>IV21OC|#SU8HFqLs7I&&C9r%4N9yl#Hr%rI26S z6;g^_;j~#3|H|0lY!{H%3B`9(+KJS2X5r!cypD+d7yL8d*H3bB2l=3 zJ++$r9{B(Hvds!NlS;8%!wxHag5mg2F49ZM3)(^6;l+FmepCSrQ9*OgMR9hO0a~B2 z^Q}KQ>ksYNWmAknzRp;?-y2~nnYcTY{z&>!)-KLPSJ!%pb(z=;vN+b!E{o=U zIG1e%*#t{?X5?YGr5!A9da^$(3cLI2;wV{$+M)6E0+vX8o}~RN(@I{6_ot4D(dq^` zaUc*!qIIC!-HsgmeEeDL42zH{&=^cM={;@C=R5I+5?LuL%gCYqBmUd;RE)kyE=q7X z5*3)|dq-CClv0fHa7X8-8OXeu$+x%}-ZWKV`ZM|}LfPATvM=^6Iwp$8S>Qq>J$!1$ zIB(#Nj)x^U2tU}wlUf3q9jh)d#!6niKxk*W1YhuxB1(70QS zCeFGSvi)Go**8F%`SuLO_AT}#G3V}0CdX8wyDI;~V!c2b9+*mMrw z?zh5Ta(DAy6kt$Vt;FX^x zedcq3_y0>6m!a#T 
za&)v5lJh2mL5o^N%F)rd&z_!_^!M&FrWO`mfEW#O^D;Qs-Hb${13RLM3bF5N20j~; zGgb9o)Zcj~q#FznVC%#FI19)HvnR4&38r85z__!~&~%@J?tM(Lq=5aY-7`7sSK>p^ zPvLO;tuP?BchdVfw2$HEEr>p{btU*e-p;bG>g;{nHrUF zyFu)(8OPDFyB)i`TkP(3^uB)om#`n)Pv;}EHk-Y_>x%O{&QZ(U$XF9jkKa*j|7AsS z{c`M=WU;rl+z*>;2q&~I?OpoOX{rMzH7uEBCo_|Z41$k)aA}w1yat71aYk}Lj~3GK zX*}7wB^q;ro4a(R)wO%7tll72b{o%1nUjtD?82&qTrMWbzBtp5iHRv}vam*9_Khz_ zrZIo{G&HTR8@9i!X5Ot*HEpa&f0W8|;qaJzbtXV^CikB7WbdmeA|5B=oM1v;wMuH6 zXV78c45IUEkRSC?t$bmCU3>BNzSZY^bzgRvXQCJA#vc`BxibDQSCsf=e9YbWxokFdWM8xKk(@WG z7R|cVMCYw8PyGG3VJv@#6jxU5%AwT_FH&PdSbRSgqxR-h^_D!#F6p$$kUYMquJn8I zPL;^K^`XNsKKq)m=beYJ$dfpaBde}D;`KI;Dw#E|65s8^4B0CkkUDZr3%r-_mmCh6 zgXqVje@%}~!Z}HJoXV?cDARF4?{);W-8kH^_ZHTrz!7HQ|^JLIdFP| z?y61l3~egh-qo2?NcPqhO|%1svDwU>B7C>=W9cu?>oL*?9Xe9RNWZBBnN8E!Z=oq? z!MDW^b!pWpRVj7S3bSy+a!t9MWW%f$x$J1-!kI2(ndB5hjEm&W3*T+z(5b>ykk46W z9ftT`QGd6Se1OxF`1QSjnu^9aV~Q&xpSdy2wHOgty?O5Nw>29({Uu_R^U+iSdB1ws{XuZ#JeTq zXy&L(XYrAI3Qs3g_UTPqg%W;Sa)a7h^E0S~h#Oh5lPT0#PoybFH?HSBmEZS~(z|cY z8uw84&JgZzg)1GThZ;2BlbY}2_$188b1M|*HkFZ9EsHfPDsVp6h_sTX9P8Mh@s^%U z6CeMF4Zh+R&cvyw=(hLzQ#c`>C(k7Z)j@bO8)f!0rIJR&KdPUh57opR6H?MbIdtC; zt1x$&lS+1}v5)BU$I)4srhjZRWu5f4~c&M zFE^GLh>v)xaH;h@xOgg)E>}(2x2cr3r>62bO?aak>MraDHRj~!6KYL!1J>vaW1{S( z7R&dxYoug2?Qx=B>Nr~WO{L8$2Re<*XP;gUZJmVcxci2xlXG2Z>spX^Gm`v{1Bj^d z<&M10A^TjoASJC+{V zbh_igWB(AG-$&sg9G-rKW!x|ky`qjhCp0{7-r&!w-rDyn`imJAFT=3&GiJ|e57Mj( zm?S*9jLTu|iqEu85a(lLFB%Cm?Q#jX&0v~ne@9L3!2NBnRe#~E2i_RP z{O#gzyyVK*&uLtnZ%e3X8-g1|aB-?VX5xcg`F9q#bA^lZvpOT)?kPP#(T$b_^X-5M z>*5_)_$`Z=1FlS-5y;6yu^fmtX18r2egl(OS}Fbj4d*DD?#G%X?os!w)~mJho-Ei~ z#Fp{GXKW(*!_N6^uI511ju?6VOSa@sQ^}g2%MBml!sX5(uF)HH!s?R>TxrIwUn7Ve zDRZP3UIgjq61TvOF1``0zciJ}2kdF*B)M$1d9+ztf~My=eEp#AkGZJc3GZm}rU-)G zneojUe1^z#qOS+rE(TJS8;N(u01`R~ch@a~6(wb~=~7qv?x*T@x)F!=PbMZ&>X2M# z9`Bbt1L0$?8Zd&RYvjIdZ^?$0Wn5|~9B$!=WYw-uZs*HtOMg9FBm=6^Ts`XibmQ>6 zG)}j36o-s23HOAZwBC}RWu*j(_i)aSB9@GAhqe15^*eYVzMFzr+{lXh;stCPCw{Jr zaObBFOEbdhbJL2?!lamTbt(&<=P@d!4vVTzD+e!AniWi9+I$m^m`MKESmBkdv7>eK 
zvHz!|nl7H|hfRgAze0R$kBWrNcR)p#{#IGyA4?M^+v2=F*esBorr9yr8`*PVU@(#O zCvrf1xkutk`Q1B#OR19EqoJN5>B(C)OYgC|K3tC@Hle(foMN3r4vZg_!|86qq5C+Q z_M#Qt_sWuO4kgUBNn^cjna2Cuy=&X@M0jgoY8f(rPdK>`ELb~5GHCXTm;avPAIU%3 zIDIVPlAZjoaS=m%W#d-8kTd7sDPylIszG(h)t(c~SPLWaFI!`2BV4Kj4rC4sVz10? z1`3Dh-jF;Vyp175yq($pUzEkyS`4V@$DF&4LoAtEqWj`61Cu-57sdB{Z?3&$=nx{=-~6r4~dc7NR5E66cyh+#3~v>1Pu% z5(;T5b36;r5-Kifajx@w_42kJA<{$b5xqnFdtvE?>j_QIYk#LoW!XJYtJ@3TyR&#lHrS%Oz?ItaI(53b@pyIu;q4OH?=L*FJ>u(s zB)kW|GJ3wcr8f2wP3_OYlJDS;v7RaJA0@9WIi2ieJ7!7-)v%Y6Q90UyBQxHBhArH}g}10w@{UYwHI=gbs7 zR~573ZB2H~+M~W7HWdzDFfWB`J9M-&MUOKH-6#Cf+yG8*PUmQ;6^3u~DRxe0{Mvk~ zwlw2r!3A}FZ-0&&i?>($^ijWImCVKU8rl&i9+P$pCUI|~g|IpbS?Q5P%I-o|4HM3S z!EIIj`e1fOh0$u05n=6}8Q_!2Vd4HvTq1e7;+wCLFZ>&uQgo}QkP};~u?|A3R&{>% zxT6B@m{RN7c;XujX8khBE0;aVnR%XAZy3r(*$+(=&g|o-az8Xr;7E+-x(iM78#T?r zxoy^jgp1$RD%b9)W2X3uHs?ZbJINWyVD@oWK6?i8@Zn@0T(%PJXf{h4#*lV6k3(Kv z*x^)@9eVwl_&+b-dU%{CIcbvjdB|LHWuFYga8wju#e?`TU-H;4Wbmi-k0&PHQ0rIU zR+iNa`1+T>@KW{o(#e`D>oR!W(w6tOV_6dz&YL!p4^b#{?}T`=>N** z(eRhb-!_OI)kC?~){=8l56;WXW^sxqTV#*k!#34yY$5;Mllw@kgt*`aw156m z-8wy(_QOWuEB)2+VGbPGl)-*odv+}vMsnsPPRl)=G^>btGOt?Dponcd4yi8F|54Ll z`BU1h02lH9585UD*b4DHr`vOPLIf)<(!`Ts$at?xVMJtMdto{uAKEkN)B|NwU?8>o zP(Ic+5vqg}uU@31^U4}6`A%$i2%}S$WKzWyF#mNH&S!Eo{<&U@o6|$MaAzkBrb%8n zuS1L_Z_9yRAG7$h%~3Q=KCB!Y$36=S$_|xco|euksp~Y=QGdSvTCL}Vdc0eQ;1+(2 zxMM?)iS`5qWYTQ4qud!&C^wD4?Wi5M>*Zr7Gxwc_1seN$G}X`hm(^g&gEwl>=^^Y) z6+U9DHF?SQG?G9fKGpVdv1XzW?c4ad4z9D1Uv)idNxM<34C zvLte$JK5i|+0x90cC)5nDgL*yNun)`k(}&)#cUG>`_pzcxsY3*C&F#|-&{AV#1Ov> zdvq(($h2^wZ`Dv*ogR+sbRZ3F(J{W?Q$dhTI?n?uHTLmonya2yN1Fxh?x@668>+--yV`Sz_#y<8 zYLt)dYDG-#5yBap#JOu`l51PWKOQ;MUnYAQ4f{>QvtQKLy026tTU}D5uI;^A5+aOU zh(98FVl!ttiuU;Dy9f?`uwdmU(bqMUK73X&n&$CodT*Zd>&5ckuhp9WgtK}%f;rtS z=-XWKDs-}medR_1l0p4UGLJ^PqQ?v>o99q>TqRxCRb$kduWG;XU}3`gbFRJ2=Z`t! 
z_$-U-7hJh{ZY=f6BU#f_v?liod2g6T=%Qi@L%yr~N8hOr;*UC5F+x1+{kSy11ZBe{fs87yo;nNO|BT>7ElRn}@xp+l6o9nZ6O^DR=iokEqgxwWw%{xyP zipH^Yb|RG-)~vQDW>Rtn#mh@I-lt~gHe|fXH8rEql(RL)Q=>`;%Rh%PJR+GVtL=GV z;)BlOB$^F%rRL}go)5}np7g6*ySHa>UCxO0zm#^6c&xrkKEmGN40s`bzxkp~yd&O^fvzm?GnQ^oCNgq?ejP0L)uQPT0~$$x(`ln61KZ2JX`4fhD9H(063p?Pp}1E`zjd~NMjulc z-CS}zMF;QK=d;rNtV1Vr2M#{8VB>V-qDjI;IiFnGMb?`jllsAl}_?~!8 zT4=_{)O3$taHbhqmXFl&9i}*53!q_zF^5(QV=_eUhqs`$U<5gjBYAVv1^2qe%)OY) z!+-KMp3`djJbU)6p5%FIQ=?)a{nmJ~__87U{&DBKRTjg%J<;~^1@QM)x1e*yFtNtdRbF={R=pF(!GZ1BZntKIVoSSH&l#_d6crjRst4 zSV)VDal$kx*0}Fq;ogE7GcKqf+pKBkB`glTf&4TT58jg$^223rW9-IE(b6~cazXd^ zEL@ff*I<7+p|2a#_V#P#cU*_`eeNVnO+WFABd?~4{w2kkpC-Ws{EEcd%a*0z#LF9< zj`eh@=kuHL=a7#oXwo2-|4(x{*OGN<4zzR6CVaaaKgRph?r{t^Cz_Fer;t|(NxVpw zOcV{z(=;zc)ARcE)W^v3gUZ-3fSwh;Z2RKOl?o@0TomTZ2{&$NIWcleG}GF+lY6~{ zR!1ej>U}AbE;Ycv_?~h(EAzUt2|RgZ#KiByRrx9N$O_>|d>e`T?PwmpvZ17p=nc1I z<6~HYruD9-+%!$|UNqHRXgcqSu&pWm_em8p%M|T@#!@%Hh~7)>DVdN%>psrxlx*;& z&l2dl#*DTxh1lz-QqV*=tlyh3@xPy{ z_Yj`%ts6y)!>CO4NfFsCc2w>*CiKmK^n$|j@$_Kz;Hscc+QOJ zyioGBA0|`urG%;JKhzbg>fDYR!pHUgtdsdt=tn1N$^3fENKZ=p2lKUaG^GM?fzp|sAXogP;S9-bl_lIeh645M% zSEouf)+=gS*ZtG@tEwRZXAABP;qB$2yt!`1YT2*6OB1feFFTypPrz_lC=Eia>2zN5 zF#2W?-6o&3OSPF4_*I>qq05cPVD>wh5zyZQ=T2EoRLny0Ag9$i!ZwWiN`O`k8C_Kj^j-%u8g*VQ&FBc=<#-JrS^FM^yoE_;T6em+#m z?xb$BWSnORFFwCQvOuF*pEra3`u5zpSD!YLNi=lC0KPUC&z|sqF0P!-%VF-M8cyd{ z^U2KA%i>%_9y-lt(=I$0+sG!Qh|c)UW*y!x8qWI}mKgebFf%oq5gs1GybL1TIhGHF zwp_C;W0Z3yzgm}ReBRsl&9FQn`IPO<=`%2t1q;Nt)WMOZ6LXmv?1957FWJGQ5+nSV zchyUoU7pO$b`{)T@KycnTAeMKhU}g=hS{4;sIqay>0S=wrnzxu^Eg(?uBq&zF`+X{ zsO%(~zx!p}>{y+`t~XUJLld@*iK68zLuw>B;7}`@l< zCQ02EYs~G{be?4~w?39TkEuBWBt!8;o-o7pm@b)W3#aCxJI0--CcfAwrIX*^gn*MX zMcb0ayp=O?ZPJK_t#7ORDgAh3=Ed4Cw!C>NK9ol}c%JY^drdGIeo43uGMC+8F0(Gh z;UoFl<9Gc~Tz#$@Tgd#SXejT$SWzd!9;anFST}Ux>$z~r42$AGH#qZO9uL>1@w`zX z0moaCa_pVDdHR1ms}XFSJeca?9{jvM1+OkLPnJDSr$)&niw~prhcbHX52NeHGBnlc zXnNnKcK)qe%=)Zet=GqN%4nufu%mAYObO4U%R5(wkD5rN$9S5|75`h6%t&S=v0C~+ 
zO?e5vbLx`g^FZYu7)-3sI6UQhc<->}+MxXBx5m>olZY}Z*ds>VC=$i}GRt3>TZz^4uT5{%8iR{#}`1fuJ z`Tu-Wt6H8@kK7G8*C|9in8JCMz38Id*~Gtgqx}!bg!(p>yhi#|lYLyPp^0qXQ>^jN zjoVh6*ltzo^Cdmr?)2xamI!_DM8coW*U7` zZI9hlvs;@pHY0}g&XOM#>qWv}qSKKbK(ftvVhfWg%Y$bRh1+^EhYIs@YNXZT*5S+Q zO|pe>pc2swOtr@c)HYJ1!NkOq3Lxzol!VVpD^7H-q*X;Wwbo42_;u2*9<7r0tHuk> zIFp{u56N+VCViEKrD#}Nx-nHZ811JCtp8=sbkhp<+KNBrfpCo@->Het;+y|5gcaf8 zvI{rh?@zLSkUqSZq(v_B z=k&|KWxOM1W=^;^PQv?tJL?Ow?^uyeZdw@wAKz3H3!keo{S771bTmg~F83o=Np@ET zKbO1me8dR)ybY&YPA|?s$YVg}L}9QO%Z#}uNBTciKaLG%86#NHQD&@19XL@g-Ywxk z)*Cp1$@8OG{?eM7qCc9~DV+yN#q^Wxfc}48Qf9)B3K|;1u1yx~>Ec3(buN}KJoxr5 znC5lUsgRvgu<({oX=f1hrIgUXhIBY|U;Wv52%AraVI5`8$q&MhSewK0`K}}v1yWKi zo^x{~FY#T8cxW;xno`Dp-k;QhfE((he6ANnPG#U{Lne1}Vtr7aWXZY_JUf)0vPa)N zNxpxU!nbkFqW=2|W=Sq^vpLmquC2r2)$X*?v&N&AGslie-|@tQhLyqG?>~jc2^K8Z zDx}%6WNJ(-X2-=AtXcb9HMnF<@mR^V&NSdfrVCdMh1V&%l&|7(9Q{&w#vw+`doqpw zrD+_yFFM@EU)6-;-_=jI0jv})UAes_R-fE>W|_^EVZIFRGM-oeBvOG1l?FvDYL~#V zMC#oTOgMR`TTI|@ZOMPAw57lJeaDAMPN>Tm+&rQPsxZSmU33XC=}ethtnvGK zp}RI8CcaZ=we{)WRWyHV3`rJ!YMMnB%_a%g;q5SfZJQ!`YZKyo%DwqY_*nk*& z#~Qy^nR^FGCYL|67E1pov*9bEPafYxv;j$Dd3!L7tFx@wzdN5^t{IsAS4e|?jq$B| zr+Pc}A*6*nN!gNTZsg96DXGjDB3vY6VZ_fC4$lk+n!CtdWSv22mwb)?e($f=V&%6w zoK5J@yKc_38E1-dO>YWLWw3IRH?a-Ja3d~`q<2p@IoR~D3+OHG?jua9mJB9AG3s^tkk(y>vrDpUp#NqI0YBsVYR{nm;7bM3* z&yKnOB~slbQue4`ba5?ZTy(BvBTIc&+M4J2=TxDO5ocbGW?COJ7C0*2#N-mw&6=!m zVHkD~<9%0Kaw6oMglryM$miSAm+I1?+p2J&1qq47~0NpsO->kM~#Xr=Pl+L1dKT}hdi!n~R`_!yLM zw`~T?CC}(n8jR6C|d`VYaSt7yMQ88Wr46Z5k&S#ZOS#QsCkdM8;4)oj=_x{$2%a^9_c zjz#}e&3#{}8S#e9bQ#GU$+zES>O$*bIeZ`D#=!~WXzQLpi*c6x+E_&PN#S*^lKZx# z5x18~K0JQz5_kV%){uupDv#3w$6(4ZLV=-OfXM)Eh}^2Og-$`GTUs@<$l>XYbE zR!A*9?4B`4n!6G%wc4TmZWI}XaN=V$2I6%s@+%?HSMIU)r8Mf*g8sMeDc>4KVfy73R=LLu$&n7p;MDCBjnDf#wiQjQzfktV-8eWRkik>r zIrH8LyRDf#uyEv%%rPyS$8s@Hxb16Y?&gw#wfF$%4ZW=*0}rVS_f5!elfsk;12P-f zqwgSDw})LRxiy}y@|-9%)o1gBGAe5(vL$euAfW=KXrrz`wg5+WFpW z79H~E#j=BGmd%XUk`r4H$Zg>#+nQTYTq-=xE}|DcRDx&QCKRr`u5u0-({^VFtq$nZ 
z;hY;Ay|dV;??mg_ek@y)%&KTxE@_uxcqvP~`o)a7(vpKE_m$flJ#5a6BzBOAWE;CP zd#~uMj!VA6+@VA)ke*#jSlC~S8L}{w(mq8RpZ6@FK0U45vf_FlVf;yk%mhoKwS&lf zp2Z*17wnunntvN-F=Cx!Wl1HEG9_!O-r;4xnR425J8?otJ2pn;nznGe5wF-fKLWqn#yvp9gh91F5@A zbdJF;%-C1RPVuh&D}4Q%Eo<_A^gVUXM4vVXL+IPZnt=w^?5~q2`I0smOq)ovh0>E% z*kJZ3k8Vw~gnw7S>9lQO8pJjn9VQymmY%WDn~yY$(PqnN&&5x$BHP`_CrwN1!($g%zxk z=Zn@qMa<5sL-w`ZYF6Z6a!sQcwppK)8xFLoo58V$o(#SxZ0hQ1qR|}4xcx zR%IIh+`xTx@oN1`X$gn!Q<(=j{}^#(lmkzMH}}Rxa$lzSb7A%bdSqKtC3}9W?!sp2 zA^OpX`eZG+u58~LU@xp>OQ}Z&1^JRJcgcPoTOxF12l6?ViBbMUL`x21-4fCYbI|m> zjXN9ge#|*FV~Yj;R#DstAA-#o2eyCCp_{w>4SxI6uvH>y^{hzwR7%bMISgoCO3Lb~ z%DgH`=@w3)S=%i7_v=k+SZ_*hPiDJWJz`_i86jS^8m1$d*)5xOhh#Q*F#?YX_4qL8 zjY@ka`38ypbTAPfTZJp5O46C*LI##Ojd7=A0s70lh)TEXs zj1+zAuhRoq)y9+k!6_8}a-j9ik-SKXC+LPXqka^VAe=eVB?TJ4FPfe=GUlVYHSdCY z{==Amjz;pMo#dW5J1{|fgMI!tN7)v{k>d#jCK)iaMhOd6Cv#rr6TYIWeE#iI-sk(L<2&H1s=EDH^}DFUsDg29 zxGViwjw^q($f3Bam-v+?v+fVcVG@6IPy15lolIf6d%4EH-`YX9)X0&K)fm}7_+1#y zqU8=O@sK^o8)4cM3x8r=3KcSY&5Mz|v$P6wXC!mv?KBqHYSXJ&_LM{PDQiEL+4F@n zyUB@-(Rr*)cBgXRIKtm1@NJdCRQ8*@{?3%wUCOQ-wYZ!9OVx|gq2>Oe)U1>ot0(U4 z?wBP^M=$2w8^vV5c)pvMb0kf8CE6+MD=(!<#Ba57#A^u=(UUqmSoWUg+;VWGVn8;h z`iTzLDU^_*vH0j)vgO(T^hc@O&Mu?Y+ym;~lg(;<&`1V%&nESQ3!g<(rJGR1UROUF z63X@+!f6@Ml@`Kh@2M5R_3JY@GPf?hOYbQ+;U*W>58`E1QhyFEL`~@^-uX+)ubC0w<3@6In=!u^2&X$H1D#3svJ!>Nrt7Acg zJX>nkOhDVMNZ3#J)eXOEYGH-UYc@x+?uh8DdW*iwDUbg`oT;u8hxf*K79J5jV{{qu zGXD$kEn|D5Dz*LKZuR-Ia0aX5*nCuveX_%~cv-@11L1dVibdyLB>R^cbET2!H|1Vz zVpq;Uy=vrI{#MJI58-C+aGV-SzDIxIbe)xXt&sfDD`l)7CS07C6*TD3 zSZ0CuRC3_zYfm5fkt+hN$r^r6MrWPYx z-%wdgO!%XB6tjNok@>F=4^L-u>YY2dvYcd@19sn+63 zzdrmm$dN^nW<0v>LcJEUfB0K;juzfL-w;99HC^imGu{#G2S1)=2?nKKxa+R{8C6sP&p$F;ShP~OM3t7MKRxpO|hf2i6AUZ`iC4Y@vFe6$57 zBxOooNW;%3u0_oN}KEFY+$+Xhk3L2}fO8B?O? 
zEKHenIz`$@zV`@rT#n+xE@uYi$ZYe^Oy-K-pW-HD?7pSO#2HZWbppke|Ks<`e)2{t zCM_IkWbZ{{ejNErZD`f8ochhg3-O?s?PVWS#q7^2xXOsE(V}(fEPL3Mwj}(Q&E^6J z(gVj+w09ErkIW_euaE~h8KM&|7X5}64LhDy%Q^}t@A5<**c$RG(uFqdviNY&8t+Du z1rj@nxRI{79xbM2aURXg@~QK&2JZh#rs?HA{5CR@n%$D)Qp4%C&!pQ>4-!0t>)=0$ zwCz@SjTb#ad=iu7`**2UE!HmIs$3SC(?2(pLE8sQ?u#9D?_{y>^Wh=bm0cMPnPGz0!P}c7=huPaMFhgk85Hf zeI3$yZB|GiKh$`gYJ5931V`Th-d!-l>b#@q{j+F%!<9AfM`LnrGV5iZz}$TvJPO?R|A|kOJ6K?WXe=eRxc7B+4LlNR*7%tbOD~VV^}70 z+FeIKDlZ zPJVi&2Fi2rXiecB$UgkmOGk{HvhkLFzG~-i4jhZbN_hXxuF2j)e33h%%Q4jZrat%h zq55qV-OPztT8IwBHP{R9?O6;q9?H9^qgi|@k#|yGy3d)8O~a|ou$U?H_Qsg*_^M{k zGhpxhk!)#dM&unw-e1kZ>#z%V?~g#cO*En1tSGHkO7IBbTX&T{NyGEz9%#=e-6v|9 zvkvv2%kyZt8MO{NQLr^0vpC`Ny%pc|%b5RTN<_aXl&q^rdOs-C_;qS}o=)>x9K3s1 z^{qFM;uDkJ2evP{7>GPwHjH26cIf z5!+?oV>V15yH!w2^vN?zY#C`ihF*@*7$=%ahFu|_-=uT=qvTK>uY>lg1FF5$XX`g+ zU{!9!hozG1BsmTxmz?k{51{j#IQ+$*J7ZiKC;m=n+JFjzOIo8_f4`d5(S#bJ0srM> zEbI_B`UK?=ao3CcZ#;M?JGcWnru_Fs8FH}2C$=BG3%O5Moa2fW9@cKE;*y}MgI{rGlHBt282jHXma%o z0xO;58R024%v8xWa->D43OY~7rtPQ_R?TjUZQvDkx$j^Ggk=g7%8Hu*Ig3}ogT)KH zNN(4g^=_HeTISBKMswI0DLUDNxx~hNQF%63RE4h|&y!*p(t=@&oj&gJPh$*t}- zg{$F{Sd=a~qQYrgbUGbJf7!>iZ_PQwhicHwA>_#pRIjOMw(q(VC|=-_V@~|pIGS2= zSAV$U$7Q+KN6i;DU!xM?Znoj)xx4D~+adhC8O_f~@iP~=(5<@UdVX;u<(38UQTl45teG73az+fg`Xn*N-A zwV%p%?R~X<#SnBxjUvs}n1Gz zeQebbk^(1UAv~VVQ6BW2nk`w>Ze)&(#H@8P_9LYp6h7Fno}!bkQO;VuI;4HOs#504 z9M3$G&&hg(_LM%pZ7S1ROP?y*yit$h*fYe6Gs0of9VwsBtP(zd-Kd6~*s0Qb1<+M$ zq_*`3aj?4h21}AL^wH;a!31nBh0!w9j=`NJmuqYu2c<`_Hta%0^`k0N_~o5iMAB8} ztwkNZX%w9-*|^S9n>jMXUsyNQ#0T+T1w-UJbzx|!#^+g@*5i)@U)AfZ0i19cN}0Xr zJ;I@tUlzx^+tYIKaD2{&a7A<#%VG=2D@|n8lR}MO=it}%@$S@w=O6lU#SS9Z84KTg zD22x|gxBZGRjmNl&WPiVza^D_ml50}m7U^O)AW6*Ci|RDlYXmebNlnX%!@t!jZkM? 
zDe#v1^SujU1O4c>bqeXP%=xpw_-m#nQG18%e_M6oX|)e(=$gTNT@%RLOf&I(NxxB^ zg|({_8)iGwPFMH^viH1kumt0xL@w-+++*+Nq+k1>c0bi4>h36}O)(;3gb#=2XVSFF zg||2SNf4b!?aK~a{ahhVx*W2kZVmXSKDWeA(j6myZW%@`H*-4w?ZB%1TwY#tr{1_g z-qeg{Zj>p12z6n_wREYO%DBIzHif-Es6@$4>RaMN^+a=A*1M6lFq`O=URYlljs4mf{DTJvWl9}^Nvi2$q z=~pKk-{b;zu9E$x`CAn}$B1Rpt51Go!1)n&xGc_Qqo;6}^}WQe9L}b`(&t%~^YUUU zPsUZSc4u|Eop_=Aq@KUhJ{Y@wmbgr`V^ZflCcSsVUwB9fBO=%-P|2@`a^9pAYKsmj zYvUG`r?*Y@N)Os^Lt({Ydg83Z4p6QaTr^k+p$tOrQQ6}2$_?Q>%E#( zTmM?!5q-!c-!QKKZ$8+=jlQFWJGR3OuT|n@+8D#f(H1zWGD;VvV;WyZ)-Wv|OD|WL zKA6An1)_7^go3g1@5K*$Y>WqqO-HiBJ%S03gjbSO&iFr5#dlo6%abjr8TVA##hEaF z+XQ*e88Jn4Fa1VjVVp1bOPF{grzhijU-B?j1wPiPm`|vn)Uze4v>vGg3wrYFp&Osa zMsdEDBWWJxtkRd9%3GymiI2hRWF($13P?7P+HQIw=FjVhR`sbWh%#gUzOmdGJBZI6 z-8ir#m5&>2XnE9!3&W+J?=L$@c@7SHnMr!%LUj8!fL;Tm11K$#(x_!0;4XwpnG< zd9D-Rn)$QvcLMt?4TU{iObgwqT$cH+;|DD|9e<^UjMkG`ls9#BjEKAEL(10_YKUL0 z&ZaTK-MHI-q$}}TifhLfqZT4&Vqsc`Nzwj>(ckt$+KtaB5U4n zjwfBRlqQWRrm0r~N3%q0tKsX^^gPpTEx5Mqw)%JVKn9jb2F*(&^jh0-M0C2a@phR5nYc>rUa+e7D0V zDuMT}tXXL}ji1SB!q+e7z`SOBw7;rmZ#Kj8a2OVShVai-A9ka; zw60*(>;k53FBXkqV{Qb$RL{l@;>eF+CJh(;Q62)tWBL=CsfEB@x91A{g1V#gg$ZMbSud-pWsR2@gUh zM@{C`yQH>=F7K4gUv@@IpWf7k9^%=XeA<=n!^Y5QK_W#@EqRB6DU)K4N)G4y?VU zOnVz)D!<>c_a@96=t9mvll&2#64n>hmpp~1YVIim zlBb1mRO-51*?qr}&*x?7k2`3&GI(MXW``x0RbHoW=XhQ}ETvt)hP+?%OVuio|6Xkj z<&vBGhj@Fdnq=}#bX^NZ_;ad31OsYXbFfD_0n_C*m^knQzLmnBp zbIvu9VY?hfJLbt>2PYGK)`l8ii-`X#iOnem95$#!#Cp*lYKiN3^JJD>Hs#O#qLULn z*rhQ(Y_2g9>(BA@=_nk#p=J znM8*pF~Z+A#PmWL$J|oTI$MG1vATT7zpH9|(?e^-c=qU;aJQa49viY~@c2rp#=qY(DrJU-a#q_oRsXo!2rDMo<`^=jk(2=7YCIoAY~z6aB?kSI{tucK_=ug-3R0 zq-2xlmJ&IjEroABC|{W;x@ZM))Y}04K8p3NwX5Da}u8Ig?KC{ zmGI}0Mr=v1&CYKFaX;@y@4cpMz39w=^Qj!1TG&p0CA_t5 z#j_!`N!d4$2?yM9jk0F_6&F4V``h_X7yi05itSzz4DIJgpneGh=1VcJ0*TVw#XOpZrTBJ&HJTJM5OmK*W))uHo-frhZzDWAL~~-aJ_{0j zan;FUxwZ7SPvvggm&D+QuUkb=yHodcuoL{uB!7V&x5f@5 zz9byqN*lIYmdPwg<`dy1B;Tw>?9S)P>5&dk*9f~`>Vl4Ig`e~#p3OI;Zrv)Guc1*m z?AB#c4cVXPhq0hZ5xRTo@_hMP<+xt_DZ>khh}GxgYCozPWf9WWnO8Z%_;`q>wa69E 
zvKiP2o5R?qT;ucl^=L|+rw`TGxIr@C9?QcNV|r;j(@ZjbO%h$`ILL?7eWJOmWzMfS z;b$2qv;18t*2b-A^XRTJ*($t?tz)^AV8Got2NK2OWD{mX!{2VSNeQP&=3{+3l(27n zI`1rtDB5vTX?vF|*J>fcYp4`%njVYq37=*kd2j~BLWhJ63RN@&y8mmJwUHj-II)Btb1E|)NQr0~{GR|YNRFn|Xr^nUFbNyNjiJTlG>a4#UJ37;?NugQ z?x{2jD}wgL<2S{W_QzyCDEp8q8*85T7kzMfI3dm4ne<8Y{l9bgtCskFcQxlgt130# zUyro0A(H)S#EQ!@7wjc_i*2s75#EKLQ38F(8&fLZ+htNy)jL{-f1x&p+n=db&GcE6 zISxx}Q%o$J*gj4^pXM$+Jtce3&}d%D{%EUf312n~U%y?c@FE)EVE9|r%kR&@`*N@Q zOC4e)5Y^AI%BIK*;|+;(K)7wklJ-wDSiL&MT1yd+Z~-OQ^GI# zuu=4d+QO}mUl)Yk)l`OL3yZK_FX_%ss!ESy*%u=9YpCQ}tnHc2 zr~#74ej$|aZG|;dsmMB#L!JW=QVj=1H<;u)bUsSn03!hvT)2edC!i*kD~uHbVm2_XufWY zl}&UAFfUVI_lntO@fB7h^hQT&%}#K{@? z6kUrVczh967EMTf`cm!ru0!|^Kb{MRVQ7#GvnHp~XoEXGA3bni70=8WruYw&y=X}c zt7aDSG_@VUC#%#wF9T{l4d?VIBSOx4a=S`0!>ygrHuo0qbrSj>j?_Okjm2GZ*qu2s;O%m3+Bs_Keg?Y2KFn&i1l;Z2(o@xKSR4axIV(~F=QfnBZ^VEgl z^~vOHaUl1t>`23e2|WX1&xxMzPO|hn1q@r$jG5=F)QXS#9IZ79AHV)Qd@O#a#8@81 zI8Z6S@7uAlY}la3rR~BM4w%f~%~Cg|zEfY4U#R&NhP1pUKAZ3QG;f(BwVJ_e-m*EHKD^#SJ_SH@x0QDLxqui z&rC&McsmW_D_LSIeUsd~XO?Q={@-1dnrXrv*?SK^HHZVPeL3+gjpU_H>@e_W%Y><< z8rjKwd>R>9(!b9vVSks8Ds#>)*&!L>;X9rmC#^Uo-@jksxvYLQf;aJ_STHx0n@jp~ zcW@yyOv3nDT%qxK$q)Zij&mNX1y97+YUa<>WGmjjm7FxmDSjn$hQ8g#GokwwmP-BE zXryE*zD#Dgc_F@yn{ZI)p1L?tm)cpuG=5@0)>E0giEi6zhdte74z{LKG`6X>tdMz# zi?h^;mkY3L-H6P?N7QKHg8g?jf=v@G@!sJ;Q`ze$toNdF@<`Um?A&pHF{>^VQ*tAj zhu_2pt>Mo#4SAt16<<@O@e3b2q{4RUiSE)5KjB#pkms~*^BjJ(mwAZXQDsJPgw+(?>zG28pG{`LOU=*Q zzv#ER5KyIl%@toqNH`OpTatJg0*B`iQ|Lm6I+CB7Gm%br4d{|uK$~ufY%-G1nT9`? 
z>d=7k!zDB1svfna_uAgpkW({kc~zQD??eXzF1yox=qlTyrxP9aa%5Unf)9sFdh-57pzJ|Btt` ze5f+(zdooSDA;0m2S|gI>_s;M0@BhA(#@eeMU1g^?AEb6$8PLaL_x51%xB%dKjFN1 z-p}?91OHZPWNq1&;964}yjD9y&tW09jAgb% ze}UBNo%8F4K2;aRMCxX`7ZY%oy4Dd*3j|KgMPPt61}M|JUz`HTG!2aUQ3=PaWW?!K zGq*0wv+z@~=$SgEh6G?mh!Gb5V~uw2LcDRb#|^CzSQ#Xtry9HAR+Zz0FMC|M&zAQ6 zjZIZV*TeUOiG>!fRRyBKOc(3J9PsD`^8=<%P)cz}+}>KZ-*dvtp7i0e)BdTB9mY(^!=d*AYVGsTay}PF|1{u7#{lHq zSRoA7+TvZ$3Y0oeLg`2PtY*)}CU*;b(=fuCngaBq2Rx&B0Rs4aSlhD>gKzhT{yI72 ztR2t&&~&_AXM(63j(9vy0-q#1tT6S#Xy(AiRN3O-;!=Domtgb25~wNfM3EWy;k!TYEhpb%qD$l@UJ?`WC=HfwQ6^=_zh%PtoijIq; zm^F06!=5&HNPSnL8h|%9s1F;5VVQF}c0M;jGI^s#qB=yKU zIgw<{(MNR}?4z#633BiUSybZYrn5r6^^!;`)5qXzIoM^V3y;SZaM>CScqH6V5%=QQMil+;`(~?4ccWY;0h-HU@3av|;2_ zf;Np992{SQH5H05J$+B?@YTY=g;6*+oWAY7wumFw_G^eEESPQZPff?qo;rByQw`;w zDfA(6AJX%;7+oiiouekhA=VHr4V<(4TR?&Pg3kkN5x6xNk@*pLmS}`KlZtR+eHzB= zOTV7am0q~>p-uRbA89_4jL@LzaH?c~?)O4;)3UJH z68wGTw7BehOKebOANQ{WB#za?UUFDM)g(Chj}6-A@*dtd0|%Cpzcp83XqnCKzSrC9^HF_+~?AZZbi?4^d$2O{z`gLz^Dq!cF z7Gd;D6ZY^yJoVUb%zd8eEx`p}dyEU4g&UqJaJDzZ8Q#xa9Mf^Ziu*M8A=rNQns{
q~X-)oM?Uy4wnk&Ae@LTujM4ckMW3gZ(x_|iQ92V}@A zy>HF@Y8KMBTjQCRBR=u(+MzZbdCSY8XPd^py>hA7Yg1(2H~W?NhyE88+1W^P)Wx=0 zR*2{Pr9;LB`cIr-u8@GwWrisCrLOTL1OBoVQm+@N`bYfMdLt$)O~h8t`(_pzqhX*e zd^m@lVB$b_OBfPcl5mcBk%JG5FlkK&M(C8_h;d)ebDKqcjT*|~hTn4Jf@|2JO`G#% z`qIZPa{}%#rygX2)cs{hl}pE80cFUw>xHPzA4Q77Bt(#fw0@Zx%=p|K)|`*FOSVv% z5svhy36Q*YLZlpfxjhQ#*)D>AjtZJ34@7XBHryt8;oO30=(mepRr;wa3N0}0KJ#*! z3AkuOK7)i_2H9kodUAf7eo9RKyk00Vlf2_bA=cyrZLQk)5L1r(D!@6D2z-vB9%(cI zU&Gli`5_%M-D)v1!IwY1)di1J6~Xr`W0?ivnc^lUp44ksAaciHjX^4 zmwLS$y_7IE^^H)|n1(K@u1JYBz?%=|oSRCZypOrvF7CK6J{Hs64I$T5jEz51u*1C+ z=Ld8`_t702*mKiveY6VGYDLSdLC$>i%ve=D%VZS+sG?$|BQ33YmmZSdn zFdUiDCfY-&Lr)LFF)tkqy5oeFBng?c%nx$UWhujbLAWLUdBXb=eTDCx$RXS}5RIIZ zj8oM>%X2S;_9mxhrv+B?_uf69eYiiYv3@^$^=x%9;%6lk&!wS(IaT@R%6OgFA;ykS zMe<@>40yuaJ@w(qCi#fqJu`#%!ANpW>|?BO!n+i2*r)5VvIN0N%4q(6R=ngrXku?N z-9Ao2R2LtJzzjS;N3HIYB~0l*S;;wNWBorw{eU-^=lO410X8qQ$0K)tT&53VZmJO+dY5CQ7x|DsDj@CabdvsvJH2Jl`?fkl@0)YB zVv0-cz>j%FSi-!8W#4EhBuAj{Yk~N+Jlp9>(D0@hy2`y!viP$&WIvVtdtMknkvV@E za_fDwadf*C&IXWOGA0tOL-f&bp&YN`QeZ~^%=XJeah~&zh@%?t$q2;6E{2Hy<%kW6 z|I2~d;P+KeB&ub?LZT|Zg&ikcYZ?W@Pam%Vrgs#y;$N5A=_cTOqf-?pe z<-qBrJ#vCma6Ky>%f_3bS4jh!^!cnPm)Mj$#v%>%egF$~fb71AR zxOHk4u9wFnk@=)D>Pq2lyk-yj8lsia^ppY|$B#v8wHcPKFvBHnXM9R6z?g|nDA$U_*yK3; zYix(tODiy}M*;rWRq%K5Ry@4uIuwvEQVJ;#;XY(l)vxG*-a8-5y3=i|~hkzbR?SP?|?A0}d!4 z@2N*c0hW9#LFJuFYc2k+u#*D%smIBBDuyI|19Hq)2#r1 zF+(lwdUxH~ z{ebijzRQ0s{2V#&7#x7{+YRyJye)>@EyT5rPOzsZv^_Kv>!%oF2lcbcfE*|qR^fi% zpJK|5Z{pH94W#pZja4y*=0EJ7jiVpztR>p=0x`KD2yTDae=A2#-ZdhOIL*U=UV;zbEYzQ+MtX z?|JIzCkL~|H5fi@O7Eexc zu5`^8+F2Ux12n_%Eje&I2jpxEK=I}XdcF1G8bFRtK4&yC+?PwaUi(u;i0gMn*_0Uw zTa$}k`&H4+-3>2|@)4%ufj60U2%^_wrI7|Qy&7?*EFB~L7hsyUJSu1I6ZU~^R zo2xZp`raBI>?@EtZjG28vr)Vw3q4c~@#jz#_ZWFtzKk3m?UB%b|4U5Wt%_P|Ub7F8 z@7>uJG9%K^w!j`-IUEqYJ zjrp9YKC`eZpS=4h8{{eH(^uz!{nULcA17m74Evy8mtq9{XH7Nq7)ZHZ*R?&d@Y^*J zK>g+4Mag);IoH}p%`zRbv zPr=k-?s%UHY@^p@t6UK@>Rga=Iv6*&7k}c;xu6PrI9{gVL`5Z}z0T9rUk2&l7R}dm zAlsFE%ySbVJJJ?c&n2Q)hy#}FG(zFQOmtG3f>r*^Pu~m2qraFVlJYuxtNjx3gHDT+ 
zPss(Vi^bSYx+veq9`A<*@V)E|zx9E*pH8j!h7tVat8gke7r)#7mq#Cj!`Eb>8>We? z-FzYWp^G8I?ch?AhxjQrP>FNI&jm3sjbguHH2FOy5(Fl)XEJ-4C_HjS*#B*g!{e&Z zY-fqJwKLHmmjij#nRrHhab9Z?+{l<))v^?!DX|E8xeS}E?ujg=J0grc`0>V`c=3g? zCwX9{Uq0r>Sz$y*Jg!@%;VX0856+ihY{y(g@|^nPbstm@cq+o_8*QV1P>Ft3y;JnkFn>MPZYdHrmb$JnEc- z@S_&Yx?7{uUHZcVAm*08?`5vQ zU?x0+j!Gnp4YV9*bTZT=IvuFO+LIzaudG0Il*4E>tDC-3ZW*MQo?wIup! z=)wQQ-qOfCxE>^zMatLBcNhR4nKtn*e;Pmh**Laoc>^@WldsnN64&7mr=T z%+SF3YX|uZi<`=@Xxs;}P`L}T8|gnf7Kl&2h8Q`CIT`v`lCtbkq8owgzTsFn)c|u0 zs$pi5jk#yptB@drz=L1K!@g7S?JRkIDn=+UaKP6?`N*RmdXH}iPUWPcW3ma{Gs@w$ zBn!cB`MhoaEfQ9I79pLqG5C4_zB0G_cbWqR&Cf?ZHI29%bI^ZdDta^;!MlUd$EPB_W=*VXY6osID5So(RbLl3UBXp!^pXh#r03q zF!bL5yc}i(tz2syav_)MG5als_#$CK5{|AmM*Z4yj5(ZzVZQW3|I-=PtH-J z=b_R17;uEXvt-VpN86!7XBIA|&c*RNT4+%!VSg<<0j8EiB~c!VT^@^%oa0n91w$m! z=cvRPeI9*QPi$bP?*&(dxyU=Dhb=zkh(QLt=TzeOu^-}8@hdTC0Dnii!H|8agD2$n zES{7PIqI&xj{2k6Hx7>-43M?B95)kl@IAi*4|?~+itL+W_iG)bRm7laH*<0wHW;yy zbE0G$JfRwyKZ-E)YZYGik|V3=r|9IPgMG8SalTL={@3i#mp*DQ`t06? z2jQDa47561LBG8MN6FFa`jC3jJ~@n-{zx=T(8joVG3f83kKsi#kx-Nmdjls#`^-Vp zS!S_HfZX^RXqM&UXF)Yqju?rlr<+CSWHm%@^@Yo3Lm$8C|Xa3thB%wB! z4ACW3INFU|CUf>R{gy}j$V0-i${Oj<$`D{V4d>07v#%}2d3LegJno4-W+^xyN0!CX zd6<2_h6Zamn7at0lH#!J(Gsj~?*Wy;55*Dweun48q1kvE%Hmy6xtx5B84h^1*%3k7x#%0i zo~hCGNM&x~z@-`}?C*hlm%j)d`VqE%@W=Dcyk9ii;WD`^;ce`%B3pj{)Ff=)X^c-! 
z)rj?%Aof-jdo^W|y|hhy?yZf879ULBM&0JF1zz*Hl4@^>&OxEjULH;dKC|YJOVD^O z4-4s;wKY;e^puaH{DLM~IQ~doKOOmRIPZNRftYWDAtysnJ)W$~bB;(+uBPT*0;TO` zSU2FCSRnIKEWfV_-zpDGjRf|PZ(65bjKW(sxS^7Y>X+>Myl#go*>ZfkS%4Ckas)If zp<(fBalF@5I3AeAP8FUvmzcBDHy^4?ISV!P!J$skh*)ogmAk2>j><%C6gd?0A zJkq~w!j*I1pP^33`zs5tG-i@#fE_K0U4_QbLNsW{{?1A`TqbJ{3D^=R^m!kM4G{!uKj)WFcSp3Lj$ zq3?J{%%^Ue({6|Fr+iTUEg3(p0o#WaXe1Y{<$Jl*uRBA2c}w71;e12`!_8*n%^Gkv zLJi{$XCm#6c=m-}zVC53FxV8`UX-B=Js9o(R7$;G)6*AXB`-|>v{gc|hPVn{N&pv#HxBB4l!ewT2x80*7}=eWqn{NB6SQRDGLeou0lVhow% ztbycNE@pMDg|@vS`s%d^nHn9q-uA#H=Fz`1dvc!)gfaS@`E~Y!?S(|l%Q5&rKfn9# zH2igj`(Bg&uncQwKeH~*>qjAWmJTxI*jJ&H2b=FsD7tKiWaV_6Uuy)}jrI8GNj{27 zJ>0j*V8*9!V)$w;M4Su4kzx9{!#V%?mIAcfyP*8@93;qRVw0W$eVH||GvR)hb4O`k zR~I0I37#LsyWP52t`&s_@*GR!+1XfHgx5;5@ZXt0$kXSTdQT7CnM3&JP!LD>y zz`z~{gzP|l98xDENXHO*LxGxm$yEbne`b#B4|TBEdGLKCgZ9j7 z(J40wuRqqn(Sv#PTnAWuAyeY0DYaz!wm2sYU*N=?<09CWR$%<726WAq!@3(MMSC|5 zXiSM>m&tT;$;^>9r~tn>H(VJ>zT%q%Y+hr89ps{zY{|kWt#YZ?>oHvx!n9SaAEu4l zEza1zWI8Skc48(uAG0^vB4cD2Gt4RY9L)^x(^9NrZoEsE5){6cMTo*1@!+m5=C!!N zhu^dG{q*mCAb+>Y9p8}&>l};Ns$e? 
z!>QPG)gCtkoS@N~gaZ$BP{a9HR(&+uI+tR}nmgiz(+LqB5{Rm< z4E2YKYXJ;Z^KtqSpF0J$Sl*I>r0}Iuuh({|6I|!NVPAnMhW(Yoe9c5Ss?A2l=uG5) zvO&UZ@=XJ?uz9vQly^5^NLn$5Sytmehp$3)f15bDUk8S-W8lnOg8w8JT*=MD$Z#ht zJ3j~M8`AOaxDgsS?}+J_kH6=#KXAxRv0?i+@%)|%HiReQgrz!kuDHX3&#~u2ZBcYO z0oQ6%k-oqRmj=$qvG!7SU)GX!`bqquMsZ}A8lujS|4B}M$S7M_v!^h85%U@+BbmV< zOLDouoyJm}y(+;Z-%Bhx>v6c4xUq#-{KXYeREWO@tOY5+QIz$#gi*WFXrefZ)~2Ngkispv8{oA z%JCB1{7@tHdeXi>(?xx#bvzdXJM?j~cNFtXdU*TG75{wA$F?_Q3!Lyq_n+yw$}X0J z59{!q{)s)z@oj$F7vjGbVL*O_w@DmcjMqUTy;xwLCRxWG&Dk#KQ3wK-1ijQ<6IDX=h6$%AtAd# z%Jt066>;qF6XDDA;^Y~zxM!h*&Rgstczzn(VvoJ09x(ZsMjx*M!kNGQS1AW$SJz0r z-jjh!7@Ge^eC(_n?{h=S~K6f@ff!GI`l%*%+cg zf0o&QLc4vN*c`~-oY+uu+>G#xEL}A+?jBUzK~~obuRSx+_whKSdgkH4+fbacF2ndO zvMBHOML3L8L+w}g(~aOcl{wfi$p!e`WRI%R!KhA7z?dyYNM1lasv{e*vFtUGa=prt ze?*`4??gLu!tUSazjd zQL|XQh4Vyy7u`Oww`IKpW=zV%4LxR4ode-nPEU`f9#CA3YvdD|POpKq>m{w~kDD{z zi-T8ZV1p;`srGsp-o*nS)Jo8F)D8coFrV{xF19m&J9_(HnEO`&9M&$D`Z}du&!C?y zdmm)bpZDE{4RfG$!vG70&ct8w1&FU9-*#UZPQ<66w#XDktu^?@dE(vuHRwN95j%Fv zz~rtbCZ_}-=b{!Qd5$pm$c0P~a4Vel7DWaqPKSQ-%05y-}m~M##wP zqQ->xvrK>1_+hh5=+3<@^rU%3r3l*!;X&rsr{JTrLUeja+Jn#KOTU6d;6;?47LEV-0{-4 zOJpk)Va^06jE?ey{2$Ii)@kC9b0s!S%YYJnl^yRFi8p_ah^m3XXszV6wNAz?CnKy{ zQUIdjpd#V^V((mB-8v2_AM3E)G!;1~8?f@H5+++e7B52Cqox{;f!*m_3lqGT7NMcY z3Jn3Cs5R!ik-X#O4)b z{8J*7P8&jLMJ<*#kyV#ogRbnu>}&N{Xf$ghdp_sa&kWG1!V#N36ynA(a*@)*QB%k~ z$s9wR;`eLSVEXR%)xrIF7yQ}uQRu&$ij!+*q4|J7ON}kgUn<0tGtNjg3xn?UBy8Aj zjI@|CX16l&iM&zYej^cFy+&*t>3}Or3vn<@7v3Ren8klr`_>Ri)7^2zBMGagSYzsf zg&53#K2B>sw%UCaFE{-slvZnFb-FKxp43Dyo^RLMOR$3bm;4fcyxJc}9uD{2|5l*< zbq>Cqsz4v5M`GUQ-@<5w3Cs?Lqi*jsw8uH1ng069jgGMX6^d&P8OR-^ht0?8uxeEv zxsCN838N)xqBSsIV z^fT11A@3y824{Pb7oTBdD&1nv92`fCK z-+R+V<{w9pBjKBeWeby$ICwV9*Ht4pxdfdWD$p=k9$kBMh*0|Sc6{{1B$FBJ6_^R# zR{DM$$t{t@@hqQ&J=V?$CwC%IkNNNwARJMbt(<)Z%wa7mVNeTHKqZ0cYhPSScGqON)NY+w|$s@3G`m z0Nm#BJQ8Mzl)wrcxXe7LcNJQH%EIg94I!b=bggy-#>N;yM#lnE>FdsBHY0Uc2txZL zVPmBU%Ac0u=o1O`59&P)3iLDH6TyS^Vfr8xpPuTYKYc-K0}D`l&ys9IPt4{0>^Adb 
z4|CaBNk7AbB^6Sy=TxSMwDx9E?qPs^(}VH!ix&2fr@4Jz9!jljU}ojX%y9}bv;@ow zs^EM!4?nmck@h;JUGLeGE|5%kEcz=MVCaiD#L_3&)!7nj9v8D?#17AY&cWdeN$_f& z&K`nF?BlsGHi+lI1zpf*?I*FJmo6e7_~C=I9$soW;+$z7HZ)t1RT791{i3kWneUaF zwx$pLJKIZPc25rTHeMG8sK;rzq+tTNceD7c`%I7TycB2l?)Wp`!>k8=&r#%usLWv> zLufs={oM_*uRn@t9rkjX2O;_pbFX)tk@S-L9%~zv%@4pSf8Hm$Fe8~+gSSVDp<>55 zXI3{n+t>*MZ&A0_!abnC)Zl%Oxeu z*6oIldy^2k)f9H4>4#Ubqb8MyyPEcR?%;*EE>Vy?G{K4Ura z_e)k^957&BD0+NNfv21)p9z)NxtPBCQPom^F0mUG;8OosIOvjF#J^MgWA?pTIiWEp z56A25F=Mq%mplll%eIg zJucq#$3>kK{AB;=^s*{gHfLjTUOjAQo)<~2ZKA=R-GpUD*b%CM&CGv&{ak{j-pq^s zhyfg<(dep!qI>gk^>jWCSTBH<_$rEq4S>cbK34+Nu_nn8lOL1mf365mPX!~K?2OVe z$=Ihh1#Ro{abC_JzGmcn@eDAbRvu2wS^OSphhx9=k#p7&`9t!czs3PAdjiq4Fdm!z z%y4-)`%nh6ziJPCFjBs*t)Lq$9UqIJI6YKfcf`@IT4cW);l!>SywEU&q{$iS%LCD+ zhY>p13DI*$Cc@X{L1I4|zpr<}jPK*IVV=O~N+WtgZDIE$1)XQS^lXodtsl{^SV`a2Hfk+;qK{E|JvI5av215^Zd8JAkFUd{!m{$-0sBTu+{MPmF8_MLHmHf=>Ng521DG~}~* z>8k*3cQvSoTj60JeazfTK3KDa+z~sBn-d1lplF1V&GL?OcbAGBtPUc#@S?>zP;2gC3+Te_VJI0!%qw_0Uth`-Crf4Cmd5+LK^-6>uzAmcr ztP%f!^Q!<$q+MkXPJI>oH|D@?XgCbWh|`=fiW=rT49$r~`tD^2n0i8l=A03mll+m- ztjpX>nz);4frWQUu#=q#y00VQb0`yY|Bx4Vqk;Ul66QYZG4WBma36GEG-Ya$S3-ug zuLow5Q8;@e8Ai*k5WS6zk{$Fz<#rnKmz&*A{2&eccz z7&F}XRfrnyL*9k^L7nI9qR~d^tXz)f!xFp(|BegU_u_C*ly%ld#?Ekjf54u~Z)SLX zKMzxsY|yF9h1??Yd!pHkG?L$wvng;il778aH{_tS>Wg?jS{rJdX$k7eri-mHehu#< zo5tLgh&G@Ne{3l6Aa?JGBg&k*R{$p*UL zsRw1+BW+t026swA-AOY%C@aJ6iCO40sRCDv`@-hNQ*ks)1CNjSKwj{<>TQo?KlUSi zw8qZO&iMU{{+`{Ya1)hybt)TUw~_03PzgO8FN@JGpt}P z=Z=+M<590Not<}O@HmkHtG_BB)91c$qzAN+zF!j$f0XXv?5GV$2Q^U~J0uH@i5!iHM#{-hD;G9VKw z9m6m(svbvc-LUFUH5zi2@t}K?I7WVceq0W(nLaCRM-=qUhv$9zmsZb2Av2=u7aCv{ z0cH0W<|435z0~W4Nfa>G=7DI^*Mi)lc&yn)ZiR;%_VGEV80?DvkKK^2!@YQcA^nl_ z@E7^|Mj`dMN}ln_wBO=>z(mZTS5cce{^QiY;<`(CZg)VT>?|DFk$|4x`8t%~)zTE$ z?kPq2yWb*y=_e6R?%k5(I4w zoVecyEB^c^geFxHvBKgm5Qh(y12Z_5c%AHc8i{Y117e3;>=#$k2;v7Q;x>!ER=7rK=Vi$Y@x3! 
z^PmP!M!4YGf5!N}hWUJJ_G!(uNB6x!aKD>~3rFe4Y+)z-&NMtaK)#Wb@2}cU5u3U^ z6Q_&xF)St$3;OAzEy4<}YK6$&ML*aS`iz$);)NTz``xSP!INO=e^rPKS7b*`C%7qS zBJ2DtXa?wFQL7zZbY*{sDY?sr9(Z**4r`tmVBDWd1n7Se1vAfjZ5D3o+M~&T3>xR9pmNN3Y+6zUlXCtxW%;}d>5g`v^WuJ@E;&7E*h#K` z);fF4zb(P0)fR|9>w#xJi8y-=_+C(h9y$f+pU57@S>4ez^P`wIQWd3N{-^6%ps?B= zCw~{i`GOO&mB}bQpNvX5TP$^^|Dk&U{f8CEc-$3n16zg4$!RFr5rjQXhG?^-Uc>y{ z_*=|xj`M}6Ov1qo13bP)uGFVY^wY1#+TtGgKK!E4k!S!;LHKmt5MQH!dv^;k!`K>j zex8^?UxE$i);sj6EA7odx7FoX^?d-|UA-zoCaA+PECwFa^{}ER`6#m_Sd~e?&3-#9 zxiS}fzD`G{u~k_8I2D1v*^hpb{Ig~EMbAyzP|=Ep@k=c{f8c<{oQrzCrQS#H%yY9W zd>0+gOa}Sa7}gM ze@mbDm{KHOES36oU+&7I;aRI_%27qHHQpG(d3bZXGs>(b*mc4l_fGjE`b09`JTu2G z_exBqE;cx$93c)WNXxt@+FR9eEGh;a?1BF_&=R_qIgnjp3ynn<*lv{!=Z%_Z&acL* z&PiBcPz!0-8*{rS3U6N)XEZgK$%(?&J^JWk&3>4z5~zDR#!57Y zB%*d#H9pn!!m$3g#3*vl3SNYf_f5aXSn@w@^D$My3N}|fv3*VgEZ&-?;qqz^h3TxXTFC%dlT^bufIETOw>^G5#JPLGVcF*OOMKO?>-JJd?Z? zMICAgAOl6QfSm!sX5GrxN=(#Kcedz>sTIy@KivZmr+&ZlCM454CXP9{EkkNLgs zSQvn<&m*yWm@%T>7O-vv1SS^pe#*W{DL+qX=Rne~7g76De7x5kdY32Th^09e zs2kv0sROnq<|3?*Ggi3<(MJ~xZR&=@KbJu%G98f&H)nMkLek@)d87M*oX@r^z6Pl6=)G_o8CA8!iXN&kqcX9Ms=zZiMbwDC~S z7^nE}`c5>)WUU}v-IoM!@;*wp)M9UH5kl_NV&MEH@nk)Ds)3p?s`tlsS3AU4P#3-{ z!R~kTJ5G>b@WOQLAy-Guq!ueareI@JjnvOEX`d_7>X?Bem2lzAdvP~H8xEXFKD5-s zw4LNKJfMfflwO&MUMP-;g192^d=)vd|K%ZVJoh1c<&ZS`nfP*31Md!yyVaGx6k{vA zT}J`{-@_sK?LUiQg_~S~HdUi$nm9L_Yyf!-Gz0ilx+vndcv5h_f zok!NZpLxJ@bsYLi=-<_@z%TPGT$Qgx$>vXDWB;q-C3!Jdhcg%Xj}Do!R#ofC7 z)Xgp#Wx}FIEq>mW!M$I{#ML?-^a)NzE$8K#+$SBnT8#Xaj!3)1fWq=Dj8zlV&?~W8 zHy?W%tKp^hN9ZVj6>C51k%bV4>7_bYGujF+^eJulX@{P~3ibXUwgtAQo&1F`MabSy1kj`j?Bu@4!(AKWr%BQ%Mt%83vQW} zc=l8VA!}rz{e2p4p7z9{d?R{(EuiX@k3R2g(5-hMeDgx#W@Z3cyE4q{lZofX>>rq_ zf=~Z$7yBm}AfPxK<5hG~r)LeBz(V$+*u!MBGw@b|V!p2bvTF2+%HeOc7Di8p;*R$n zk@Hg@`+B8gE%k(=Mk{!J;k;up=bL_(h}2^L^jm#cOklso;xwo#G)TRki+L~huC@vh z!=A#(S@38luk!@Cd@u4a=$tj`ecWJrGX{pUOrYFQhMxyA(Kw}C>h=@NfF^j?TnSt1#%0&r?iY)bw2~PEW}}>|OQ;GB;uL?1sqb#z&)pMmou>Y-9g|MiwSy!iD?Bn`VS+;lb2Xf+$p z*biYEXO9t+b8+xBdj!V$quGM2VFSS)q*81>%K6H;5@`4}i)~l$3LPCCct-_cD>;34 
zWoIIGe?AfhI${0RNHmRL_VC9HJd&%yALefk+gC$vwlWG&d=}-PE)nO33FHeK{;@>S z-~xQh;aq_J&+U0UPwN;V(u95Sd#T&3slfa^1#Hc}FOvFeVHwzaH9;Tl%pnX@rVg>j z9HYO^fl+)cGUTiwImP_Y?n2Z*VxP){zSz5{RqPL*ii+`WShPS7vS#+kWADk?M;2Jp z)fFmR*so$wZLGKqOU!by<3KT_Jy%Kl`%`OX*Gt(O5y&1fD9yo{rP}zBBye#v{dcSF z@y6R56W>K)bm4ehdtMCHyhzwBtH6HqfrxUwFHDzn4%y^^T57MA zLg1rgfM?tC@r)VeGfst2|2%;D{~Iw%NgV@+`9bx!HpV8nqx4%URtM1kvXR>IBxaEe zZP=$&i~Je6sNwS=+?gGr;;G2_O>P~%o6+n_UbV*@H#?U@Y2qx52}r;y&iG|yrXgel zf9K2QV#T!usJYV*GXFjkqZ2fcWkiO!D*ZD*%y6IA`D3*WmhH2}Bb{iR{iKBj+LbUr zl7Ok+)mTx{2~W`>TE}a{Y$%8acIe7{fBa=O7k%uE(Ns+it9d3q zt}T^%zS(7OvoITfR}|i%e_Dh69zWG#6vpm_|ES-q+u+yDP?WGgb*;_}^opp#{@(1f znaO^xfnAUl{ZTx;qXm8MIY@3|U*A1*^f;G?&&@XIe%K3VU&o-VMGvDa%F)&{lbrBM z=B%V!Pul+5z}D~L(4?m#Yv43YTssRUNz>u?#u9b|_&j=NP3C1NR2IkMOqUtR8&iUr zvAGE7RLbv6S8O?aN%Ymw!d!JvoE)Lc?(vz>r@wVeofULd0@{oZMduw*)gvFyMOL`{I0*4=5pX|Y4LACo`mHa-u<$bY zICiFXe@{4G)`4|dFs6j-pr@5R)|NAe^2rv*bA8Zhd@>eU&%p6d6>vD0i;qqfP@2*S zDl1!r zi8}@Q*gS&siIy3-xxpUF2g@+}e;n74Xe{ragT{0-{4OMapr{abMRjP>?nW*2fmpCu z3#l4mcyM((jI*qH{wcsw&Sl<>_eQyLDlQ*2ME_#;pI*ww1(#|_n~(c&y9?I;Cyy(s zQ}BMYEe3`dK)*Bj02d`#AML{5S|Hiu$w+_CUPqZqEc%dx2Kue`Zc~I~Y@3K2#2yan z`BzK~aIDY)r|CPK&CkVD-5>Sc<|68d6+n*8lX&*BsF9Oa`chbDpBFY4b#dqx=OLMT z_$;CSll}_5DQ@iM3cwY03A}vOk#eOR+c;|p{!ooItzEGA%?r`dXbdGXPkSV3qZ{WH zZ6k_NzS( zAivia5-?{#7Ibt>@vCzqZcde;=aYGm_MDUb>ybDzdZCz+5dp6<`rken<3+3sGd7iI zU1ouU_Y=`=Rx#@G>8USWgk5jypggSsD#hQ0;a?wxPlzg%JNqE#HO~pL_UN5oh~;;k zaaubXYY(KNY@rFJb02RqKL?|BasJRT7=3!Si$vb%R;`c2=5u=Ne{qKk^;m^02h=`g z$IxZYLWkSn>i!0-K3;@f>;t$a+Yg>;PsP7YO7)S0%cY7@8O2(;I zI+mW*gKD2Tgm=$CC;D_8^OX_4?1XsQU`JM88RsDySjfED@zG`M#~@Fa-?tMBPs zPk&Y(zI!&H`D0&to>eeHej=9Yk;$Sj$Zm4u86XeV)KR>f1M#xke&i>2j-z0nclBBpU!$444uO|*gc!N()kR0DvrF~Asy-? 
zYA{fSxw>c1MD9`}=G-#zjXLjA3p+$J_jA+99&=qi(Qu7a7qdn!#{>AS<&HVJv%jgh>USy(If@v%qF#j-n27&eQzJIqF%4aL*@aJt&IAZk> z3C_p!T&?bfPSNqW*lJEbW;H(Q7vRBj_B5~kEw&Ut7fPAbPd>!Mn!MH5epYy8T8L%^ zM~vz4$L5!F5yRituhV2_bYx;)F>{Ji@>`>a^@IPqCQ-a{I{Fr|@1;cxl^?BeXL>%y z8Sz=E;fRU*=VBp!YY*5bG>iQJs`Ay?zP~H1lKvBK`l(@3_)M(O7O*~TkIM!{__mHZ z#kL3p|C5ZKnwIFxxyqNP>@H9zuSClAq~#>~O&WqTmkx^jb?gtTi-q9xWs@7TF69NN z%rr$*T?FU28EB3SV|Pj|jx(e2_wpiKwHykW0k1^JTU9jgbis#Wa9?1WUqV_pN^}M&0vq9vz${7x1wLS0J5(+uPO*eiw-%u z>;ZU}L3ZQ|b}POnyC|GIaqYXJ@z5k(y=sewWC0!z;cZh0)j77*;=(Y28PSc44RL!a z_lT2{5tu=~;@sZsVQUp5BGj>oy28eE_Ml#}!i&qyp)TT_Ier%2{fdTbGV@iuPFHF% zZ#8(Ik#fDrA0yCh&NuPPY&v>$_~6P#ff1qPb)7GT-#lBK$N1$}mc z&A(rTl_#iuPJ1e5=IY{9IQwDGOhJD+7fijKhKu1q&$0;I(x&gw*BZM1)foMZXWKXY zT=Kf&P`58)mmfJ^gMIM+t1;q!xFGr~yEhc9A!`(e9XgFSVjsp&ia>L%avAEvb3?=(3 z5y5P|643JX5OCp<-3#ha8QTNZ}O5ZoV8;AVH$>7h2VjQE+&&dk+&-k zXHL@tv9ujBzi}^FstH}rp!(9w(?{PO&x7-k+S>(L7X#q+A_ZLo&5${y4m;1}!RUMq zD&={8(EKKLj8n%B8{YMGF~zUbuK4_KAx;ns!R>{*$x9d1R&LnW4|(GQ0zNX;1= z=Jm&~zB~uGX<&r?w>17z zm(tZ@+nG5cqxVdN9G;CB&fm26SfX)eIff3V1B@Mf`WSJ?n{vm&SIp=3}<&e5w@u{EU zBhNY8>vgeSE)TOe=wZ%y2c*6$f)ejYT*IbgL31A5O$;!DdFaT;xp|fpFoT<(WhxdlVquv(Ne^(;6GxuN2gQ+Qy zKY#s)xVmK`hK_f}<##-%^11n96?+9oov{9~Cp^s)AhR>V=$G`(y-bJJIC|MidmwfG zIq|HF_w{-SaMzxMm4j{Yct8%$I#|Ie$qFNElF((p4z~VN4ONG9v>oC8rLj8#QeKGV zw=~hx=7;O-D`t+jz`YJErWr6-v_OSDIT@=sJ9jLDf7U2;VU;Z^LMPq`!F{Ev!D_YF|mKO6C} zIw(-IhPDlJj@%zqwag@oF%{*8+Sn0ZgN6?2*y&#nogJ^l(3{VN?=C&eQjS5*T3vj| zvcWz_89Q>$sQ)Jr$~O~mYRqKph^7a%B?TVyYSCc(N9gK(7pC7eaD2ZVe(kH|dcR;rM4D5G~ft9`yHjFDqgIWdx-jpIxu{}yW-;3TCRdH#rGs+*B;NS#DG|R~l zX<~;CdQmuVF9uEHI8zx|2G#B|+Rl=Bw*q;|pyLB*iK_vQ|RL8MzHYj4Ys(7|F?mPmX z=A@#SzM*HMYOqKv9Z9zAe@c9hNuBQ&kauA)_nSEReG=rakToGpak9}FO{0onJIxjU zBm}_SG!0d2*yWv9jl(Vl=&!?ms>Ju7)VZ)!|Iv^td1xLHH>H{pW z+E|9wn{1Ii)E~YlId|gy(D=(GShqb52@}epWJUs|&vT*uTOAcK9$3b6{K+qt=o?dv z6di$VHy;S z-CbNT?b>*#Rc6ykHXar8>oM)69}Jjlc<$2;W6aqVzBwKtE6s2s+!Tolfhc$(!%E&q z?3*8qe|yT{yG4-s(tyVWxmfDji0jNtwHx$PtZrjICvYl$a(^&E*BPJmi!kCJYs~SU 
ziIg`nun2I(1K%nfbbbu^t*WR;lA*#F+fFBBGfYVu&mM^rJ==8+s4e9 zRtVmEq>%xqi+)FHF#2mQGW_asa0OkT^|yr4NbY;uhhg{(a@Vx&p+26uHD*Nz+63cn zk91rkKfh#jCANJ}!RQe3#U@MRbq(MBR$PjEAcF2|;rR7vBr{(*_NN_|dH&C9;)XMe z1DT1-fExW5H^?{bYL$f6mCQj{Dx=r2Ya;YJ{S>K5Xfx1+?6e00$;bQ_>WpvqJm|cZ zL17;-W>o_szZc@7eVyccgu|m1=oYvmO84mC%!g1MjAx#GhC7;f79e?m6EflhG4HQD zD9qs*js0~aX4fjytDs-YdHIH0;&3+|j6WHN<2E|X3pyivqzvC9xHpRm!gmEa9|W09 zFX)dpD8zu*RoM0QgE)EXtO#GPhkgB%P!eZ?!H?|`WmpQA7UuW&M!{`THq2Fw@cb_4 zDBQOmy<01Jz2o`kMDFM|(W7NF`sUif-OLi5zu97Q-w3F8w8O>1a9I3I!ew<+Y-0v+ zk(NK6GcR02-^;S=FNMNFElkbxMfc@~aGGq(Sx6x=d|e@z8jLR5src|(8|A;tpnWbC zWieG)Wv+mzV=sjLQ4P$09uAd$Mu>==LT_6k^qza*X&2_D67q3!fF(}2*CUvnyr*;O zB(LW=uOFP3ofI{k+w}O8fR0)^Fl%xA|31)#G1j2y?D;^nqlI91BfxUx#} zdRfgXSf6-bv^}LGa(V%J4$;9pnKc?5ORzZ122ERB>F$lj(FOx_ds~nG^JVBQ8qhRG z9;eCCHLB1;{m>*lv@yYh7<%SA(Q&!U6MY^}$M(5-uwFwCB|p!4aR4R?3;+_WoLR~UU=$%``d+j9dOESD*&0OA(n;LjCxC&$ErDEQ}T1b7qNL|n1 z#C`GBtxZ@lAOGNeBywVmu$uSorX9$I+iT9uX$GcDiQt_X8G1)+(PdRBmT>>_RPBkl zqkK=;Emnnwt_QppJ7HCUDL!Aw$Dk_qWdfs-t{nrnRpbw!mSOO=U?`Cr*L z;BA(GTTxoD;JJ3s=OT=Bwu1K`Z*=dQj@+(h$o^D~4ih=I=v6Iw|5>Z`O>ETt$$7yf zXm2LJJjxiO9@}BW9~m?B_81Zng|*Gm80yHgnNtP3$pu(CpS--~U69rELi`L?$2&`3 zv^!7k{ZkwA9oS9JVXwfH&a|im{BdTEPNf{WI`N3}sD#w@q;g-St~c=5H}N;`A8ygB zHqD`T|o9+^QtwHAaxO*y2JH=t)LTj{-t;UQo#qix!COQ8ibv>!PL(gLN zKEGWPi_^$=PLD-NKRpa&XRK>1&pC?r$a@)p1Ke}|x7!LeZ)*@ou8QaUDhyrL0XLQY zh+Zm_@rL=K{-y#$_c>tXRPrj|a-aEi0>$|LvU^to=mX?XL=z zWO|n5O`&(oijH=2mKvO(&@~jpEK^X-eblzx5}4kIgXh1@-AG(dsxMjUdSfpsLsRRK zIQ&2r3RhgPE>aiY7Te&i8U64JI2X~f#yp)Qc#wZ;*j9pDX;HYGStj}1f3KF$&y839aMY`e@pVzmRIKm?=5=VPQqQ#u1OiS<=PIrmuTbcS}%kzHo?x% zwiv#x5Xziq&-fAyt&BK43^ql-P8GO%J{Qh49 zm+YZ`I1q1kW|3(?=kGB34S9c(yO5uGiR%?P_JrZyuVVHB9W*WU!%k%*#4$(v{(2GI zjyd4oeP1Z_Pej;AGgP;uzt5Mskr6fMIKNr6KHVX<2KqsBR3UOckOlF{luT{;3Nlb*>AaYQ*T&tUw&9r=d0dU2K4&ZMEc@FW z?{}mkZ-^tJ=|@v`OT!5nvw#xUlj;=`@k1b2a7B2jFn^T7dDd=y{2@0xF)1Ia(bl+O z$~jm-JQj2^K`eb`9Xg~zf$!bnuRaNXzYffmtD@Z@<~?II5fbA9g~#mSEb^dxZz}3$ 
zkiqpq3#-47KX*L_PY+j1{_nD9$|1`0i|FmGgXf%GS)A5Ixf}hG-^rQJaK_kiACMKIBFhT<<5c3tR1h~}Pq=mK`i zOObPZE{uP)$FZSL#1eBIO#B#t;H8FW;+${q!XgYmXAgw|A?!TIW4VbD8voGg$UBjB zPCOsy-xqdTPlV4DL!6gOL$;wNey(=M!@p#lN4R1^_Xr%EmdSs{{L=jfD3lf8pTP}q zJvIQh6k0{{R((7IS;R^og zO^N7E&i>w`^t`ec()fw{v}VBWX9A|4(Z{5XjcDkXg9&>Zu`cz4(CvO-#0040oK4vO z_rp7#cY*N$8J663#qmaR)4nkOLQl#JJ~x{V=D=fl4Nj(YMTo|85yju@p0A||g6%Ry6;APP`j^|LPJWNtIhUyqM zj9N+OX{#rW`6od?pa7v7W@zp@3-)6QaJ6|hq^@^i99^G%--;bu)v;oe4_-Vq#_n%U za5z$kAL-6y3&ueCP70bQSi;nxiusKKfgIk<)4=dlBuwDdUvpJF#8HJCd=97+t9Y%?5VD znbRqXc0%@2FDQ6rVdzJJ4f3-vA)yGW>J5^8>>hu92>Irh;yLrFHy6g>MwtOJ3LLqI zDaMJT` zjtOIrJRSoV+MpwO5Zd-0UhnS<>ESkoduy2uz%gS09EptKczcMT^X14qk z=dB$wamN$5ySNsk%!Q5&7~#U4RG6md;wd?f)y(O38P5IrPd}s_ zrLSkME;RY~YVMK?gNf2}j#B-%QgfC1ZgMzt{E=wxqX)(1GqCc#CMx6H5bVu-nzJ27 zANEHB-MpH`R_yrjbLmb_@L|qHG`@+?qko7AyR`7}R5E%u>5;SQ2KPPWb?HyRyR2X= zxsr}|8};BkuMrkx`_D3&4Y{{Hpg8x1Sm8Yez2&U1afvxB4$@Q3_uQZDPSCv^fUfjq z%v1oPdzY|Nl7UWJOC_&&Gq4?!Ll?RdKIAaGhOugV29}zCFt|c4duap^f=PtsYk!+p&CFZ z2Wv()kkiu%3hSH2pdb3!%FMD^tsXusv%<@DfG z^~2Yj+#{s@#{%Fc9X-?e7X^32YMrE zFxju{5xtfRz+mRSWj&euJ)Dg<&6ez0+0(6;grp{sq zs-t_DGYBR}z(@<|jVMOU1uI-~p*JEs1WPn5QQo}_r4D&Wl`ltl`7rbtbWS`UX@m)P zBXRh;DRw-yz^LP7F)$C^Z>$&AEKR{j-uo6_sYHNpA-QE$cxCoLTuZwm+#eV-h)egz zN=@0PkXYRAxUEH=q55Al`>M+A+UY zV-N2`yubCbfz=9sWF^MIyN-1?MS54Xa*@m5QDpE0^xb?!R5LH!k9X&*FPURXqcz$h za&ak~Ka<1euxrRf_6P?|^qPh65A3{koGsaZE47DdcwYgEo0x;J)5Pbmj_A_M1YzWZ zMQ~RC^pX>Ne|n?z9_MbUh8P!FicTgeFxpIihQ!ZVul9YA(W4!mqMDHF=m%>J_S`18 zU|2)}?kJK=9p(m)rc~7YV23-T4$su`QN%gB)b%0<$)OC-grzI{cn8DyUFJRDZ{|E= z$iIH?fU5;F;P5*M^)_bM9YP;BIf4VORbg;fIsAP8Oc-(BX>LQuql+=>_*w30UW~FF zR}4;>f#ZF$@%x=IWaOd-aNl}b#(VLy5vaT?4;@=I$Y%QDZ_e0PJhjG(k?~kK$N>xZ z{a4b8z$NDS%5-Y+qHrbxmeEhSKoRq1e-d|p>)^>EPi*1y;?i<=%#16B*HQ=gXh-7w z(GTp0Tu?c=lISW@yp`S zezi3g%w`WdtO(!OLy@}P#`ztwdfi*0t)Y(PbKKB4T@MEy+8{L{2i{k$aek;Tnw%o| z{4m2chZ1a>p3T`(F;ZW4#k@XgViKQ2YtGF@m#-7C;ticZOZeyUFk$zPXM$!KiW;Zl zLfiso7wcfSs1Y6SeHETMpM}~W9XxyNi}mDE-&MB9rj4A<(X(9@8G|VH<0if_!FZlC 
z7dq27Ke+}IkN*~jw8`rlG!`0ayi3>SK84@ELwz%$rtFBg2h3lm#gGZ2i+xswFer`2 zBRcG*_KW}SI~>p2eGpSNG1t7wA8!kcFg=5FkXjkc9qHkmYm0%CqcFbE5Vx2I(yvIz zrGuQe9_|FM-Zw?(H=3x@3&-z5Gnhm>plK&(a+_UoPGu%$rDo%Ur8$~8XDE9^KF<@* z`P_RU@5Tpl;F$&z?lChKPp^-vX+kj=1x!6rL~XS+k3PQ+6KwyPM&jUIl!6<)J0ATJn0O5iP=N z-8)flH5ry8yz%>tH9S;o(d9}3`poyj@L}w>N2TJxcQxFyE=K}8)+0C7O75{67I(rw zjgN$(h9;(w0r%%W9h8)l6Eq_m$BOK6W|$|e=wr{cF~XK}B^Z>Gf(c7XC9kKZ*AY`A z4vLul^tgLR;8-O0H_Qn8X@Vf}s@+FezL_Y`K6+Y`e ziEEx(=>Ls9({l2$9o%52oQn_a4d!!}THZMkT^(%kC$tum{wu&?<~BYj_QETJCE^0_ zVS^jUxBA-%{U2H3+e~Ie*IB`NNgxv5WMiwnCCtL=pntvu>3Vfob^e$*=YLhKt%u#iT@l9T^8TH6XiG@J_+DftFSf#I^+xo$oR9Ih>kvGsA4&$?5&b(D;cjIl zPVUi1aeyPjRup1fkpot~@L;An7w=bFqP4yr7L5hi|D_J{7u(@X&tKy2S^BteFB~>j zCeVFjgU`%Qy0V+9&n%zshFI*}Yzp-$)o|P=!@)l_lGj_C+zCF1?~CpS^s%cy=VvVf zb_wR_HmMxy?4qxJ$-C)!QLyc1i5D{d+<0!Z8AC2S`Gfh!Z^^mT!}7Qo#JthLMPDm8 zEGfh`SF(|`-LNkq3fuqUeAKKKuP*1{X{dC++U9-jG4j+);l=ajwLw9!I%G;_suiNy zbBliO1aEIT?0jRfk3Z+Hy5*PNAf)C$;o1!_l!Gokbsa-pq4ZW4KI!C79M8kU zf7rwAG8M}rV$d>K;43+$r?~H#$N;+3{*lysW_ML(^4=c{IY%{Ym!FOfs>WoCTEQ;1 z5VNk?;rCY!yE=4RDp`i~2i;aE-P?!Q>J= zvvS0KlQ6`!WI%k;!JVGvXdz!hoAYR?o@}W(GO517iUB{wX}cF<+aZ4ME{DS6z6pB# zZHaFaOL_NghwExFIJ++zxwhtb!0zeYkDOz2zg&@~NJro|QP)=!Yp3|5#!L&c>$X_2 zDvMoQM~ny}>-$(NepzYatPh_nBNK3p=W(gI(dfm>==k!ZNWDD?gAd!FoVhpSQVT@W zMS4J?J7U%oO$3bN_tr}X{RWf&*(5`t zEAP3^2IKaVR2;T8ggNKkSE6|@`neJv-pHY|`YX|V$^i13Gto3#k6vMW{8>_huoo^^ z@+TM;f3h%hivb;XHTa8p;J{yXlGmHPR2iN@?T{+ReOcr*_*|WYvCi(;bdGcCZ8o?t z%?>TR`?=x9e&6>RZ1>BDdl32a690ZuJtJWcIzeUkR*}IRnK}Q?ubF4RQ04$7(*o>j zv4UutijmgoXwfmps#MNr|0;w{Pv-UO6>-t}q|nk3C|{U|`G4^~Rl^4PrDga+wsW9@ zFS15vW4(b9b}7~4@AN|So818GKgxK~cvmRo=ppicBz=ez(HP)>d7F|^v(XVn!wfOZ zoQ%KM^qH@!gWc#DtbJOC^jEh;|Jkp^p1cT9Jywiu$<3c)y9!qBa z#U3wrRAJ&7&MovRCHEL28x(P}^pQAs)eyfAhU3Jw$ygo0Uh0ut^j%LUxZzaX7$1vs zbS0eP`8J9^ke7qYnLEEJ9O=Gr(#=X4W#a`rLHH{12^_fU$nbLcZ>Wam=ANs%Wp=Q^T8QMd5_@puPgHQc;d5X z61#z>7_qbl_K&kstXB)Amd|3^?tjI+cI5Ikq~M^?g!(*t)cz^JnQpcS@(bqKAr-U7 zu|E}6g`4CKCx!67O5!x7r-IN9%DCn 
z!ZIQr@0s_$)3XAlmgy)?BX8(YM=ZVnKXaAuFWO-y`SZfVD6Xnrx{JH2gjGp@Y9$53z2pW+V7&xL3=9#u+?FQrGym0&} zFhS;tV!Y!yb@ZK5Twe7 zgXYaG!f>1(3@v8jAX`J&VLVFj_{Ro=|8Yv#oVaWd$qdl@>qBL zy%<-afu3{^z0lEvJbArZT<;5ifBQ!H;LPSkq>duD=}S3UKhQ_yPcHn>NyzrtFA5I{ zxKCnt@0&UD206iwy)*au4*2B8&aYiII(GNO-rIArdzTEq)#t#+tS7>HY!RdPvG=%z zoj7xC+-82|Bt0^l$-^?4;)}W}8RE}dLAJ9VBgU2>lb#=`>mC01RHU1C#@9QO;X#h= zlk=LuHFuqnj+hE?OBVLs-PC$ZeB7Zg3;h)Fv&5MVeH{WA;@ z`ql=yZ^&X=Y7flktX3%=zxjK;JhBEoS~GdRsYUve9x$lABu4JnMN~>WvM*@ke2X;# zvI=m0Abk)O%tmL#BRz#|oxP<;qJOt5tzfz7MY6u zthKPBi!-*rF2iDDHym9u_5be%{+pnWfQz${8C{5`(Q|R{(o2zQ^;%qPrvb~2Avo4# z4%LIM_;{uOe$VV+W}A-9TN2QEi`nrU_Mg8N!r^r#q^|d9NhfSu^+&|z^7%>j@YN%H zr|W`%?ud~?UYW~m?DRdJIBJcs{?W_I6>)?3`#G6 z(I?XIeoiQIruk#+co{w~D8aE0IouOG6X_o=io0Q&c=e1fiL2(Q^0349(M32l&>1_5 z65(Bzh%#3*GU4gR3(dw-?hB=^moL#vBQ;k!>c#=#`s|jN63+M6P~K&am;_I4YwSM8 zK2*Gb{r*^-agKyHd)2n@8(|{jv(2pmYX-MRl>T=SW~&Rgya-ee)WhsnvQ(HKIlssS zjY=~RG9(@2=L&SntApDH-baylVVlqqzJ52wfA@8uvoRDs$TfPf-VQ@t*bmTghKzmD z%^n#@We>h1&*5!f*%y_omfQ=GI)9OxyWjRn853(?2)8kMxOpHP*H3H1=$RGFl4ZE= zW6QfEN7P%yLbrx}?nyP6GbbHxLuxVkU>Cetv02z|HU57OeJr^@A2)d6K@S;b3?NUU zC8j2Tuq!nI}=ECT5bv>>0srWY#PK8vOxb@=nFcXFKuGTPZ;EIp{6<__4A%uXgb zUvn2};E+Krru~dZ%zNGgj#fseXOG166m|MprsGkzKE^88LnV#;BF;sIsC(e-_(a_6 zY>eM2)d<(m#EN&^PelJGy7s>>)F!caZ4eFP0(zeK+3$Fm_bclyne&N4SbQ`+uv&-+ zt-@*J6tuGcB6U5hE6SL+<((MLd)RXoQE1yj&hX%=n6fVyA-(Ld|GGEa&!pfvnRH*$ z8?j&!?^F}3VXof~{U6^G2D^B!+BKa$bnZh^Y>+-B4;B#?=>EqE{j;N}7&gaW>^~fu znTL0#74ROWf`;5{Vi&mvv(hrL@D9CLjc%yg$qd3HI7`D!eMJwlrJ|*RdzMPH!{;P!4KBd(4q<%iV z%N3xz=eby~qRl(CP?(aZW59m!o=`qNnJamAhB@1?M7$bchFy+T__8Ax3kO!ib0p7E zLC3}LDFzr5QUHw+W;nUS5*Pes=<~@DpWJ~Jb%~JQV+66N9&3LkGaEDu_p9EB=&$d@ z!f}(adFnKH@6g5e3L7|nDuCxSYrKCHfy_gZSg^zlujMPCkzar!@=!3zgFAMnaJ}*L=V@1^567Oy-Yx|4m}s2yCQx4b>ZfthpxA0 zBH}djQu9G(P655s0!m+}U`Bm79{(`LJoY_<$O!QJy9^H=a-MbinkYBZ!xf86RC(&+ zejh9B3u0dLs1=e=Ib+|j7z|1^fO13)0*tdUk8_dvYdT=j{v)D+bG)RUd)(c~v=jR;VK{Enmj`+3YGv<)usRO^lT0E&HZ>TgVOmCgNh*TXAB( 
zalAG}C3D7WGhz9O&-Q_d*!Uz4C;PkO9eGG9yU0KLRE)k!ozR6IqYsuEsJu1*#<^(po~4Db6j!vWlIL;A z6_+~r;qP}DnB9^7oL+VK^)nY1mUWUj1oz(d$JWm8MQfxg?7P^bj6Kz97(etnooV@5` zuV7BRr!(es@IhWd5++~KVV|V}hvQS>aJia3>w#!bZua7Ss))+>!G{f|*t5qL-Bfut ziL}Q*bU|t+#h{muInGVx`(al$YVKA^&a+G91gBsBDmtp26Y>4DQMGv{qAGRKk?-ek zA7r@OVuKG$LZQrFh9hhDR?^)e(qv(w&Qc}mr5Pv+5(8r1C)`(KiN1_w? zS&AKy?k_7TP=A#sr$3Hobwfp=nA*b4u2zL~e4oB(%`OY}n2oN3RxP;cN%&9}&P-p?N^LVt4i@Psw)cP-U{7~CT<>YLJuKn2~>clzp1P6=@^ug5$5oGr0 z!Mc$B)S^_ZbFU&7K@nGUzKP%G)S>d#2`>-RL)^yC*|b9J-b`PCo7gQj*Wm;? z68AZ0K!xN!$=dau@OONhnDkNyvj+L#<{};Xa-A{g5q*P!Hc06lfb=oRu-|PBpY4_S zL2tqkdXS{fKi}TziqY9uL`U|5K)V0&#R;9)WLW7@Pe! zGLMv^bHZXs?RUiP>5n;_PmGzQfoBhW5T>kyb=$14;(0#0$!ziLhb`9bje)7XE{6TA zLYM#2+0`Pi=Bqpcg6{~E591*F=>~1`x_yet`R$p84Hq17CCCLDFO#vaMF+K;N|4`JsL&?X{-AH+O%VRO%()9^oa$z7hr26 z&%P4#$qg$9;_HDg;?r~;w7cU2zfc3*p%?S?ZSrzITH*LQd))Mx4)?Ws%rRDA>$h|) z_(>nFr8GZ+`~?N9VjoEHzBcx2hGOJ21MK1%dcoX6+#BnJ-0pt3laYdmvqo6rT8*vo z{Hz|TfmGh;=r7&T?r59vo2`xoJS)r_t%Jc|Y*5B~nE|kavb`G?ZcDM%2L|ssu~kR-!(L z{&4U2;w{goOSGae>$f4w;+M5vT-ur9P z4{^!vvZ$a(ae@DIn34l}Z>A0Aa385^<;1>O0KSi;^Pouwy6?*n&U5(qSnlUP{}h=m zPsCSMUHmoOAMi!b*n!w0K#ee0>mx&f?ywNfkSt=itTq0<2{3Olsdk>Ux?16Jg(B zyBM5d3hjr5sJmx|pF^!N$+HZ;(Uz#&?1`0IQt)@26K=h)Lyx>NINzwnShbE&D*GiI zx{+6L+#4$%8M3R$`2u_MR)<~Cc9o3cr7_q%Umfu~=?!=shqr(D`;gcdmD-b%dVW8( za2V=(JQF>vw9)%r7zSq=!0M6>PI~0y4fwvzw8GK3+&{BNbvUF3j;qsA<5q)~kPl+2 z<_%G_)(C-pxWBR2#w~vbgmS)gp|b<7+@8TMZ3?cvGr^4k|J=t=H z{)nyp6p%bZ6W`|9<47^@O&>WRnDhA^Ar5%aF$@a3BJm{__+e6kQGEaT^`bo@z)!Ul4Etb04nZ+ic4B5{ehL0Pc0^#}S@G|pAXswt zmCPJ@V?V%$-H1ao%us$L7Do@IVz7lCl;+RE#^WW}{k&1~`+Zg0Z;`n4lL(cgr|?n` zzI@fes&pp|m@R|M#~E>#gVASw8hXqzhVPteRBX)0wa?X%$_v`)^g}$cxi9qpQpacK zsn9%X#JdY?ls%W>NWLSUO_`4WEZOVfoVvWF1RKiIpdDY%j%81}ci)SlS!&RI?Sqh0 z^uvy^M~}?~$kB90&lFD_vP|QhzX4XRtHPOk8O)gRbFJJHOUYT^@mvS#l`*)sgS?tI zcASgLa5BXXvQD0G?wW?x-;D9ZzXsjb<-+R^Il;q*L-p7b5jH^$(VP9@zR?Isb;iN> z`Q+T%!DOo&w!Ei5C(jO-7FFX=S8~)XD+Mg#F=JC)Oy+|Jt(S>0uH 
zuDW5u7c!FGbKlB-;Fw=k*ziv#mUL%7j@`{4g)fB5VfGPDPRGa?6I|y04^gYs3sMD)9^n6Ez`ch(ufS5*tn(Qa7Qn2e59_ON^H ziv<4|D0~E78kBQxp9y_t?4+(&pZH5m8S+Btxand8d)~3_$-h=}z}Q=bxUJ%aNy&b= z#T;YK?a6q|^JaQu0<(zpjF@&ur^I98%xqJ%O`{_|0k}})hzM@_Y8M zu=dBB&pG(bK3mGpYHXq}+4~21!4mo2QrDB3|Mb7x4sBUALN0GQo`0w%kDYy)JSXN( z*#$ggj!W6opy^qJ@VPEfZ5vhe!})Ult`>}!lK(`=E}P>yBH z@%EYRjJC8?1SydF?a@H4PA2L-=Sc1`Nae*!y|%7Kv+_9$F7$wum zQgo*Gc#{n|(r%dhJra*k8pGvnDdIMzVwPGtr2gKe{ye45`J~?8nv*a5<@iI<|BXJ5 zC`aLasUBYc^1#k7GHiKahxlfCmVaj8!Z*$)@6_Y_of7OSu9o~hlA5c0tME!F8$S^X zHfca_|4ev()I|JRx|RMaz-2ub6uO4MyCMUpHT8g!8hXaEV9uO~)O*q2AA94~h`ZuT zuqNz>M8mzmCN$`++A$;>K9B8iP=#)rT^Se}Wr|mYbr{%|Lnd$yYA&?HE15h}XH16U z26wC;$FAX8N30Dk!4Ze4cyGea)zW0_NHIWUL)^6L$1j?NbsH1E#9-9FA3X*(O8LyGa;qaISfM*~*f74k`+ME5a<rN!s$bXXn9ry#ck=hyRTHT*E0J^ADmRVErxB@!lHB080(;e{!P}X3nM={ z#sP!K+>0HXhG~PzMQy2p!glf}_SP}m-3}WQ_6d!SmdxZ7qLYU<`d&1Ff4@?IorkQx zA>;^!!{LAte3Us4PASH8|0+oB+e`g(>g-j)oWU=J3jNVX*%4GasSCNEj_A6i0I@^u zu*2UAiVIS3g8Q2(-HbIT_H~QwIF$hiwaQ3dh|7`=%7&(k;#}+S|jEEWSNvy#M>6~o79!BU&Es}083HpUt%e-~oK0&)c3gz)(m55@K5 zW64S}nCHeFyw4f?rx&ideG)OeNA24hiJ{(lIFvaBWl9AIw_^_}&JTS%B%o}C3$Ca% z;C#n&D9cr$Kl25axfjG&9b;Vc%7v1@2Bz|J7CoT=@9Q1#xVt6%iqp~DqKa!B$R+km z!g7a3_?_(prvVRz)e{YPkSDiV-2^iYt6 z@wf_Ft4Ph4&;@?{9YoNl9fpP+yT4%PYTuf?B_&B z;t}srEH+p}?HlK`eH<}hK>*T{a-jL0ytt4GoNCL3RdW?;Ek1}JPY()<@X7e~A_}j5 zFmvc)k89IQ@c18BC~Bl~f0%<)P8R4XqnDd!@~sioxV3gTGEzT^RRbm<<}GvKCydZh zf&IA4Nw~S*5qllXFzH4TU4(+1boOsE$ui*Q?B6%9#5MMaJWRB(m9sLrSOZLoa)V~O zLR@cfMbuGpp40QN`2+B~ioNOi`B?ACb9n#HqVU=y;m;gSzvwtTqMzjRU@P?TD}juh zhBIfQaPM~vq7ND(>K3_Yj5!5+Yp)#Z*fj^-V1TfT zBbjcaOzEVq!vM}#-WAb@BJs~Db-mh}PWb8dQ9PK;zV*g-V{qRueyf#5d`$Tx`9fq(c&iGf%#P)GE*jlNM*V(C9 zufv{$b0c=nkHO5BTbpT)}yld-1CA5}F1Ifp$k{bVuzQF6l2z41_(PF|jeJIqaMk<6^) zcFx13u9wv7yYSidR@~3ghJAGa^8UwzxbKXxp9P56WR2r%qu{+@+#r~1vb5S~O65a-SU^VB|m#RFWn4f`vCfQ@HIiGvxNf^eNQ)$mCe4@uMCx+RI zDecke^;_Xrr-dHF=opu0{`ZqDnm)=9=TFX182tgC;?OJH3?H>BaQ;d@vaVI2t?wY_ zqHYO~1~nA@38074hz>r1=;h=#lCN@}T#$WRqtI)Y26BSS;MXS-W$P=krf&z-|Gpra 
z6?8FbLonw3;XdrT4d$AWOI2WsOCuWoXbYLWp%KhT4*mF||pAeKE%o z)f{viu8J;p)=0jc&-a`q%G6@ea4H4sN9iN@SuL85<#0~%n2NW{Orh#khc+u2<_>SfzL|qj_o@q2HmE{rqyv)28=~~M2ZF}t zz<{|Tk?0JQP6;^E*BZ#?JR>g$pU+p}Vfj$ZJKHACu=h~bF#?YN3RM2I#Y+7WG>^4L zZ##ZR@}gjMj_)Y)VpslC440y6vOPM$Ncpm8n9hE;XB7U4*2SD8M?AP7!_s6|ytkN! zLsr?C-P-`uR#u~^ZwCByYtgdkmr%0&C^oa-(0{lul!LX<=;?_6KIY;NdrQ3nd~h%^ z0n2+D!TM??{(6yt@z*P%{;4Z&d)^Wa9dwcNC=yGw*>_@IyZ5YIl+U&#Bi0MoG~+Ss zv?bQF=VZS`2BS_DbV>Ha$k>yj+(s8Qit(t>)gw2|617L^C2g^UaaS*#I~Nb*1I9S? zo*aLA`V7E(2>JF1+3-X>ZPCXqy-29B#~^rr^dM0N`8k#t?;VVqJ>gK_WsXxdmAEBe z2-j@s>siYliU(_-3ZJ#w*sK?TarJtz-OissKQH@RozU+VXXu>kZ{4ef-28IvS(}W1 znJYP}(*<46UKZY*SH0~Pg=ryL2yt`4n1p;xXmi5y`%|FsAO}7M+;@l9z_=+F_Pj5C zlB0s3v!4m4aoYI$$ea0WYwXQq2PBSJkXbg^UX+5lN-3xv;0_+`8g|#3T&?}@rJ_)P8Pe!wsCrsz&;P5%lQ7*Ad^e6|_9c}RZU;~D2FT@&^8faerExIkd zB@~9JVMteRINR__rqu==$R9SjLe6vGOeCI9LepDA6p(q-pZmkb zuU*vHZ3x2t7=66F;)H64e2igV(eRij7FADXB)y`FwmjRLN(GwEu4Q5k(vb zXpajiyXZhsYzV86VxQC+9OA{;U z3DxT_P&dF9v(J=5#q0ng7 zMVq<@KCa4x_c>e4;VflQ5$_ud?6HjekCBFjn3i0H5%KM@x|#Ql$!f5l5&~N%U1Xnh z#Ii7UobJ)Zy~_`0E~TQ4KB5zaRhW>S$qbzI^`!p&_QcEMpFM5jN+3Pi%85|gqlzkB zJItdWX+$91(O>P*X-*<^rmLaKtq!y4@SLkwkKui~!sE~@vEjEKdPXIq!BiXJ<}L{O zP(W|ED|RJ#qUt3(hCDa9hcv*aGzZ~wjp#eIHzp0fCtQ!KBZ_(915XVQL!T*H^Lfyx zpV{0COAKN`#RnEosl-p_nzlqU$6L_-|9Csgx2m@H>m#D5C@MB~7t-Bx(A^-YG=fM; zH=FKKh-LN6Isd}GOFsXfaue3}-1 z)7u)ovj7T@tWZX_>AU+murD=0`{N~WbIgOq)5Ul*suON4_$|UrRq@PYI#kHPG*b7% zIDh6M9`MAgo*|f@lSPlefYK}So>(8<-BJnXfc{vv{E1i^GXqF)I6bt%c9>65qp_b9Bre zR}Z<7!5fS1FEyw=C?qFh9;OZ{!NYuM$V{TQZY$@g3Ug4H%0A`+b1d}9fk$r(vf%w8 zA2}C?rHrxaaWM}0O7O8q39Lf~;@9jJQQcn?zwW!CvadF_&b7vU`g7$R%<-UyJq8{M z$4VF0jR{5AJvtrR7ZqUj{C;TDc_b{nHQ_iY7-Pbi&s=E>DI@AR{_>h54%8!(>%L77 zLrwV{_^05dRz-)`oBc@|>;FCxdU4d@7S2KtxjIuPIzVM8>-%gwnD_IA{qJO~9SGba zzu@cJEW|n$qt^wn9BJ9+$P95&7&$JNrFZ1s4gpR>Zg;sSh9 zPDe8LVn=Uz6y16xRvcEvt>#(S@k#^R75N<0q{DBxJ+|F)K@C{|LPOw2IsH7WtG4YV zPoRUpOGnp>AJ!Sq6yAs#8hRMlHweWy^x&9diVf}5k!P91SIZB{PeO5|!T>8nsIgj2 zABs{5E*#t~Hjch7c7%rG!sQ|y`G@&)zNVNom~09uBXs(A9)@Jb!hn3Ug&B*`w5XWp 
zk$=$9^$aG-LCNp4aKEUAagSWlM@I`G?RGHqNyQ))EB40RP+}B?&?f>fIY%-ZQU(@01`v2*U zvL!FXcr)sBY`tMhK5y6~8@LZ5UmW(WecPq ziNv6f>SVDPz)rsq4W+=z9W&8YH34Px9b1ZGSecOLG@}f?RfpnrsT zk?-k0n#$kxQ3&EABe86$2|NP}py-l;7{_9KEbfV0XWPX&du8~>dBNwrKGY_YJ87MR z9-W=wsO5v6?C;jF|1fkqeb9f?aW}J^Y|Q&&?aUX#+(R3tU;QzCjUIN50zTbi&t;Mo zE+s{x#cd8^-sp=Z?4{ofe~B!52ngBAX8&NX1U1AJ$3 z-v2Egiu}6|*!#aeQ8KQ{mvwl(!Gjb~eBi5yZ&AU*QM0h1s~$exv_d-lChp(uVcEw2 z>Y}l>uO^Ilk~gy}7FB!7F(5)3ALlfRt%<5gR`Y@FG$SmnbjBO@CvFe$;2bForLx)h zy1|sJfiisH`%G#;1@wh9e%myQzwGnfR`kKIp}fD#tPuGyhqb>stb=Bw?M*l=Q!OC3 ztq|*Z{@k~$0QENC#9-TZV*g(yY~ASr)71uOnP&ss`qog!qY<#U0K8Hfknd@kmoV_T{@WqZI0Wtns>W7PKG6GC$jh_XE$L zb8_*rh1{PGem_^wu6XpLP28$d!H*R#m^x<+W;`UXx&LhZDzw9yv&yiU6_0|Od& z`R}L6-yT}l;oqgB->;*8hmL+fN7aE?x>y!1{+&>9M-PKP%tCiZ`lGbyJL9`$???#} z?$5*eyIydiSIe;^5rYmFV|j5uH0N9qa)!!y9~l8nHxp=5FV%T-E_z4U;9FOJoVATd z-gIVQe`CJ(yBv(4REmnvZQ||L+oJ0fHT*s2hv9YPFAp=LW+)p5qbxDlG5`@?(e%w2 zAWy20Ih$#acPfI*{fS6TyDd_+^ib!MKy9E2LdRI5EuMZ-_6vF~WmfF>L?~97U?=%Y z4YpY@+B+ZfliNgDW}Wy*--BsN9Nsh;;@C4AD3bnskev3Sj|sTJ{MR9e?eLa)mS*hP z;B*Q4`whk)x2MAKr8=2Q%=id00FM#4J|_oT!vuz=x?$!rdb9Qe{$_=UQI(+abO8ci zNU`pGDbyOYac@Q#-jE+@zsUkUGV-v&!yb1h1Yn&?G`!n%aDH70))l5>MQAx3c774< z`>zNS&Vl?F#Ul3~WAs^NjicfD+;3f}<7S?lbSmO|>0)h883OJm!DoII4nCHFpLUzL zZB1U2sylx2Z@B6t^H3(|A#=DZ6nyDpNX>+Q0=>0H?DJ^k;(|gMj$ZyLu3Y>rCRwSX zhWn&3^Ujp$(SBUOzOcFjeqCY>LrzVjfqJvEpTKC?tFr`yDhPZ!jI@mD*V7 z=Yr9kH(EOKyd>w1u8t}2zGZ=of5?AP&BY(;^*g#=N55a&rlE){x+0D~P=?-lUuI01 z;Py}}yjq_NUrRfbu-BmRao$FC^CF z;~LM__g@9VJ|zR+BTcYGts18b^Kj!v751#@g>?s*OEW+VLtF--#-BCIj{EBlVi zSg&+*!Lp(l{8HCPj5qaoQ!{Y$MR|wU8@ueA*#7yZaN~NJ>}xFHJYj4v`gx9%D}BlZ z9@A%FYG5YnGc_^)a0TWqPlo-zDr|Yw6C1}r6>n6OV7bTxes^@SHJ-T`X(2`zT~iWa6R>BU&Nh(cSMhqM)>t63bqAmP*gF4`8dwCubCoszAqy4 z!eIKw5Hq(HqcM(|4Tp-*NAyA0UT=7VTsW&FC}fMb!Y`PbUq(cF<>&IUVu`Q{kU)ia9*5dRV72 z1F{$$UC-->G!8y^BYLoZk}B(u^G-a!McJXyCl95ZQ+{S<`L~m4_#9z?F7#72AIL!V z%kmDd=imHMc)s~hn6m%r`ZW;c!*!udKf!)JN3}Ng$V~S`MN2H1epOj~ikx>YwXSw@M!Z_vyciz}A)SnB<#)6+0YoZz^@TlM3Osxd=M7r$l1#c@fvm 
z49%TWFtIE5UFzpm8gt+(XNx#y0~!4z!SjcDaCMrGCHCYs*Ud-Eieb1D`dpmm{-s?V z0=@g3uN?)N&!^)Nd4SKhF!v`tl6-t6ta@Gwn{T0*Xu$qP2d@jb&g`*YkHv$TmKffe zgk>i{LiJarH4hYyrZhu}07IB8}y}k-=jk0hEYY~cnH6feokECuo zn7F|ko1bJMu$AZfzG1k#D-rGYJuxtue5ixWRP0Uvw5B|EGLv>v`BeC@){}Z~h`+^7 z^gm`}yQU-Bm-}IAmqf&OA~SkW87$i+^oEooO#Y8hy(xvYyThJ+m1)4;OXR~)Md+dF26Z!Jh)yoD zMwG7yPEtp0&)ko1CkoMXP#UJj7D36QMI6;=6TjxMKf98wkTyNoEVDpek35tF+adVP z9L$^-iN%BTA=_9?_F4woQ`y(cmBZc@QW%yq8Q-QGL;8jZV%OQ>iCs3jINBnWy~f-T z5tww-8ZA%r(ZftajeGw8_w}mJ^u_VXGMouc#zlJ*yfiUHJvm+Hd9IDWNq%UO2No@g zz~JA;s5If<&oBw!96J8HbacJ0M|xoW&c~u>sRo`kgkh|O9@dYx#{BinM^Yr;D9i^w zkD}4}mMLDmF2$_+ESRK~V)2TeIMcmeEHTr-r(HAg&0Y`vj#(iv*gj(p$H!z-Tew!ZIyX&JA?ZYvcWSr&q1a?{i~|9dwz3nAIo*w*HP+NCiF-=+iQureq) zB}4wdvJS7;(a))p(k9}y>&4Rq`pFMvVO^^Zv^gI+u`UQ*brr!GLv z1Ky{%dZWRu3oLBZP+MV#h5%!7MO?6CKpy^1bH%B`0J0KN@tj)Hx3OjL$jiZ^y=5K# zeyAIb_ld2-%ugBf-+Ex&4MS!I+Cl$b3S8beV)J3vr_?JJcnMTIC`aJySZZ6y6MxVH zw;p^J?ex8Gz8Xlanhws`xZ*`z4oavOA2rPv@5iR%-+R=0DOTZILLOTGEyoz@07vAv zh%V%*cnGcd;gmB(2<ppXs31$qO=A3N7Y>c9RGd5idbEw;0b1@6fCFEUn@cV5YE(P7Lm&9;#7T#1CyV6VTGA!F(v*U!78^XDdfQlnh3>-4mXxwBXqkg%L930(w|u zd0Z|=hSQHu&Cj*EMD*IMjfj0^i1ACoHg)Q$-%8;X>#B5jO*|PDjBAnl*ynGH0GB+R z)*)|g5w)E+W6;&!7}J&0s6Mn@T2nr3}$YEBYB?Q`D0Kr?5cQg0Yu)C!2h}~_Re?17n2OM{I-Ps7+d(- zW?;l(FLbf0#kHf3FF%Tm0)0fe2BGgeHC)%X<;*f0hU`b*%b$gZEfFZx zG{Q0VrB2SxMmGIU5vvq1y>pxRM^gn4=R4xxUOGr!!TePEdM{;LK|!0G?c_)tQWe;1 zP=d@|S!h2}giB@LL~!T7BKQ;gyD_e)kTJr)7W9$4li*mnJ-HCGvClFB$0UY`xKo60 zk;!nKUV=Awe~FR{Ey8lG4tztSAeCi+uLqss(Odwj?yiusio}++$rxn_^gUgLX!3jR z&#lJsjRT>sctgDHt^=jLso0ughF#=WW-@%nvyYw6GyGpiY(4FXnQGY>IEcAyCG&AZia8v2$~(NC zcbYUJ?0<>FpHxvhWjg8(8AADiJ!+`kkALordHaI#a}l)`RmNDlq!bPk=7so{<6|4^ z)SDkf)<-RJmlsUbKEn|N0(Ick?d#U1p5zq+4`vYRD@G}2fv?OhLDH9#blj3VgS#3 zjonia)2NPaJ(DdkmBDMEk}>D2-93@1&d?X(A$SRWbK)9P~r`F#D%n7#-+s0XJ9!RSx&O5|@J54~2d+4vAfU6x28;6x zIQMfw73)otlL?r)Paji*3n8PHiUEg8;9dMqC^~!*POR&nrO&_y>U<|F)7Q$3s#~LM zFl0(8j`}Cyc4PqFP)|GWWf9EWDiG4;qj(zmQUq)c-(zI3Dr5ACkFc=<_`1ojL9o$pubVa3N7|NIQT9a 
z)`jLc`Jo)Dfw{P5R|;pZZYbgR)9_P)ypkh4b(xoZmGhqr3GP!5KQ+o5S7qYRe$R-R zWCdtwPl0>)LVVua5C2SfA(W1(z%ASbM;w?-d))-uchi|CV}Y4_EugSD91BjVV-nS6|rS_1xC=!NN_=;>s;mm}NO(w=je=H4pWlvN2|=CFfG^X!*MGJP=ba-4J~}^#4zPew^O1eMt`3vAO^jo#`_rH}B=`i}^MnQ&NyNI2> zc)6mu!|Tl-)CJ32+r_=nnmAeNiI!r0+^VUZSH}2T3xYswkCG)Je={{7R#pPK`Yb&GYcbeTrmp4X?D1GtN`|}vT=QK zA>!wZ#^EtvMQy<}Z0EkP%}f_#sYgHVl!oLa_U$a0XPFxbUq3UPx$r-qOd1OP`Q9#? zisH3BP`kG?eA`X&-h^{zdP?Qp6LFI{BU15JI4~s!x_$InN79pYKN3nqJAR$2VY@ih zgPO2N){FCk(4uUDk^nDE+E{=em%QP7C>lG-(DpGF2%WY7PP#d0GpIpmOD_yta#wtd zS4G>RP`ug3b31tsny>P){33rBNdW%xxwXwV#_gM&zeZ%@bx1kJp6rXt>cNM9LK)%o+`*eyp#u)r$xZHPaGng%W+RE=0+;G;~h@T zvL1qekuO9p*YxR5_;-IehTOc1!$Y}UKlq$5u?bDcz-({6MM{r zyPggFO-o=gk-n0px$uzBf+^W9!^>oOhy(6H!fI2YCEL>sTp_3_%Q7x@PaiKCy7t`{z(C<(nGUkz|r|SsEmD#BGx5t_d^wLYmBc_!c_Uuxe zTPi^Y_oLV~Jy33TLnJjBq0yN-d2%g6m(#CA{kdUxD{N|WM-NE?Ob42v`^{=N>*ivI zZ6zLB$YFt6n=r^R!p+bCOq#}e?79QI`B@TuU9d6Q8{Zi4Yn3w$S#Q?z-<{lpp!qTdZ)yz?JcaX3Nje<%m*Z@e46e1d zh%+TBsQK3nAO3QV>uZhkFC@t6X@i6tzF1~752L8>Z0tks&v5cvJ@QZ)Cxe`jPa-iz z1GnDz;npsFe3r6RN&*_Q|U-!Xi2sT%}6~@D< zf3KW_ucNgP{L~6VcjO{-vo)qmIbwN80&~pt@W8H$tbYm0ORKPY&oG!uzY>dUIRluJ z1{*DH^!(s};@|?fQ1j$ZM$Q2lvfkMDe|}&g-VSC?w%0$@WRRgW`@I-L-|@KEFc`NP zb3SVYOZLRpQLiDj!yl3hG5mi+d|FaQrV_ms%@yeDDTBR-{)k##C0x1hj%Q@H#?;xM z#4-=FW;mgyBm^dUap>x52o>HZ%`ekX#s1B`CGEmyOuHE1u7l^f0qEIV4|AQEo2yP8 z8Shu~<$-wogSp$0@~R{u7u(yvzsLleE#}yd$n{;ar=3nG-W-BI9`y zgn|XMXO`ja3FHkfzW2Wwx>gJlNe zHa8ZcIhV058|_i2J)1`@Ozy;-1X^$P0YoN1ULNfnog|+`Gf3}Xt`9P zwo@9u8O}$)J+j!J^F|~d*TnmA^l`9`JwV_49&!sOopA#a{NSg~9?mRNZ0$-8^}ksN zx>SX|WwPk}^PBklpDMOd7w+vX(7eeC@4MxLm~2e!8G|WOVL0EHp3Fn}*uc+v^Z+%{ zi+gkbmWAsxWkhvh4Ii(GZT7CnB&SvH6KChOo``G?$9-29w4o46TXRuwE!6Y59k>h;LdehR8U`cq_a6vm;1wTXe2&UgV9sA2)C$@kp4+#-nAa+{OGxu*L@0f z*i#Swu8%Q)nKRG#uHP_gxOmS%*1ia8M~x9uTY!W`%uePxd##H!M*O}h?8Ygh$1pb( zk|(e^!UCBk%(3Nq$@z29fBRgVFk&{t+Fab8kOLX^x;pwfEoaaNed3x}o3D*Yrz21! 
zZGezJmWXX5Cz$-fnLYh*H!2AkcQoOAnt91v6Oq)Tyu+VUo($cXFRw*6&Sh-R&BT8X zbZ}4B8i%Il;6}0|eE-Lss-K6`S$yBo5387&gd2LClXdWOc62?DJKeE*!3WW_o_XV2 zqA>OIG)#Qpf|0SQ)Kc0Z{kARhJmb*9^LOBvYJ``jLQ%dFmR~zTdfWr?%Z$F%g`wCq z!U&XmV#(AzOrZ{bdO3FiNdsKS8pnQ+aigz2*;v25IbLUE`ej%!P>vb!#3^LfuP%18IrZW!Mf ziNsA3V4)$xWNI*uy6ijeYVhfcJTCQW7K@%LWBnn2gxoQ}Dm`o52&PZ*fFoiJ-C;c@ z0YSU8A^(x*czPr+{8J94c=}PUgVoxz%FMA*?A0HKOgUAbaPr>dAbv!XQMBysxMsJ$pzHT^l_$CdSY8r}qNEgjyUG5VK{2Y}aQx{I;`QKCDjvhnhJ6(;D&AZ*1m zVRDW2pVK_l>1jfNdiH4ta!|!SVW%m+&>56~0tx*v@l{y%Z!Tgb71%rIicr@5D9$GW z6LRBmdbbvOy|RXa$iunSz%~AzqtipNjJK zlhmiqD8@|A;NIU&z>96Em~h<^M;|VLY5x*vTUDdr`X}MI_JdGnpR#L1H0KLC@Mr&W zN-OhuC-j&Kqmpa&acM81WOjV+Vc0OV6uvwn_iOGJ#HCtubO(E;Ls;BFHoVy)|R; z`m_n|k1c@i-4v|KEJCHeJS^8-5Hi`?DEJUB1xPEc#q!Rt(S^}`s`cOo})5~iOA_OA{_Hu<}X;Z;z1X^WGGIl~#02>IS~ zu|KmAfq`X+GooI9R%e`M6?t1l8xq^uWudE2I0;7Ge##ASLj57*&6~;*LJp+jyZ`Zm zyFqncy;#|zK|gB*jvO^(U&jgSo)=mF`m<6xp0l1icUKNiJ3SL6 zU)aaX3c$VRO3>`=jmDHz7!9+>bR8GSKjAFYg_=<6>IyA#;h<8AKh@IkV1It&ZZ*uX zr|-H4>t=iA?F!aM&Gb)X`{K_mW`|8P#Vhuq6PHuZ5LJqt4S&UD=`X^E{km4y5NKEG zqt6X{1cl^dq^)`f=7*&7}jezj%|G=n)d0S)j0~Q=;QQjcf;pO z>LU{zFvh?gQ>hg_No`Uac`6T?uQJcS3X(Uy@UrQ%IQ-NA_hmyc4gLQ4uFpzn|6AMxIFZDJGAH=eX8?0Bz?|B!9fs387ZLT?PU7#n_nR!j* zA~lr6a*pMQUuTNZ&U(2ZvlJWuk-~YKPhv@^F2by$5qn4%qxkL`Xvkcbj}DlB-wz*T z=pWuqe(UuLWY5mPN||arAKM8#yWSH!FX%yCD+U9$>Ofqw$A5qF$m6m>(3%i(EtBAB zV~#C5Dlv5%-@C3A*zlqc#>k%#v5U1ZPB|F?qg7zr%L{AvGbe7p9eN()tZRBUawV47 zJbymy$L3%S_uIO?Js@xVS_JLVKv%DD3_h!e!vCDGH#`q^o1M_l(hrN4reN&@Q}o$f z1zSz}!}-4N=z7{GWswnfMU=;AAZ=F^WXad+%RXPv-8uL-!w#V(J{Voc_t{SBo%uVy zy`P3HpUZH3fD|$|-V%GSXyN(uFrF#?#~ZRn-M4HEed!3rz0+|pDh0o7HJ~xM1Yt`P zQ1yyA4py8~?tUYJpQs=|#0Pdhx>%TK3vKF3UW~QJ4VgeVEse$0Nyb>lbAKlQ`S#oPl&k~3Rb3SDZkM~x|G zk5vIX&ukK}<#h4+Q8vPtsN-=G-%A%I*dEWkgBuQT36@}Slo4LcW<8b9{?dyIxNd(h z>K3(#EB;d;Z}0hk`fCS6C;a@9j{a+0Vd3cyPpdR6J;vX8Y8k?Y#X@W$FTYP$96R+& zsIg!3A%fWw^7@z@YDp$rF2eZy>N97#a{zhaFX>;QPIY5S3L^R3hiy>AbB~+iz*05j zKa4=qIzu4W0z)I&kK{STIcGZ5ZDYw*GQp%v|MTPVKI8p8`M_`SHop^k_nU%6oOkFP 
z)S&jr1v?+7VJCeepND(Fmzu18Cp4jTp63_;dF;8B!eCApe0Ml6uBgyQ>6(C@b9Kpj zHAi?=Hk#sC6Ls+-qb>$5M%wsPT7tdo<2{Zk!@?qI$iI0b6zitKFw`5hAGNWp!46IS znb12;e}D&PRkz5r`Duo^Z>URmmEhrxLiD=#Tx54^78>+(by1xQQ!hj4>)E2EG9Qa{ z+%e_O91Lf+LH#Z2mdBPu;V)U((bd@P+zDF)Uy2{Q)v@s+=dGOu<~?#o`uIZXggoJ- z8Hv-|vT*sAKo75K{5dBfFL(ibhxCGs$7Zo}o*}IJF)yk_4b{bNn13<{M${9A>}2m| zZ2`W<8KKXX8Z6$F4ad366YJo5fm7d#*~vdea;*k(ruboIi!Lf`t?ekOyMx zBM=dmikH8D^RLU1@Hhi6?JCjH^?v;6jmzHmMe}XyG5&@j%3U47Pv}F?&%}eX^m^7g zVL#VQT4YLQLm7sdaISQ;7~Up*Fy!r9QE_c5-oD~I<{w>L{%8*8P3f33#u`U>c9P)? z!k6#=&?N&0T zkTX_^FA6!>#eFGs+*5I(>9V-`R}=lT{NS%9V1AP`D)uACUT{WKV;Jg3r(*0rb>ys0w@(d7s>iLy~tT96Gm(o`af1u9YUHSV!=? zX_58fiU?k}v&kK2mj$9HdzX!!^>MIE6)Y7pkf7D^^#cESBXaK6i{vewKh!asez74A zS9xGv_dHy^=!V{9v8ZPMVU~{t4(+JLVV*;N4y{G|M=8XAKP#3qzoEh*62ofs5PO2p zopL^F5?5I3&4hf61Y?@ik;&YuiERnkr$;?m2d}dY|0T9Py(NYn)`p5q6l%X{Aoi6D zTFP=!UgU_A#{<#Sojx1B=Wmb)C{5i{@0H|#=JP%qbX5fK{xq$Q#7R?4_|^lL46;$D zOPZ#ayqkZ z$YG@)RQq@dn*&)GBvXoRy$564QTCzf-TU!897n#mK-bLfBXWHuk!4pR8r|EE|_{mYPX!H_UVG?w*-$Ls=3nBayhnc=wkDh2zf|IDeb zKqB?{vL&noHcLS}^_N&p{i>#U0OFGksgJTj)#ZE`zi`Cl^`SW391C-6a?1R8j_}UL zfhEj^j%Gf3uY2On8cjTo3T2+CHoo0q-Eck^2W71h`!f*h55}O(kN&siBT=OJ7LV>0#w@YQpJDw%)=SbJh%en-qsbQf6pj{dT8!HZBwvVUOnr zvEbAl;jus)YdC*D@m`aCGixO3=fcU+39nQ9uyH^d8tUlBe#%+t>?8!OEysFtbe@$c< zToal9)P*@Qyk8=^uzqB%Y~CjxtGzhKn#*%Ub_zV%PbobpkCuziM9=FwsF@K02i`ZI z%u^P@-W1kO$fhks)fj?>qb!SB^of!^?8|Cm#Xo%n;dGBPl#c`1~4v6M6RwH_NTfcHJkgx zQ%C%r?19a!dCC<{p|zw6YEk4dD^#M#ssF@C`epVSkte<_6aR&?hhlGsx}&-DWSXH* zN-ol0MZtB72XyCDL+(W$9^9*d$G#ERFt%RwP|-lhuK-jv>mcM1*>JKFJlSgorAjj# z-4u#5qsSX2=Sf2?0rA62VHYBeF5NGR^YYUWsXhZ{X7q8yn~|ZKjep2TRT~q46CWaQ z*TfjTsYxHRCl%onIYa5-b?u|1@qP45@gh_e7aIeSnP7rF!&%pM%f;JzXY}9Whxd@UWhFyZ%Ke9MT;|r+% zV^mlU_Soik__`xwWHJ7Jlj!r!5C-Rba9&0mkNBCcb5FMaN?lOEOt{FT;?qKXm|ZDG zZlDCiyy&aU{3DzIHSPIQZJEC_ zas>Q`Hi)x6^wGKm!SSyav)ZYP-57=)?p8R&`{eOJ`uL=6p)|7!ZIEV{&xj`hn}#W{#Nww zNj*lSAI@-}_pG$UkCpTh?XbnKiLO{QKN(M)w4o7P2Gwb4s9{b;&YWJDGWwlJbJN4- zErA%r{43MFcId+U)7I4vorbz%vs4PKi|D)dtiS{^YX+%ye7zn%3dmvI*rYrSl}DWM 
z>xKa|vA-2R>1Jx&&7+G8n`Uwik3Ip z_>X=pDW@!`(}(-n$`cBQlMwUB8e4`IqhU%8yx0fr=z4vh^u+nqFU8?Ra)P!6Vd!uJ z+>W=vKL)v|S#N`%rqg*&h~$h=ANMbD?(0Y2VeelGRd0`P| zNHUdBhhl;L!^3f{AQZA`YlDWoe?F+7_7kkKQ_-AWd}riSBi$`-V$nLuFpDMtP7i_^_2>-_0V9b6Z0LFSK9QgRKJ3tq*gN+{c=1 z@ks1>9gXxzOURN3aDIO-UcD?r{iHs){k2J?lNZy;eg;AvRiF{>fxyOiBuuo$YWnPF z@ozmPiP_-msaxt#eeyZxIc(?oC4=Izc$t!x&>J{f659;O%CW58HI#WEhMaA zzT_EZo;MeF`1_4Z>yDgNPsHz71N5wkf{v;-zNT9-Q#}ug#WvVx=YxU05@4Kd0G(Uq z$QYT8*nNDCJbNJ`;-hGbH^6fCNa7OIaMsZg<2OUd)#yeHS(8Xk`RFmC7hI@t{Qrx@%39E;&m(W%x@Yde6_&@;{sd{vw`yy|NqNJ`?7W_jE^qD;C(r;S-%K%`kkS+ z>6!S#T#Hs_`rMjr0iEs4$=Xtcru)uVsu#^1^JMBT1=cJk|GHKJwawJY_3nh?ntIWd z^^w=aXt?*`oRWT?_qBO&9qNpSYy5%t$w=6ui?)ds7~ee$XAG*a?#XxYS+8EWNpl`U z)`Vt*7D8jK@%}RBBvnp0wr3XnMkFCpUju*4OAytaIbtZs?8dIRHsG~bbW9b7FFg_G z#{R?)Ycx=|)~@ah*%98D%>Ch5tTr|#7D1+KJYG@v=CEE4^RG0BDW?pu=yD|Pcht;=i;hAbM^Um&S zahB=D=ac){<>&MmM@J(pn9uddGOQfMZ0}k0`;M2z@ygp`+dn!8|1TVm9_dh%X~mi{ z7n5skP?S6q`VZsLoX$SV=u&L%B7qe7sPos!AWq}4kQ=LxEp1-7#Qu{_2z4mc^aJTT zpwY+|0lu*qoUVgto;&wTk!g01`Ia4APsyzl%*$HD`hQjNzJ`7LX@HuXCANOgLqVz& z`hN|^C;M3JCTCn`Ssuc_B%rvI+}{p<&i~)*CI1?N#d^O*_K_|Ssw#N)Qy&le14Z|k znQCT^?9HCkl7_f(Pj6EhW}cD5h{_&#y-*cP`?EgP(}$vr3pP3CAhFp2%f0-tiR{2b-WG5i zR*uXqIkC_&wKfR^Q0qdBFahUxD$2EBvpyvd`H33jq5kr5l9!klZ zi8zpo>l5jt^?EGS23`_A)c3UZj==r`4U|!5yqf2iRXiiFR-A(;oNMcU(nnc&8NTdE zCzG%o>+}AKOub1&?sbZp7U+36=W9VL%N9io}wT;KOBwWa}7{cNPS3y1gA|Z zar)P1v2(&bQKH5C(1U(>+0z&|V2k}1^6-s10MN?Zn+y`vJGL`jX4vXPs+_8Ut`}4I5UT%<~n@^jYSx>jPJpLeLqp zFk4Rtu}f{>GA|p`U$H(O;tJ_A(KtdKToL;y2GmP&8jj?+Zb*IgQw*BV+y~QWTwAJx zEo9kF*Ura|`Xb8>%u4`0Nr2d(oaRkInNCf zCCo&S4n^wWFmQOrI;sRGS*u!4E9~%k;nRk&Xa7o!-NN}$em&|Ha3dz5H0dYg0M?g2b2E;lqO{(NXZKG!T7D=3m7j}l`Py*&9EzG-n#k&9iFvCe$RhVwda@bpvZ8R^T?-#P$qyWn zfT6M#cwtN?v3;}H-9q1dd>975)Wg<;cF=NQeo1F*%xR;>;dm4l{jh)@ds!>!(>u4Y z6m@yB=&SZb%qcR3$&>_?pVYwfaORHFC;jX-`3x3rI8vJglV}aR;d$3@LmIY?uI}*X z?C5%XMGvf*d{e}n*Fj}}=7&$#flRptHm=Ntw1NZvXt<)OISxlIs-k~g1r#Hg&3>dB z+UI&;pT}d-$wC9NtLcR?AlG*&_lZjKW=7j#`DITu-b=vYgVgUFD@P2{si~|$lT;TR zR{12Z_fmz@D9&K 
zEcvTh5h3{3)`&UuoD)an!-el%r#?YQ-j;~;ImU=QNSd%SCHkIbzFxiaFYE#K9DDo}Fjm z&kuFX>gtS%ikav=&>AZL29o0%%d@ft)Wb{BD=QZtN0ImS;JMJ+^h5-lQpSJ70uk)2 z37dX)xHKmPXS>*9Ye4{3dL?7(8C?j@nFprn7PwO8AnD2!1km8QjgiflXJCeqwMfN zoB7&#k+`|S7`@(ALYj5wHY8GsDS$o$<<(=fO5x9Ia(8 zN?9`e$)LH?Wf}TM}A<|`pJ*AT0zC74{v{Gbyq*nE%k6>DeMCP(1h)=W(L zEwHDl8p96fKsB}+MS*>xDbpyD=c(cDTtAGGFyD8y19})r@Hxd1`QDBgnVO8&_0&0@ zr2pA88B-=z;OohL_#OOSj6AA=pmCwp98r6}&y_t>32rkFWln)B&S|8h^0a{6FzO=j zP;)<~3Y+N--mIr7I!aCk&#m7kW-)i0In9!R z$g!28uVMn$U9muTqY;dz@OPx9?qI6}Gg;gb;2w+CU&avBm+$?NgzkAIm>Vq*ncYi- z@7gdNQ?5memMTU^Ibq{)-e2#mxgVHA-mw60cawpTz8p>m3ZZ;&2~v$OiE9@ZiiZ+U z$SM?)2~Iyvq7m5$h4_8i2+gjMDEXBJoh7|ZEeCo4g@2Dd~trE*m#o$htYD917Ms0M1m>n|}U5k9sr$!6^b+^S=zhvwi=!~65 z?BGamWs95|S_9a3dpZZf4?4bH4?Q`^n>C815n6C>k3jaGDb$eAXMZ{gW`D?6Ni)SW zc?nunr;#C5jocrR*x5WE{ZIFVL3*QbUN;RPpJvhX!}(Y*J5&vjU|*gS8t1uT3O&Vl zPV3;($`Z^WLoI^(g5-v77?yoiXwf(LYcS8?oCDRJwt_aV^SSPb8GNSxA8%**Q03M? zeMA8TEbLY+RFv*qL&K&O38hq|1Vp5B(+z@+h27mAyF0NJyRc9Z`so8RGoQ)MOT$SkecVef!0Vtm_Hz}ZuHQeQWbjk`t{II#^5*(ryI{ZKOD&+NnO5Im+>%KhZ)>)wO0y)=y60}=K#_um@ z)RN!Rc0SMD3xTrMJa}ZJz@#(1TU!-yC}y`fKFJh{HrWWt(M1G3gQbshkaXPvgRW1) z2!%AB!?&81MalO;A9+Xav|HH z3LO^|U|D3D%-?S`d%4TvF8&|qWYfA7$c6Czx!)C$`gyp0!4o?6A!wo}Yt(0B>{qG6 zwGzG;&sF1?TnBa#y%f4%wIJ~i!lybzJWKbc2P7L&1#SpxKMAqBQ&D@v2EElQ;HO94 z*mve5A9crpLoY@9QQC;7E)_qFeS_1+!!wwCJ~LN5YMqQVX34O!W#+2162Fh<;d_|u z-%lk{9@kDbiY-?R;kq^)<%!nlIS5efPy~x#_RQ$bK*E2Kh#hBx3$eu*_=`GwSt-&& z&x+SE=fs3=lc}@jkvpV?Ufing_Ai2yjy2{znu=9%>^pPT#wF`o`gik?f2jttu2=WA z8|Hmf#gtimA1x&>l531fbAjFcd=yP{#i>)l$eSOH=l?h#9bW-Uo@@4T4lC>Dly$uo z2J(ny9!S~32;WP5pg4jY-UtaEkaIHLn7yqJL#dI(z_Yy*zULHU_>w#rr4=AgyB)f0 zej)ml(oY&Sovb7JO*i@DR#QHb_S#^DNJin96g)2q!JfP1z8KY_w?h#W)Y`#v*&DH_ zMxR-=5S&_~&%22PipM!`tY=SU$6(G><8eRG4h^p4etzP7?+m$AGOp*T)*Z99HH&dq z^>Ium3=I~#co{zd8;52hKf)Pi=RF{LB;d4(J&xzkK|>Wgr;c$B&-v!^fNSC^=X=&m z5}^OU2;=iyuqLw*Uu!&Yr{@&Z(r=^qmh+*aO6d49m%FF#)}%Zb7Hm!|l6vc>w*ma;kQjKtChZC`a->)cxJsO{+P@n0D zBb9+TUoaD0o><}eUe3{%rQ_>&*}q@is-7@i+$cuz{rC2HFkaOe(=%g-YuWk8)?xm= 
z|9HHynZ>-3IVSs*!GZVQ!HMPMQhgJLQs0Z`AzDxl@W$T=Lj*d|ukV({dx{$-zYoDv z`Pum4LEh0;=G3~U;@;>I$hw|~51CENE{W+DoVz~>$J6gt2%G4HoS{V+RqBp2hoYdc zAPt7P7I5!gib$7i_}(Z-YrjEQl5kXfd}NF-ZnLrCo<7bF^T4Xq3`nRe|2*xC_w)c; z{NU_sbu~^fACRn2PPT_CTC1OmtG)D*c;BBL+f%LL#qTNVi{B=%;S`_uLt=?FzmJEXwS15$oqdk9fL8Hhua;}Bk zmo&H}RpYUv9GM0m#JawEC@2d=3r=Gh#bcA)Pbrd8-p3_3+Ta6P9f<&@+emBQp|e3#;ACpV0I5QMO#*+};1JmB9Pyxg3J zxZEm4{cFRsCOK_6I_O?71*)ay=)rr(FBj_i?>#W$RRktXO@&doEiMkO#K3y?fefvb z`QQ8ePXSLCKNoA-jb(n$7a>oLpgzTu{qNbh*hYfhP9f-afxe4sdqllr&f;ti=4>v) zUdK=3{;N~sKIawg>>XNDWr3pi_Bf@I59tnPIChCe^Y3_k8fc9T8svZKXXD&`@*o#} z5N4|{i&|Z6{F^Wt3zjiAxrNW#7~YHDy3(UF1ulKq>!q)QUkdrywrmD^-!7JUy*JKX zaQyr!@#%stu3wsh%x-%4)9Q}Em1(GNYX?XD@pykM8Tu>#$3iH@z(y&ua|>`$RS`xv zo5Z=1I@mhfAC4uwC%j`WvOWi^e!1b&MISsH~bZ< zec2oScnbeL6FAIrz;;(D`85tmR`cuNqaoe~UQ-g z4p^F!gG}zf2Z#6}6ERr$g#75sB^dLIxoZ#d53aO<>#`3*@tFapB!oiqju{?bbifnd zq3Q>^A|)vd9u|pcbAg{@eJMPdOYXqGPjkWmd@_F^ZvQldd`KexeKCOCIB$$(?)4<+ zzl~=-u;YF*_O&4!GNcBDhx1YYz5+)Fv_+@8w}jyP#*i~?1LmM_*f}6ikBsvH-k4?1 z{NL?tjPTLI&co%H@0td;({o@$ok&;ljW7(bfZFt#@E8|?Kbs|xo4OEx_IP3HyGm@~ zo%}&5xvE>pM_E&a`Dbgf`v7xN9d3y&WhU^x8HSw}mI$8bh!M<`8rZvI>yB^~7qbJ* z&I+;eWf)Fhed?nMnb(`-*G%vUxnk$(!z@aH$NH8a88b-z^vBTC3 z9%0xr=^{IN)}-)}aQ{5&i#35U2)}EFKh)({ z*(4)y5wp26uD9l_0{$qqimvzd(7DDRm(d;2(BseuB2Q%c!DRK%V7k?JA(`>Lf zst}p0GSH#55U!c*TRPa5-D=F;&K!@hS!QTukA}8F9!&aDW7rXjPw|N`_A|k+k}_t- z)3K3x%BrNc*s}PG7{UH9dFmL;cam#gW{2ON)P;Rr;O-cNqPQ6BVULm7P_paxr{eJf z?t?ejAE(k5YmRAQx2A-dOH<@ca>Q`X)eL6w9^f$^UTdR~Yh#Jar%JHrCAliRH_EzR z?KTD2OWGl9@)%4vvxoCQ0pBw&@FGXUw9*{|)FQFvQ7oP<2Ktg88Do%*!x<$qe@?~q z^3Y6D!j2c3h%gbT9L!!UM=wk~LS62e50a$Qp~179Pn0#PP8B2EE(2Y!mXdM6OR$|h zp3T!oNq6#>INxlhRw`vqQKz1B$}J((OlM)WoHb`#<@mOTy;qWQT&t6VQ_rWO=Vn9f zZ4E;969a5(c0yB84m^iBz{`MM;5{>;*9NHBTa0^Mq_|$qEUA(bE+5+=j?%Ap(v#}G-ek#Dg zTsM@p3x}UcD*9};g5Hf%aKMDJ_vP?6R78xW9RAy=k1H>x(C250y7lz<^6W5pl_xBo z@c*gVY^-rN#iE6kSpJ!PHu}{_ty0CftOp|Dsv*#YyfykjC${&(Z|>1=IX^r%cM=i| zlAyatf|lp>#ojD{-_>GxIkiP%+C5=Hz0^>b99Q*k7>MWh6QC|J 
zMf>aQ@gBrn*}GCSjD0FJjNS@q2+ws|-196A(S@+u55Kc9!AF7%{!<|3Im(>#=M{}5 z@aAtWp}ri0hpWN6>r-(`LksKIj7ReuC;H6Ezh1z8gbZu6R3~8F->LAIn}{d6eD~4g zX!}_z^Ln!K<+?l=F6C!E_*-3+j$lQ&v77{Gj|E81+3!ncDvo==&GZN@w=JTQjS z@=8oM$bf0rDw)^Y65bKR&ol^6RqijU3DjZ?pjT&$o;(l!zG{nsKfLhDGX~{KCYX1o z97-G5rE|F)2i@+90Zxy^GzWpbZIW-=d~hHvQZQM?1?FF zpN{qOE767gf-f%3;-S7F3={+L-w;C#Yd;>E%(A{!w#6H@so3-&7IinhV868-`ut7r z;WI{n9(F9gD#!PBZq3JvV)8kz z{1Ij+8^i+x`fOF$D}Tlej$ZCq?v#%Mrk=32iNw#7=~z-O@LzHT_LGOueL^KFUXg25 z)F=iI*MqHoFa~a<-a4GR+0tCBYZHzB2-A%=TNL|PWmBIOCA|_9*#G`Y(7Z3X_%H4Iae4Zz~*+2SjjVs1G)4zLP z0&Ck~+@Fzvo>8`Frp`B%{-tNRC75IQO-R>$5XaRG5LV%j>o@q`+31Lq-Sf#s_du!V z)c@nGcK@x9u$uq%H>P6v`7-p-?hUo-AL93&Q5gKl6%UyAy-z(j>|zqeTF{5fcUsnw zFf@HN!m*A;xH~om-wqX_4dweD->@V2T?yw>e?(QxTT$1fgLO_5u;PUU91QI+>jC*MKb)u=h2wKBdr*_DP`0E1 z+MP4tv#SVRovw?y&UZxL1Lp8fPr`_fW~dm;bA(AQ+^9>nKAHuqg&8x^lC#^I{5FYc|LjTk2f^xxuzmfmrQDkfJtssb0-1FC0Q$ZTPI zRB!(wP8;Z;aTzoVluf{JJN0!Y*`baC>XP#iBBm;JPIrpt@5nh{) zitjZX$F_~f0P4S+F4?0noXq2y&h)s2;>!prl3Yz7e}#FHGAV8ban3B`da~wryHBTI zBYmF`@#I{5NWiH~_U)dvW@b4TKb8Rd`+LH+CJxVAEpUAbb=Gw9G3f)`^?~{6HaCRY zG#%`V4?;U9Q*0b&2j4wXRC+l;{ZA-9wvWR}_5x~OCX@bm25ysGFY9`;`Vx%iDWiH_ zi+Fccn=|^!D63#L?x;7WoXzJvnR@&FIJ74#>bSQT@0^uL&@IN!$};$G?SYr0+mMwv z3K6v)XrfMZeTps0w551TpN{ohAN;t)`2u_4kBuSgsXh&FJgI*URKx_K3Px~k#RlCkw1joE9S!|Ge>eb4AZVN z7rEIEAxqdpb;$`ao&B*pIS#v(bdma?1Zl(Q-Onw9&YmWbIpCA9(lo^Bjb3;}Uit(* za^x@M!8g#Z6mN8a9Nablz%dWHpK)dpj-^Bpm_nD@#z z5_k;?LJj$f>w6hPvZsVTjx@|`C_{%d72KQ8zVh2zXxHiut#V`Jkaw}vEE~zM>>;P* zheYORA1kpIIKB|-&Y3t_UBGU+9pb@(mBKlY%z#6SF-UVbhR=6JC_Nyfme?_KI~})% zlOgJ&g+I5J!}xOvIvrjj^Lm;p-C?fxTjcl{;^wRAi0@)pp-tqRmPmxjj%^|C+;~}6LVjk-g_rBDPe-Uv1pI@Ua zANEGnHM|Q^_2G|LF+v{8$LgTDoO|U{LwahQ>0M>6Aj=KS{Q_|(G>$B5V-#;K#cZEc zJkTnKtn0~|5701G#p`*G#X@-l@&~3uyk?)|9tU!0nGH;KCTA^__vd(|Z?ockn|B>5G)z6Fj}!Nqfm$1jy(25( zyX%n9YS^>hpFJa{wcZq) zE|{akD9%yVn8NOe4ZN1|eAdSaf5Q{lmoN)Yr<>vUHu4$7cz%;rFCXc)LBvNV2BE` zgi8#Kkf>mb%9FX6{=gRI%0AR#<6wTs97mc3$*F?MhhMRp4NnGbYBYtBv4w|*SwrMzplGiJvYt95O2F!GZ 
z!V-kiV=|5RoPFE6AoSl4aVdTX>K7U!&70o-HV(KkFc0oUzzy>_9GEc$mOA6nz`LZO zcRqB^WXrr>rphysB2Eb7ToZctI6tame`yBi4)gNR{WLlGDd8AVL+@go0iwo{r^CL2 z4zDV3{b;ku3fUw^=<1_bWfpFp(`6=CU|tooJo4~)8=CikjVK=_et#ai^p~5Z!R+L^!a5f(%w+Vv^*tLd!G{@ zn#{3@UayZ2HK1eULS{-fH1geHX&H>+JmbFYH3qMW=fHc;Y-micMQHddkvX?T91k0Z zLIUT}`e_uoNs!b<%VAV2(uFtyP`f4eAb z^E8KwHTn9-aYD5O))hRp3mkz5;Kn&z~ihhl*s9k^q@YW zmy3ewPOy7C9UXVi#_gjH7_Px=G@0iw*A>dVo~*en>1{cXk|0cX8p8C?OpN}|6fX4o z){)9y~>cKi@V^K^L5orYm*W>C@Ky`THp zxe4xQ@9l@4S+kk>wScX16^hI=(er5)44d0RHTIdv^&$5&d@79CyHNGNbL+)INc~)4 zlpG0-qM3YGTEqBj36d*vaG&Q8S^4_yygQ)pzb~TESs!u}reX=_GuY_~$py|W^4$>E z>W^OuDP)feG#6Gus4^!ss8VJELV{!gz5RB}|8&z;5GdBmHsVlzc zbM)OP2CmHUbyhBc@jCXnpDRI9xje!iD)ODCgWg-+5y$A#E+8}s2)@St!nxT z?aI)6`xlv1tzwavDg1eUIeE(f1>+{5^0E}ArH;^`MrnUG9;OpqVbPA9g^eY6e2H^W zecAe;RaUAXRPB~fUt);AoNM=~F~+J*_894%OFy{_-Y)P#S>H4m8?iTVZxJ>hNkPtW z>X}Dn>+6vde?{G+$D$|oke&rG@Yqgni643JrBc{F^gwiG5Gr41AjQ!XCbuh5MAlMs zOcnHuzlkjJMpu5<#mKF*plQuHO(eG_*KLD z;tp}Xe1p*Coxgy-!U1Ec_bIy|c3T0|?mM6?HWBadFnd^PhMx}>!P_m5d*edp$P_Wa z_pKOvL>tf6`tq)842@Cj&E1!S;XLm~mrTa-=s08?ut0yZJf10M!nj8<0zF@dqKk_~ z_zypv&@E$c8~OADY;k-RGfnc;XqQH!Y!JOp?A;E`t-+7_0&M$L1NG#th#lH0R3;f< zh-V@k=_{E*4}|k|Ahffh`pPPoC*3`%T(3d`jpM(B%S-+pGIbr)dec}J~kyv<* zeY9jT^wQ&;(#9G2m$RXF))s&1v20oq3k$N9>Lg`ISVkTY=N58>9q7ThAZBzhMf$uL z*w$_VR7m~50zCP&`g2MTjCQN0rA(S^UWyVT=)lE)(BdIR&iW8wL?qIB|DblmCB zE(*OQgA=Mh4m$I^#laolM54D-Q|d4$1f zIb!ssOfmr+;7Fa_p?xfK7*w(8LMtfaqXxZ z3@`bjiW!Jac1GB?v>Y+@smSuG#5BoYF(K_1^No6(BTUAvWoBq1&m*Fk`Oa6KIMgi! 
z%YriS_mBy0rWK?8nncXx`$g9EWIca=Rc#fkOh1YAWqJh?^GGQM=HU0-7j&#))42r1v3w5f|A|ta5T@y`ONXStPzaJe_3#B zFvIJ0l~}QnJ<7YPQM;l${BtjeU5m{S$z0+5+w?o;In!exrMJfspmx)9TM{O@njz7# z8toD@$y;Slp^WSK1pF2oUVagYhTM-HPGet$DNGkyBZ>WZd3^SKTEo$~cnU_i)3+uq zLWkc{%(mgZs@q?&%2omAPmjimY)c%}HAZ{-2LGsKpt9DMycjYUhlgYEe=dA33Ng%f7r*X%Vnrf-e0fRey2}7(?-b!Lwf~je z4>j`IASmjM_`P2rt4{~uIr)p_pKM@E&dGHhCp7e*j_#(>ctIcazLkZDanB~ZzZlLV zzKXmLdqo3%CBw_PceNdbyMtYEp@p2*BwMH(dBN#gJTCi~AZI)0hVRo5(W3-zew|Qs z{k~XsKpWRzPsUdI!nX7!zpgf&d1MJTU-v|9V=9CmIi=)XL~tIrc^moJZ&lDY`KM4B zq))vv5W~rn%ev+SBhDN1r&1T|7>w35_PMRG#NdtPm`PU2fj*TouXq04Ke0l+QAC&P z!REzel&rSGMteIrFjpbDPEPwuW;?>;aP5&b?3t7KRh@%|wIxXC-xiH(Z$$t(!Na;v zLGe}2$E4O+`!koEGg}0bjhy^&Iu3FkynI>_-0Cx7!~3fKxmK}q={-@aDUi|-g%T4Z zbbrSgi&ZhiG#j+EM(~^(1L+?#XkRL3r#1V!AC$sJ^g+MXkA!VL=e-@n@#VQ5PVOS# z^koj-Wjo=ip%YXFr@(EY39cqqVEmGFSahyL{OZm)x%#>2n52)g;%RWo9EWSCslSvZ z;Z6X3^e#@=#s8P{+4~WySqbkW>FAtNil>)3Cy~Ap(@*H2!;%1mo@8JBM+f}vBt?&< zt_Ym$2i=wg^u0ko`>y}Zr;~BHQyJ7w@mwDJRD7X7w_Z5`Z{!SdfgJh7O?mifJ3z{(p*;<+SR#<~Md*qSv`?1gqG(rUL?~y_DZA`I8Xh|V+P#!4ppN1o* zNzk%4L@E80+VqTHKEzx|suCQW6j3!=lOAsibV^}g(M}g!z9+@ux1MM;4Mlb&{oLd{ z9;NoafO`TSlHViada6^@uzuV}v9?Sb-{%J6(sF&=9wNcgJ?ZGlS9IjhN^9E`fb3|2Z3rRbBOAxvdnp zw#TDtXE}QW`{S@C_rm)Icr)7_+f3OHo9&H78krdPl&lLYvKh~(;$o>Ff4UOg>r!!^ zdXKE@4c0m*+cJVkGmd;Bb*!kRI0)4 zyQMPgV6x`9E^^+HF!F-9Nng)8{n=PJ$QlmH_s-+@LoJ-7&0$2svH#& z)HRwak#esc_r*q$_uCL_JlOS@WQawvip3>zF0an2qW32ooAy@ot~CJfK7AHBYk9Bz z5kwCf`w{Z&;p-zspZP8rAL#(?vN+5LHNeUj^r@ang!;uwnb-T!sL0OA>tb1`F1!xX zW50^uKh=eN74~6n^2Ef20r>ua^A7SVuNjo0`8B&==tGnB`!&UM;5$c-9S|cBtZ9yj zHGt!L2h6o5KXM~|(e>;PXJ)#h!U`_G@}M2beK4{RTUK^K^M`i0YRJCN4xZ@I!5E)1 z-PlW?jrx`DXxrBdt3BiKZ<;BTpL4$An}S<973iYY1;dx|{cxE&?8gbrfScg?ReLKw z&@a^+lj^3UVh<2(!*i}S+3CmG!PU7f4jMI!hE#3bZ}mjf08{*VX^Usx%(pOyd4H%s zU>}7xD~vGzDm8o81ZWPGeZ7XQa!~)TNi5rEg*CpR7^|&`UB7&=w|5R|jA1=eU?ox?q zaLxr%k?}ej!d9Ye}9ZK)zJi(cwpWsr5f2db)gUe_XKXX(K?WfBxO8DPUqPu%QBKl^@W^B?%3BsB@% zY3y7JWdD1(6t}6@1zNPl^`eJD^@0&LOeX77)fi^&>@kTNf3}kg)K>aq$AdU*ZZX7V 
z`d_R$XI{Yfb*JUsF{<}RG3J~xVlAg*|8HX)XlsK>v3XEmWRH^*J+bjZI8M-Cw_r7M zN9;}VQmMe93%|t0bAN<}1Nq53k6A`Cr+D5C4+`i%<30TM`A|3}#p9WoAh&^iFOBqV zPcMS3`BsomKMf)BF4I?d2g!0`xft>EkS4Y8NVkKgh$DyZP9@#$L`&IKl7?1c)v zOzR8PFQ3IVRUN3;`XYOaE_NPtqBki6Ipmk8D>$KYCV9qPtsuQ#hCMZzShJY-KNKYk14H@FhE z!|By%{3_nk=Mg&07azWxU>f^O{NIun_|O$LYeTU10lDYvtgxB6K|`-B7`Tx;DdW6D z*7bIDX$zIdUj_7aVRgU@`!tPlJd#}6f7wv=mf(6+2*%~I!}O>ve8-irL!EsS0_G+M186Os;;}SOD+dj57=X9qCfsg;!)Po0k_uDH#2~KZle;J*IN_zUKI4a zEXL_sVOT;scK$TLs025xC+|1#izi~nM&Zu?p5X`7f73VzXp@I5xf+?*o4n_z@OZ-R zkAs?+b=?ssMj4~uD+$!O#{QS?xJkWk{;FAU8B1MiK@kkddQpAAyrH@>o*rlt3QNYK zsoV=UD$Mzta6uA(FA25I=%XHjdHrT#k-i<)SCEgnBO6ZkWEO9y&sg({xR_;tQ?Fyt zi#!Rv$DH@hB6o6y3(8{1qU@23qe{lu<6Vxge^asRMup7l$=cW2tDqBl`92qO=#N`S z5A2z~Ca|>PeEI;ng`I8Db;l&M>k^CHObN~!7DL@24^g}G5!s}Kkn{h<=*~vC(rFrw zW*gu*=kFa;r8sLQLEtVQxLD4_)Iej5URH_vZyC^2sA4y(JTz847mF7elgk_f-45g- z262wc`B={=cXGlep+z^1>;N;II?A4y^Vyg;g1MJf%syx_A7{@#ti54qm#d8t4inIc zeK5WMlc05CD6Hw(HC}Fu72eFBd}bfy$xj(Ozr?L1Eu86Lho$U8!#W>)7{uqj zog@5>Bau9G7789SCzn}_gNeoL`pd%ZG~3dY?b-;|uS(etv{!D)jp zw+W3M}A{bjGhR|!C` z>471MST&Kl)TkbYaKj$@W^OY#vFArPyB=f7f3ks#5zn8m z$W^f`mU(}Xb-g)ey}93i5I%L}JU3G}C0Fd!e0DB9V!vDmZ@e+`ffwgoLw{Q1;h##3 zVy<9gUKMgi^+5cCr$Sn-jdcOjFc3P}8Q_AhTT-Dso4!f~N9dKtVc2*JYKBj0l-4^i|+2eHD87(K+S8zNIL#f|49xOsWHQVK$%;75j z6B+j!Man#F{4$*k*I}mM@s3@AU-j( zi6I;&L}26V2`_ub0Asyn7cQ2Mo9Kg zQBpq&i{;4Drbb8o{fK|j;JS}+dqT%9Cr)UY-D%S5i+WB^Kd><4I30(4r za=Dm8Wk1m6YFiARSBwhw84b$_q$ejD&3COZ;W4?i&+?(XrwX#Jm)O`I=jBd_U5%FT zZYRabn>vX6YJ+dFxls7ayxLF?_>V|{#SR@P^r%F^z%&$Ise;kGVCLq!dmq(=pPFJ=-$sOWx0v;2Pc}`u@|0Qfe@g+8bdc z`Q95IW+C(&=T=?A(Xc%cKkxXUXL2R9_myCH=OTm-?274|FN@ic#@IeO0e36egLBFY zb(`sfS9C4Kk2k4pIllDHgyzR0$of8#bv;>oNgc0uW~ZM5?40QDsc^y55KFXO zM;>aAJp3E$g=2*wuyjtvlf(9S7hi@6c~UIcRVMTNLDsopoaJ9J*x`wYBQN`_P7s3T z8ev8UH;f?{x?j8m^Ejsr8_u4|@izFB#JSjq9QH{wA1ULUGH{v#mhC?$WDi&~Y`#v*2>%yD{Hy#U93nE>KbYBz)$wPrc1lthlL< zLCah?uP0ZG-rBUOWD-Pj&ehhI9e5S+T+BS^ic%~cu8K$gmqdpUJA}?)c6o{ga{pPP z)68;AuNL^~%h{eyBC?b1F#2yP`kXAr3HDUTsuR7h?0|IoLxM*b;B}opax@H~OjhLO 
zjXALI%K7x9NyvDT1fgaJ!%4hX9g$*!M+r6!kb|+}c`@LYDVG0@#Vj)u)E%>i&q@0I z`Z*&zI|!|lW~1qu9=hl;H`XSVIrVaxdp{mazKgB&`TzQCfa02Hbb&Qgg894l%7w)Q zZ(Lp;4eMD+$d9q+--dtx{1n{iGzX8$+hE}4U*ej+K77qWaNfoUYqZ_4W4aWt4!Yy= z)*z^AC!#OsD@|=HP;mtfjNB*xAP~n zhn2nQ`WEUuA_{4xvV)GS>@Ikz!i+Dw)rNvaVNquPwBX$>XG_5o%i95KSHXUWNk(pOK<*qB9zwPr}l7 z(RdqdhJ;?lXzZ7Xb?)T*t?P&vDzC(}Hd^$m2Segyf;2-1jA+gwtA)DcC?Cuvzh}cO zV<_{!I8Zee7n<0=BIEqsFHIiNlHcOMXKlP&R(?ut4x39eUUL7{~? z%)2$XcRrbZv`UmXzY$f{2_o?O4E)oq!!Abyc<-=ATngD6nZUX4-uShKUZx6NC|WJV z&#t`VK3$03my{voUJAKnT?9NLm!sJfU;enmt0fl~d$~h8ITTs!LbB~{hXY;8P{5qY z#wDd>Daz&}J3pcwVy}G?Rj*C(E@3A8@0j8^xx~%m3XrkP9p9R!L3!zHjOBdxt{lZ&~1+;Dzr2nJ4@g^6*d*volZ_h-r2 zW>N}iV;gd=o{2p-*`IPI9Bq0SqC}4US|+(T_0bJO-UeZcI(hI77O4=?#Sp;915O|8#6-iHk@orE$S64@Lnw&mTmcaJLO6iPcD2Wd7^8a7uwWh;XipDwoOCHTWUo zzObK*ehueSlW^;<5pJ`Od;b_It|q%7uzwKznSVc?Wr%rM#ps)v#EefFQi`7mhf?;P zy`zt|E)YY=EeWI_VaMZa6n=BW?kC~c@F*72F;+-yW^dI)DI9*0M^^hzC?>oV+TF&n z-Px6z~M}*^TVGIf)P2dtx20fP?G?&spSlbUf&s-7eVa6!sx$oTsJ*>LM zyy3P??9X8S+Kss|#Y8wH8%r0@}Yky1;uyrW#(ChKWK|(55J0o!^XjL zMF7tH@BPx`hK{xZH5VlKBtg-I{lCGbh~34GFZ)u+%46Bnp+8!-UJc*--Jjj2WqxD`N~7WZpE)e|R3J|+54C6L7m#ti$b-sIn)5>Zw?GTs-f(a3 z?t$BW^mnMR6K$vi_Hj-fc_9iP>?fdkZ!t=imSDL-39{Fk5k~q+}U&LiMws`;8NlWcZE)h6&z z$K2HZ+=sq8LsI&L*GS;u6YtXArJ6BtJ%0zVg;-IWq3}X zzg>@V6kq9x_Om{UW<5RRX@=v^YEz6iC9g1%44v8D_%YEJndD&1*Czb6;JYTl}+7Yye}1#UhAQQGx>;Q(U(1`l=+@o`mH_2 zjejX5&$MvX$^+T#ow%Xm2^)({Ywb$;W{xUc!t=0{JTZ5uhXiBLSmsdDtZ@1zdpNpfV7z{*%3@fqeaXa*T`B4zF8 zkP+;S_85emoT2Q>ep|pQTT| zGE~VG9m{hODzRmLXc~TYnu#yWmRuOjo-NNpR8P&9*_S6PZ}d>GBIi~uV%l8R2O2Eb0?B{fk^Ur(Hc*?lp=IiCMx}ku*>R?a68nN{e$CRXX%P{jb>Of${Rap zIfk^uFZSN_6&MRzQy_HGkE>B2}c`0RHY4WOsRAFrXJ4Cp4?eWmxo8mmJi zIIL~THThkg+AkV}cW(3x4Z>GDmDi%9vX>mN+^{S<4wE^IhIP57{idep)tcRqovSXW zqd%-j={k}v860n5&(a3Lm{f@0>88KGnV zRPt6M?mrn#cA5#6w>?<3HcNQd{os zih^j-1)G?0+0z%7WBGJ2^A=unB=ugz5S28Ts*^J)Z=A>dW-~PAN9GNy!NHDMM;IH|AJLzRZIJ;U3zvdQ~~&8syOac?p`<6B#Y_cznM`wJtIj z-&ZUR4jG``LO7rk#HaXB>ex$exIUgr+b`y z7;i&6(ee0AF{C)&nX|_EY>V*Z@@L6qGfrV;Ez$dludzn_^tEN4vT|NyYGS{tki0?M 
z_Y2~mw=yq?bH&3jht)bBd=B?#%eP2QpD?F$zU=wSQZSn$Op*zj@%%mJ{Zx5+H7dEW z2}Z))@nkT@0g^L6Bp-iIKV}z(5xpjsm7hibf4_uAM-yo-dL&K%UNq%`Pgv2OJ44Q> zt$!Gje>H-nW!8}ENQX$toV9ZiCPN@bZn47YA55R4<*XF0-I7xkEPUKbe3Q>rci~Iu z{W*b$KZTDjzT$sGtJ@_~@>d*(&~kqg52Y`heXf#3*_Uq6o`$CT4o%O~bl;&V=Tg&t zY}uo4sw7>H2QT`eecw{{Q^KdMb|z3QKRg+nfR1(SD(pZRqNONLmmW~3)KMEC1?*`55Dk?7Tq=VL9!FnPZ3 z=$DOAdAY`)PgCC7kxdO~V*5mm`FjxchD{PJy#<9AyxAnRocn5TS~!I;`;QcsUa>=O zbro*LIn0W$V#LTcSZ#Qv93JSiC^(q5eJvTRT>qCJ_v5;g1WpYkv#L+I*un_tqz>;du7To^w>H_#^jb%UnhBMH2&9yDN!hg*N!it0LM#_{mqK z&*`a|o@ik2+WdD%n^QFg7(Dl8Rh$-Uro=`bY4C&ye>_?zEn=z4Vlnv1g4^=wsi5t&?uW8&hC7_Bfpzy$>Hd&i1jb! zUVb)@5=z*y^rW&(TGjpe#9CG8xoWe#J{6wx;IS?$Pkn`jOM=U}_plCLYP2whab zWtneSO{^^L}!%e#s- zpJxr$qusM-s^Vlno;{L$ty$(AdnJC~Rg(YI&4rylUg8yqBzU{{sD#lO&@h%2*9vK{ zy}o4hJXPOgjj`wx%EgfuSWS?9fcV3Y3%}yvx8Yb%kLT8N;ePv+Qn)>hzT&GKuHku_ z`cum~)ux&KQ5EpqjNjhJ^Yxf;^1Xbx+cJl4S)Noh4na2{6ZiiHGr_e|a&g3;(z-(9 ze69$`&uZuI&s6^cQz9FN5GHfevu~W(DE=$M+5WV8HS}`N06jPfN{?jUF%%C@_ z=*wqy;Db4OIl>?4DD(Ye0piup!dJMsyM-yT+#-^wRUVQZRe`!v#9{HHYw8a;_*qXj z|8LbIOGAckl^k2iJ8OB^pMn>;%z5a`^-iOSnV8984`*rxi+{GM=+DwC(3B^q>3N!7 z???ZZJ=gXdDo1?p-W$d<`nN%JO%s0EAJX^Na^d=_08YuCD=5aA_x;Loa?O&glrmm! 
zszvagZR+aXL4-GnWOji8b?k-9E*heI3vYad@nLjR7|fze)JZSpnSL@;B}e6pb~Cm{ zJW$=n^k?TvFZRdS^1<7M*^+0P)5o2d&{3Q-h{vvtJ@*e55dAV8Te<$mzS9v8=~wlK znK4dnLwWO?4ZS5dX{}i zOy52g8tddV<-2LB%hPloVte|I+G2KIeX$e$xOuWu}JEXYnu- z#!fCLI6H-*DOLP-_>oE&_*7jQV?tc_P#im&@WtI%G}Sr$d*6cx1rhu*E}4hhU_*ln zGG66S?pB7T{Og7Fbx8R0j(Tv-i0YkzbotAQW*n1Y%+{ocK`*G`EuZb(nD7CnPhVL8-<% zPEGrV@8cSiJ>`P(sTf4upQHG?PIy=Pt{BT+_}QOM==?I2e&Qt?>0pQBFD3L(7rxS0 z;f8A1-=2=r<7)F$>eLYnzB!8@af|TuhCrWpxzw|S&Nf3>B^hh=yV=lpQz@4u7h<94 zYQHu7p;GVPP*+pTc$Ytk+EwC5@8iMkMtSUPcE-UBG zT3rm=K2%N#{rNM~2ird#Xe9h(!)}GVnc&6`nW1zS-r9jhp2YSjqRJ_sV?PSH-;^pOs|t!nssq zU)|}Ktw<|7qqKXQ%Xgke^T|g1{7dphn!-mN!1#2 zLp9|&?%H})#rM0ee%RWwOW55nO(bXLlK8qBjhZ9_hbF@NeuN7fB0!j4xX;0Uq}u|yHum8 zZdB*W9c8lWh6>#`kP98d7%RRm*B0&^otVPGx^Ap32o+vYGW`yiqq>*VW!k$ly1KAI_*aKMPvLeqD=xju7bZ*`ji1PVTtj|= zR>lwIv+}-bdvg$@eTLK7!h{*ug{S&U7Sn~Nb0~K_wz{$0m)XjU4kfHUT)^rU`Di-7 z*7UqiE}e+jbw_Ex6+Lq&$*BI@TJlyMx%7J$M|EU}D%rHV*GHl*+OgNQM4rzn%!@AN z-0P2OkHK{nGuxV1-y_ktwP17}6b&lm&k9${_KxA9lI!T5aABL4QskP6Th|I|n&=T! zbzZ&KG3McH$$;r&MnGE^Y*kWF*2}>?AT^#-Ta{J z{_TnBlfhX0X3Gp)SI%9`;FE>$M(_D^QReT%jV+KtKfAM|sqa;UruEt5)y?U&L7wkH zgE%I9wHndiT`=~dmUkvb9h`aNJe1};Bv0v@3u`(QbGL5}!@3txbw-D$_3o()tF5q> z%!XRBm(ekEVe9c64mI$gZHYhsc1y-rxTr%`mScK8mDA#*)%1Ir5d2l?I-XVM<0YNQ zB#jf2E4ysH`re%Zv>p(MdE*OHEicqJr16(%IO(nqw2MC_lTO>VeT9;&+YU zhLZ`V5%wh6Wy$YZ_?n$2ak5}MdxXpA6jX{ ze_Z25Zxx1HyQ$Ru&yoW1*?*KdN)yK_O2=rHtLHXSn^(r4RAGeyMMp*uAUdS$29hgO zuYfkuk}EuHGSdpk~$G%Wqb`4@okMUeF7VXn!KiN%- zPt3)Oi3NTf8k9!meuaC-X+)jRLbsdDPc`I9Pwvu$S+3tzcuF4@T?`~q_M=OL2k2*< zg>=)Rr5nz6&tyDr*)gw1cn)tx4|c7BnZvFt!`4UDh~;LyZyLcC@%NWmxU%?qHjDJ! 
zSlnJ%5dS1$`Pz~);ZeWcn8uX`<(xd$jB6{;DaUsDJij-b6@|83inM3!g&g{3yV8}R ztgnbBVz{Mb*A=kQJ5lCaMI2kE!_bj0Rchyc41M8`k;@2S8o5!qpcJn}@jVI0e9-zh z$!8qNerL&-Sx~^B_)@Z_H)3CS|ztN3#+VPkFE`O3iYO?5@%{ujkd1y6nz*p|UdvaHqzfI=dtn?7kQCU*?kk*q3?{;k?&PCFO@b!|f!G zRCMlray0)wHP!vrKG~c;J8ID+aFDQbB?mS_@*G4rYF(Mj(hc6cx)Q+3>?n>twB*a) zDsG*eO2e7e%#MGpjf5t8k+9>`XZKODR5{ zqSMK!p!kaxPimei<7#tk#)V4OmpONBJcMhML+>nmnre+Fb;kq{FK73wCF~WAkIR5U zUUU%NZ=Gjq$CyD3-WNi^3sd%QaU?i22krLK#~l{7bca|TT3GUjpXkG#(zx#|e#(>0 zc+u;gYP4ezZ|Y1YwSlqBa=rL0c{c6exRP;6Jp1A+Xm!?(C6QHh{+Wf7d4=MSV;a1cc?j~92xlHcu!%4$H`fW_2L2Yw%76}(%YYO2ym1vqD=b=-V ztd5se*mUU|GsH7+Kyncqc{5COU1No3W_dA)AH&iKJfV31X9Ydw?`Yd_$ve|fpRB1* z<4AHFf-9b@HD3neur`#{8;uwv9GB~(r!pdR2RM0 z*VL9+BfRp4u|)R8eJx!XcQc#a$-a`wFOqzO(3?yK>GB#O*E zjSFwhSzO9ObP3ooyi75y$(F)TWlQ4OY{>Fd_^Xh!i}T@DxwWX^V9 zrU;*C=x29YZq8vuqUf*F0!dyM$yUkjJT9N>?HbWT4XDJ!t}{ymJ}K8U@rPTF#bTi) zugrXy-&N))FJ`EaD%TdRrfUyrQs+E{lHZ(O-MHNv%j=jw`+T<<6E zyVK`qD1**TW@ND)ZN?UJWwZEDQcI|l+J*R{29iIjL-7?Owx-x&ecT1FAmO+Fbc4OY zG{}sUzSWVMumXa1N?pFEK;!d*IyNTFP76y<1BUGI#pk9m4{G_a*d>Q}@e%#KIDmEK zF|3#QpLt0ocEZ!Tc&CDIPwQhe_q);;p633j1kMY`$9?SxN)L&y^pNbu-;9#|Tn4?q zIIuBbCV#amWZR%>rUi)qw%tp0R-TtfbRuXTG=OfcJSfV|W~S`Z`^kRgP0J*9R~oX) zqKf|V`BHzWrdG>0Dp2m(?6sEX#~8j!e-*UDkMYv0KmOoC$v$C7%!;J9u-d~HSEGKD zth3+B+0dgUcVa&(-;QR4TnZzn#F9n}o%y{%K6dVI!ps^*(V{5YOP*V>?C;J8XS3q} z`!Ul7WdHo8Rur0`Un`W^)6D4m!I`Nyvhfmqo%fR=xOhddRd|74UzeeEFqJWFDhLd1 zN8L8JmHQV9UXF`lsOKQbEb?McR2CnR<2k~mVDUo80LixX7hm(Y!67DhK~4RKeVR#zc*F< zaC4eJnS#j?BjyZt#jt4($5*(byV#94+Y;G!TznXFtLW&SL7gR4eDAnRHHdzsB9AKe zctp?znNu`#A|W-4)4kkrtC~zV$*1fXV?^tTl0z(-mWl?|R1E#1#*h1|ZZ5XqKbvsQ zZ8hPxn+wtR^02PsjnVsHlK00E;yZ|5j`!8NjB9loG6(RM&H?!=<%N&{k#kD%FLo}k3!!6{z)Z0c&+T~n9^>aYBP=T@C9i2V?!(G??%RyD!JmaiU@Cge5iAtW=?D{Nwsg)VUGj*Q z9PnqqVI;mot;t?cM#JTq7}k_AUFV0|I;So+^#;*?o7B_p!ei+n939aU|39bgQ~5-O z1VwY_tuU%?mEgW5k0!#6)%3i%H4SMp>ZEcom*2bSN}KMmpwV*C&3oqm&tKQ7*9ek* zB$v#?0?&dn{t#X>Pb+X;)tHFqkJYxLCU`9x%cJK8JYD6(+biiDf9Jwu6Um+*ktCU= ziWmN(gI`SH_})JMbECLFW)NE18{3LkSp 
z20NwS{MAX1%n?tOotZwjbiGLx4(R@Qo^-mMg4IYz94GtZ|EJ8CHaZg>UW8|LCZA4< z*K}|*TAX{R>f9PYOxy_Sb{~x6Qx~Qr<^C^s@Y=CJ%$FpxLHbZnj}oHtlK9e5{QBKr zsBKSftBM|dnKCS#205O5IWBwfkvUWc_;YGhI^liOSU6YyeRs9^(^DibZW?hPKdByF z?kk(g1~e%O!s(#QiRSxp`FuL2Kb=LtK9R1A;+XWpgOmeuy_)ASCa#FJsyl(&d(~ez z6?M0j^J$;(aYP>wv1SSe7ev1iZ^oH5qFJitN!aMQ{IhW?1H*r3;n&xyztz9W;DZ52 z*C+9Eu=wWnx0qnv^jfLKUZU4 zh%R~8NG=JFb@#8%EXkF9>uE>w>NzoY{bZJ`6CYGX{SKBASiA z%k`u@`KKt4SKqw(EjC2%k5id2&4OP;DoGue#gSC;8Gq8jeefAI@`NG3cZ=df12gu2 zail?inNR#KbN0SNX^|)W`FG*vT9sk(D4C|BZPN5S1JgRfqBx}vMOk3&6(c!OW*iqE zZm3+t7l+!T(`J;+?IyBD=H3B$#jGEa$?S>vHB2&o?44NXSH$%$ zl7~7mkdylpB-3$#a3{(+)IW~cis>4EFPfgG>Gz`Pd4oD@F=g^~6;;=i`8&t+#?p!= zIWBk($;IV^FMA|U`26EkF6tQZXqfPRlH-^!e3anjJ?LB7k}Hksuwh&;<_g>NcP;US z3#ab4g>L-2WGoY&jK@u8W7T!@`CC}L?#cNYzb<=^a2C9utN0K47>*u=?-}7sTDg#7 zl|$AxXXg8c(N4TXJ@piC9~Cp_hUB#h-*&?;;gOAer~KCpl8nC*^si;fi*D|8`d4zm z2LzC6Jd*SaDa0ol5?fiqi}rDhmHI%_^Hx^1rR&?P%73iXAHPIkVsFgPea^g+`nsv` zy)PEKvFA&i?0989Q(Vcb;B>wVcV=o*2LiXeRk{OAnUOOd!xS6lX?e0HSn3zyoXVOnr5c|X{=GAAcHC5##eXu%ML4FPOgXsSiO0X^vU8y$R~x#c zcW(;+N^U~IEy9PP`0-jsim{6_hl=|1t? 
zKRgxv^@F+e*GvYa4Z)o~DO+Vk3u|HMHACFYdFf&H3jbfL-4M*F%5gt;=e^M7- z-5!Bv&UaE01iZSP84rZ)R$N&7XzL|rMrSt*6j;BiIZm!pak2$uQ z@B*pHFNt1G!}qD_dH-~4LjAaJ>aKo&O0|6um>}`JJkWiU#&vBkUfv(Z$!}3O9knIL zu$UDVqEV<@!khRT%6!8%wdu`70{@xD_~L;i?DFDHRT0;2x-z!?a7xUs$b2=7IW;kCR(5DjD&@xM zOwPEK^8D{ls{f`>s`h`P6TUivb=B77nRtk=OY$~0dQng|nN1txIcO=qv*V>q`zwpS z-^*Cutq#lod7^G^8N}gflF^xK%@2E5ysV|ptmVbre6$o_Ag4I7gSNzhH>ocJ`h zU290e?Qd#}%p+Tuh4Ofg%{_r(x>QuZEy*J8r?duno` z8O~s>GH1N)hhRD-idwfsKVG*=ST|Wb`m>UBrA@2x@06D0wPaR|U|?(E z$uD$;_1QF9;fPOQ5JkOWxaq2x8BxkinH!$pQY_g4PnE^{8uiydrkol$jCdCdhVFKw z_wszEOP>3;kXXL&jONP`Ck9T32FUh&OY4)W<&p& zY;<*9__e1n#UIJ@)Xb1T*>8PY62~;jKi2ehfem#y|Kzl?G_YfiXC#@E3~6>=o)7i& zh`4V}=LZv5v?-2{Erw7aJom;UDyhmTpt12=b!XoNbwK6?`X9y-X$W563F*0S=vSQ&g#Sk--Q);v6XC?Y<35i&Is=f)o5p&5?5UJ420 zY&ft&a(xXlxt&zXiY}eF+UKE~Kfr|3k;ngb-4$3{FMzkquPb44w zNfoc|ikC%vD`Do`+FXA7SQ$SyM7t=Q1=2T`hq>`i{;u6V_%J?kB%LJ_=nvUb4y#>8W0yo8 z@2+6b;ScI~{#AA9x*_Ykjiu@T`@m%$IbmiFs|tKDUNDN4y;C^Z#)$cCOKH74o`0oo zJ?W;y>3OvnZa#qB=iKRkXfXF)x?=tzkBianEZj7KbxQQ?y`&~tUxf8FnKMYfMcTF6 zm~OnUie-+{|5wqUbrGIYi3g>B=dm`^{eO8Hd$kh&r%S)Nv4pA1bFuO%(fB;ueqWX4 zz2izx=JThQPG&$0+2bw{PHA)D!)$WnXWtN73g@dpa@LOiC3`2yaSxMx8BO1(rsw5F z>CoiY`ocUlpg{viVGG%z|EJ_#HO*&cs23%#C(+w2h6j$qdT(4pbK&S6)h?m6Nhf-D zc%nKqwc(DfXkfdV^KFwmdBX3hZ0LpG49P0VN~J8!ihDn*nA;_b+Lx-)^t`?P&A6Fz zTm54xb6VjOO;|CAjO`w{Nc&^S0GgSnW2~i! 
!X_+9r_Rxrl;i`sDTi_$qI z`}2Sh#_9-{r)>$p^`&z1A{G(CpQf2k^FnJ_Yb z6c0KJANqk$)CO|&&!qBB%5k}-i9?V)$-RrRLKG(tPA{U z-dUa#(uei>p3VEQo|5-5mItF_nNn$q_l^=;%C2RTTu++*9yI-YMtke=t*l11y*hvf zH-<=VsX2BwPE=gT;*VS6V^188^XF)^8d~!4Ss|}ZC9^NGm_7eAqRFufYUMIR*;9=X zHnAC&fiCEN$mH4)d&w*qCO+p#M$4`{yu6qpdxc+6S}3`_O^KBGZnsM&_;wD)sH>UW zr`_-r-}y!#R~FA7O3=qx77n!|e@7XaYsByLt(5DcH$8Gvi@iOJsg>(R^I~g;dO5N6 zUas)5+&LUQnt=x=<63IPUwevpD15LttxEZL>ynBaa$k8rG+^V}Av|}tChf8dU4>&4 zUE$1Asa{f;pmFoA$CyA8~K2m-gjCePH1e-hB@M(}M8@01AmOh}f zliV-+$Ft7Yo{i!I+dE_`Ms0+jHAxE>*K10-NdC%{7>s2vbM|Qf1-0{8e_V7lD`d`I zr-=Xbg|~TT8YAjtaX)Pab9dI^%>L)<+ypbcFNn4~crYt($(((kWR%JGS#vo8hw-u4 zEVQRsa=E`h&qZIp&#R%@Oc0&GyEg;rBfa{bZk9|RNxASnK`ih60xdGioD{xsoHS3h>7WD6JAhjT}Vagn-luk_RSp1M%lHS|<9)A{Q=X`;hr(JjEL zvv&no8`NU^rl%^Pc7Lhi1GuDXOSdV`*iFl&>X946ZVQLCPc$~`?U>qyj zjM(?8!HTcST2JyG{~V3`0u$oTxr!ztA9cb*bQ)uMl93Z@G_q|)4ixuDSrd|Gw@w*!+Fs^3=qBE0omW1m+BE!rcKNIo@}xA z;P5@k)6n-JVoDkZE_(9lTL34n#tAtJm-2g|5j#S za5@yG$t+%Y&BLUgua`}EyKdLd{v--_Jv0C>1O&1U1!$H{>UdiK0juXCxK`h)K z$+uSa?0;L%InkW1Fe;O5bJ1h|-JZ6hv#H%q>i#a~=#TK_oR|0}O8te?9L`YVc(xs| zX3O?UM*fve%L7#!pEt+qvD$rct@<#_hHBv<_HUq1@xoX8hXi5sWKg;17}C3zS6 zk{4MaxiYDgZ`S<0=gkhN>l>Q!zQh@w~`eI z^O)cF*B7RvY7ZjB9oem*oY&7`u2GnP+Av*&IEv$TX^a-)d- z;dvZ*nQK(!)VFL(i(+!w3Y079OxyYXb&w9dKm}v$uohu z{lsrjQOKO$sr>n<1pV#WR2$2^$QdSPKsh?&R}#d z%XRQLgQx3?3H!4NZbtW1+8dcmNq)}e=|(L3f9%3)UK8_|}u(M_Lp8)RhsX!fl-Cif_S0!u_InILnb4e-$#%R5VcQ z3R(N;nM&CFL3#XV#jWYW#}+@zeVI!QUzE@0FfZKZPGWT@dEO>lVqH^3P+$&s8cw6P zWJx*wcS~8#68*~Gk-Qh3a*mAyzd7Wg?e30t{n6+eL^7nd33G0j@YG-`^)8oFb-gk7 z_I_8_3i}ItXB2;}v7o)M|B4K=sWngbE3JcA8=u5JPkWwqtRh45rLTXlV8Tu<>6av* zveB=E^|Yt#i#7k{xJ&Pp#o(PtH^Qrn7&YOY3cI>fS(RI$ z=ax&7kp(S#x)8IrfR=?`*pCn`^lRB)B7ItP1-2e?P4%nh!`lX=blItDjTW6iTr$pw zg*!7?vZqpW*?+{Di7i8MK9a=UOefyumecui9?hpq&WurQYSrJS7G&Gevs$tq-w3Z; zymIb!3fXAjz|nfad>uWJ1zikjaH)vZzlol&OF8i!y5Y6_ntB^zLuZ{7a?c@`IqO8GQHi%+_DRrWaX!$$b?_dgc=)zn8cW7(`;;Y2{s zNfb_vrR7y`jx{b~>R$zP?v}@-SwGa>OAnNGqA3Nw<1sC^;?YER;_M13%k`$an#3P^ 
zQ_0dE%)Tt)uO{cPRPtzd?ER_YM3gYl(Sj=uDIAk};rm)f4RI~qy{rgEpB za2qaEpglQTva72!KJQbjj>LwZR93fbIXXH6%R~B1-Y)#*SMk&;b7S^;Tghn3V#9M6 z>K^=!$7xy2S|d6u4d17x=j{!CsK#x7sV-LQ^Zi5+ZGudwaPh}}RxOL*QCOwa8VF_B;e^T#;KT&@f$Q-NdaK8BsW^sg@?ClDOyf5C9!IHhXAdw?k z_RKj}Li-WISr09v=JyxMyV*x|Q|5e=n+_%RlLblNU3eh6^<5oZc=%TY2j@=4dOqYV zEXKStn+B7M`7%YDV;$b8O?t+xdJ)2g+E(aa^kIle9_{`XzrdYvHXElhWrPDSYs%T? zm&?a&@xLE`q)PWbRKGSfB5v3yT9lYGd#@YIE=f+PrI*Y@C-O}6-rAD?v$UV&kEKn; z?w>MDwzucQowl?-UXPCL#iRek0mGsG!l%ro=0YH4ttRs1bTT7!MBCC)=H#8TIIJh! zxhWr%!H>;q;2|Tq&n0qN^6Dpzk^Akp9QKX!WZOdFxOiuf@=dPy5oLrON+V&B>?=>{ za%{#2)p5EpR?C8!CilVL+BfJqVZ--&355gS`Pc0dr|*N5F0`gh!=lDfT{fdAmIXteyR9hVU2zjy^XC2 z$D8=EX`dB)@3^v0`lHv*o&^1=S&-FMu?wufhoIhIB}p_ zo-4vz?;j`HC%H$P?l#9Yvx?VEC8xchniq|=$T5DZ`ZSm4?GnjYjUB|3!7ePUkQ{pp zFI*q_(?~ZS-#~o~#+1{{DG}pylAHUuF6)keP=OKROVSS)PNeL`yGq7!_k6tSdW%jV zl+#9W^uKCFD^*FX_&#-{F4XjOe*UM_$yK-1nwfyRusp@qo*^Fb^~Q^o0Gwjo$dt9uqSnXG?7+)`Fy#8 z+N+{D(z}XNpFXL6-FK)}2oJ$98g0=}eC+AK@yFsTO!FXo&j<<=lF=DuNI*~_^Y6uR z??DOkU$v&gv7740SyR!3jHTp?DaLXSs$C`eo_uGV26~YFGK#-@ik_pol=CN2(33gE z?wHmz;gr&hb96KcULE}1X zUi4L6%r?bv)EHqaTafJOK}4@Sw*2YCoCc%0;hjX_a!cW3RSDzwmo7n+b2IStCV4CDs_&?ex}wZRnzK)n!CaP zzk`ywu}%7;C&Cl&pUbT-zBIK6W67jz` z(JB`m=U=hnxfH&BZCCV0##7T#G_ps_**aK!V5>{Sd)|sZ2cN6;IY!LwAIih8l0WxS z_D74e@Y?H)!77>gq{YfiS8~msmh1x%c%(*kH^%GeSYgHuV%RND zOh1XosfiQeC7whtiWM!C11pMz_nRd8_)eu#JJsRkn|EqL>p|QY9?C?~!JK;JgN1P8 zHl6k6Zu%%@s8q~v+wf>vh1{pIs4$j2*TY8a`tzZRId8xw(N`6TKcv+rS2~ttG3K5V z9fuF+@5~quUY9%+@pibi%VI`&K3fuBs8)eb)vlcebUExJ*(JiQA0cwrl{qZl@5uS6 z32d>M#BdX9ddm)E%e<)+9W10%RtqwxK2U+Tjre89NMa$H>i7K zFnjt}5gC&8KRs>lb{d-zN4eFba!$^2+d z34Xq9iCAz$#pw;g!E!Q*O^oO){R0N6!i0CkvW_d2v2h%GWXCU0#J~9_gM*h!HGbW# zgH0)Nd#aAi89-X@aOqtv*dlZCb64g1GjnFHiSV8Zqgg)Lo}&>Ze3a{QQ(JkiX!t(Q ze*dg4PQ9b-L?4$Uy8IKu8}EJIlWMno_D%AoV!$}UeA3VwV!?|$m2AC|C7gq5G<~0% zp67J4K28tcD)TTS-W-=5??-#dU-RMdXtBG{KY}e z?^NQmmrC!bF?W{@5s$w$3rdCmxi^pg!@WeSHJ%K~NeTShoHo;om}8Sdx3p4?UpKdh z7Hg+`Re#Pk;)7_1E(@>Od4%X#KIHK2gtvh5L)m*Kme>6)xlmuYL=C5+C%Wd&Hch05 
zeyPrE`IS}!d})(lN9J31o-P&rxy+}=_Zr1{?-XuN7);|e#q@|wrbzrDyOuY^?C>S^ z@Qw*dCx)sY<%A<|F@bcU0(rZACTD``a zBiDuF*36K-OZ?~^mPbWw0Is0{3|J$zT2oUzynmx_y)4q7{YKIcZLIVzsuHPdKKGo$ zFG;cwk~#UYTcq5^qojU&Tk}{UMdcwh6TD>Lh&IX*i_EP-8HI4z?``Z1q%?*b6UbUIYik zj~_2wsjFkN==D*aE1#qmxfaca?V|TSQ7N+p;ThH~N7M5nVq~Aw>9jg>LGoJ1q)Kj~ zDU*h|P&`iNOWKml_s4Kvj7{dIzcKS=U;m?BD&ZF5=UT1Df(2L9%QpR3U^5n*rp)|K z76#B)tAY?incegj#%|s>)oavkWm2mzC--@wb;J}e(Mr6`$RMn)J9bTjIPoG*ybKlu zyBF|oU>p}8i}!4?9zE;4QTwF+NSYpseXa@N&-{7VM7(R!4lEcGL`3uy+AkDl-#h7V zqKnWOQA~8_Z_4!R0~K@Elov-QF~i%2CZAoIosh@E5kv9XKZ+v-$%Ho7=htG{7e1as zgLPHBsky2~-@C6i?Uub!$as1v1NO8Hz;8@CE3!TL@6iN)kzJbG+rf-^RE2I>78ekI zfrjVZDfyvV?%k*Ull#$L$wI#W#f&jm-FV+4m$@swdGa=df4Zk(E&G~*jml-lF_p2+ zD;c}$rrMWzUKKqU$dHgpWS(-QO$+g}iauo19uKO56UdV+g+qThp`BUD;4KNXlRcTH z=e2C|ugbd;sj_wl;$2Wo&v~Z&C9{|rg$0}pH{oOG1YRslpwS3Z#yJ-g@*tZ7r;9ay zpPHUGyYj1AT2+^}H~KPCc9?1NjOZ8S$@4#wSaiXc;CX&*{u#&UK7(kMP(b^;lR4ro z&)fJq;-A;WvxfmM773T(UweM!dy`)xIa`t5%vc&rOi>K?PP<{VM|hQw@@Rcqa-Co3 z(#`Uzx+8n=!%xF`{N9A4-G;JJSGeeR+{N!5O2(&jYQ1o$xn(tN1{R9Pt&I5|A5`kZ z&+6#E`sltEW@jJaW8#6q+iYIQ-gsr$I5t%$;8({+`~tZ{7zbyrz8 zGA4N0XtHEZKW2_I!7`U>C3WO4t3sGT5-nR9qkXRUe~gZVw&m2?bWdduzNvcsXGW>$ z)&ft8zM#ehv-!F7U+2rSx1p>w6}`>+0bHq7hX3X`zHJo!nadZo%VMt@Ci}*S!qGgG z{o$v<4h$YFIrVmKOlm)lvVwSK?6SZzJ)a{tQYg|X;+vKhTBlE{*lqH6y>&dN+lhWq z-;s5fWM1d$&Y2mZ>~V-ER9SLZa!DUsNUbKhq;tAA!sE;fb$qA+lUj#yO6I$I;T|-s zlgCN<4sV%;ao;C_jADCYdsVPLDVOE4|NHRpx=LRAUTxZGgtoRneLZYsMr6;Tfw^Q_ zxnufp5{qt5re22hqd^7I=SrU3TbXMu_^6iKT~>C7%?Lj=k>A~H+3oJgzY`0H4|Qk7 zqRE_}7K7n88$xc2j>spA*|Vg7kFCYP_7~KayJk$86p8073$7n_VN~OM98x{``)C*? 
zE0Sq^&x+b+W#TQ*L$T zLioZm*MDxEEm@)^L|my$hQ)3bG{~CvHu17QF_L;;GN;lc=k=Nk*&ByZzCVp5(bX5t zEu*AB_V!YrY5G3H<~73Ym)B~X-T*#GpPzQshLR@klEsZkOc6cl< zCgOb>UsS2a=ed>KSB3^h)X{-fqD79OR$nu=Nx#{^B%kJ|{7LN;&Tl<)IdIFEW{-r) zYao2g1lcz_JyiF%|EtcomOaOnG1%U>;+pUaRvgV|N=J7Jts|&;n8b?$D?UssX64MO zJa{8I__?pt)W&Dj^2Q2B(dw;SVh1%=Z27&2$->JUep@)t<&ot7-V2?x`2>E5pkDt9 zPUyWyk-GX^t_kG(FR;9=GwY2+Kl!iZ$^8)BpLoAAY7OS2Rv|g!O;|4em&P}B z_{Z&o+91#MF^7Xe_I0)c#7FsBG{SFWo>?Y3Ij=Yt#9QNfwG7>_nT!%$?6?Qd)Zx(= z)UO@;OHCe%o#@3Yg`=A|NA@V<&1ldtoWA1is<+L8?{dA*U6DYy!cvXT+t9xa{hvQo zQ6@6w(2FC-*|GU_b4_c4ZzwHg# zZzeup+5dI6mYh#d;eE_>#@scGInSdAdF0NF{|aexHkaL^Mb)=z$LI!2)#`bJ(Hd03 zW62{9@)o|0t#I_uJ1`;NhvyqonX%qob~MxE`pc*PL*ZSFu7l5Ta@v+={jZWzgI0-8N{25l7*o)h*1aZSZ|fbrqMRs zD~M+I+ISuonX_(dHNWi3VZyE|jn7+qU5BL73o3n~30Dj!5IxU~563;QIU#zf9-iz! zDmw|?3>I71(aO3U1G^mjhe$oI;rrC|yulCJ^2btPgsnHCT|^KcVoca09NeXkv&dcR z$VS=WCccZrM=r5-UZT5hmm|CBQl2ho!iKI|#NRh2>HIJPn@XRsX()O}GihGrL1&|3 zj6RpZ{C}J&Z&WSX|2#%5DAV|LqfazuX83dE`p%STb;dF9x&c=s0x76S_^K=tKB#R&%}FmE$2ZXvb+vb=L1{jIj(*te2qk=N5)snp zO*5#VA}Sp_;e6RUwP5wD8)|X7;^e7xf-Z_rz}^{usR0($cOl|~7sk1w>#{W$K2J3( z#sA*0=QQ*VcBJ?G)9OhFGxE>H686r5#}2L}=Vqb*yE~PR;#Itr%!&Y8)*YziwA3=E zW2*R3&{%k_ch%jeW~}s*+(hwD7u6Adk~|Z}O%#8Xl^;jyq;l2Jn0?yPr|uUW=f>&8 zOgp9wo?TEA|F9)>+EjdgvBfmdml2(EiTul*7v*Uz9hi<9=SoBW8FZGNxlZUb&QE-x zT2^bJYd4Sw;>k}*HREO@U!G1)Cvm1bCy!3X*mVj=jqI5(oada#Ecwn$snD*&+=tK9 zA@QNlUOEB&ufqMiF1e&b3uyh1C(|!aVDCBEix!9<=uA2PHOXV~X7R zAKym(dH&Xmn3x>a6$>NxuZfHqmdx!c$p=3>3y08hy056jrCS3w+>xBZyZvamb}&D! 
z6me5MNRN?vFvyMk=t(TCoXiioHnx40dE}N{D!LWY^t3Lk7M@go#&$GaGL?72v758Z zhmfdTJoM#t9Y&G$BZHn_h0hRPjq9)ijP_P=_{k2{zx)63c9suOcmLBzL_}=tMl4La z^9;RoBOyu&C?QBlcQ>fmt=L`I-Q9U1wxVKV-}C+b3H#uF$g!=)I&K5nI$?Q8oo*$L1pAupt)TTZu($=b~lF3fLU$%g(N^BARo& z4>Q70W}=Uv6OKrI$9czTYg~+*Oy5c@S`%zBe^M2691C$kqa5Lv+9N2TJ$epM!=Xsd z*jwqN&vL}XDH(WtNrJ?h0Q8*@g$r%;IfNC%#WfQXEsLaHZ~2cwNL+SDT%%rfBb@vU zGtR}IxnRtJ0@P6tIVt0Twwwfn&#=dsi1|>-EW?Ac8lHV$iL*Jc#m#=~6QWmMwoDfr z`1*QK!#X?HhIibviP$rg*mSaVwp4dmx?em>@86hdW}ENaJ}7aJez(gzua|MYaRNzod?JSS_p z+rYqW3W8#1BXKzWLPv{GZ<2-g65i*EI-;icf5K;?3DT-UQTJpLW{l+gyh|32jj~3B zyeHI-CE(=;fiAbpv0N(`GGQfB|J;y$En+~sx8lGGW9Z07!QmeJy=J@O^akpK=Q+nM zi^T9>vG|&4i_x5;hCSlU;stwte+`9c`_m$kzL-zDOQ3vMha6`YIZ`chZ^{mml zhO_uh=12-!2xE2TFya>Coq9){KKxFoj@7}NAwgJQu8$|%ZP3Aq_Z;R7I(GDj?a%~_ z^P+EZeHr?xWPV@-bCkg6iwxNcb6to(%?=*ieLF8X0)qkv;<{*ZYwE zRV?|_CeAI^!o~}Z)Z$FByqNFf!#td0uB(4UERKalBP7cjsv(82{*{B#UWEuZkwIDM zD{)6#59+`D;L_g;t6l6-%QxubEjQ|WvA8jvy^mqQ)uIx3P!}G}oZKRx&KODZss2i1 zJgb?GKX#hfBymQG8acYui=wi9Fg-F2rZyZ+X;eYwFa0l~0=N|GV_Yzp>SNpR^~A#UtTMdFhJoNUkRbp0dIz1|QFoukRR)`od!M=Y;q zF0!XP{;33D%m8Mw{^*cDP>FBtGT0|rjVDF#M6}T}am9%Kv0s5`HqwA@fCO8fWx{i_ zEj;Oqz15ZZ@D1epwWHrHB^{O1i=|#~ajnupD>JtP7x@#9e)?1q>>SqO5ohgRo6 zq>%j~7j1@b)Pu*7!=0s4&fIz@y!xNdak2#(-{v8YImo+yAt?H<8l?)J_;`}O{{5No z?l2Y4{gxy7Wj!X}T7bk8@+c^NAQn#5$3D5~*kWx2^JU)9Uz7`%Vk<wVDa(QM?rFeF2q z=lMW#Jax%ojcbpp>bD|XNgMmN^1h-+f8+ytE&k(N_^k`3&JV;6L+i~u(H_-*4vYqWj2AbOBLQP zOu_FhmFWKEuF!7(P<;PKE>UYVyS9t*b&T|9dbj;KjGu&j}wXmBnTxRGCw7>K+b$w<4P&v{fCZYifjBdY>lu|09_ zQZz#t6$9=2)P6JyVcX1kqCq|s|I&Xp@V!WB;+$XZf1E=T zn8i}}9Gr*df#gfu1*7(85_-+FppU!|ucwnYtx_cQdS&vm*za~l+}daelM|ECcMbQ$ zb@T;!P}Q=muA6YHOE7O|7j z8TESu(DiBpHu;Rh;luPVDWxFIkiD%M`67D#YEcrOj?)$E$i*FqUVM(3Z!3h!UQ3*u zG6ip5aOFwc^^+eXLh>{ zF)cxJX%1PK)G;q~M8nPR!Z&py?mx7FTn6XNryXE1n&(~Sj8|z)MVL%HwCJDLHz-6) zr%dGVKCOMR9r~nw6d(KR!m2V1vGL4z(s!Z!mUE402ZWpm#>#t~%ilJIvtJpzkC`8~ zDCZs|i_`2AF6VjN<8>fjJT}0BZ#LLVJ!D{;1ClpPL3rnQdUOqO?O`#-@mU`xSH^r$ z4@91RAZF~DME%AK`pm)o4F$dq%SIUY(fx~DP;)sN=8C$Q#=E-@|Bvy;#YnE|iMYJm 
zBH*(cTz0yn*GW^P4Yfww>Rd?Z2Mmt&h05s!ocLx&y_@~vEh%sbERuS?;f-YSkGmk^ z$I@?TGXo1%^l^_I9=%g}7%g#v>VhC#yu`DdEII$>7eohZxZO;z9PvY)X zEi%77VPnVhb%rhEt~2k;{%|ubKOEJbMJ~P`Y)p_Ahvkdj3&pSM z=yHj7h5Ls1v%nH7nH%~3!3KR}LokVRWYr_|6R#-5TGK2@$cq(IyJP6k=fW_50xZ?o zJA8rq_6U2(TBp!EZ-dT1*;PL$9?y3PL|YZYeC`~`tj`DD^}x{fFN9qE1Pq=(8Kyf7 zaCe^z=6%XUSfwpiY~%c0E*@2;4w!#|{?8}57-dl;^?J@a%paY9Ev)<|pxVg`VHXU@ zYO{pQrfle#+Ms&lGz_kr0Yh(N^f+FCF=naQ#b@}?m4oq=xq=Z2W6^J`0}ggK$9pUK zs|?eT^xB#XT5se{j7It>XGGGMHSzV=mN>vAiO0f@P({CMlNJe&*?ch(!CrhxlcvJ5b;)msy<;TI7EKda(bP2)nL!8VyO17kb1rA{bXT3{FO)?sRh5sk$AG6JY?Qw zmrf{v(`?S;kJCT@DF)#cX4t7%#(usGgma9C}ThEY^eKAF^v2na5DE#=g#J*js9g zQ%~48O1{P-^7)?ss)Y0LSZ3R+q+UAl_sIrjjpQ1t z+T!ba=2s3+$Af)VSn(#8{aGpGP~<^(gA!`#hi;$4&u60tcI8qpV4vm2lj+zKZ;4-f z-Ecm8I{XtYV0(go{r;)YBmd<=#}^{^;9FtET=Gu88SG^=#RY}Cl}xc*rz+pdY^Lm{x9r;F`r_Hf>ngUe1XWI+Z(Wh%LZ zPjxVAWI0-sQekRXNlo{;uxfMWKer1Mo305w2(TQ8&H}=We87 zN<;-Ux5r{c*XP38#sEK>W?}~YJxb(RWmV@QD8dOo4a|yeOTmqcrf{N<=y@8wX3kYo zueWerqfjq+A>8JfLXP**8CCl5{OE`fazYB$+rqf>9F%=Xz`2Rc3JkBq%na(01?*LK z9f-Vod9+Y(SbIVbuD{Jt*pFNs^K5K?B!St^>6kf&d2?n3T;CKxo1IhpG76yDO$NaM z=fx2>4NPAjiY+0=cpYKK&WJqxW3Ie>_;kD}nuB%|IVTD(Mq7FY*3T-%e#twb)&Bu= zt=hO07lDG8SBK*XX~@T@YM4`I$ps z(6}|OkbM$@51nS?r@0xHFDQl2Ki=DB%O9p1U zT4B4H4-R&UC-1vIGZJZi|7>_X=P7LxSJrpl*r|!e2OJ0dr^W5#&QJ zib7wo3)jOKkrFoVKkTAkq!B{`K}PN$9)r}7B|Jx6N^nhfHm+`Bj#kQ_Q+Dc!QNyW&-Pc6-oq_l>PzTZHJh5YR1{N1dFyXT|syij) z`&Vit%&To)od?&3QfzJCCRW~jD8x%O{3YiL+q7{*MS^Kp_`G`Tih(JfkWHI|xwUH8 zzu%rHx6s3!f{rSCJqyvSU4j5>d1O0u@xct={Z;#Ev1I|CMm9yXHDfhER z`fv}mi6PBjh0-o^v#Wg3^8o+;bIFy<$;N6Ma-u9lk;+VtsYIard-mm?rGIHo8TQ|O zDaJBr$9(5XT{=7_x6<8bk^Jk%TZ ziS>1+kX>1X;LipaR&Igzy$Wz3$N@XM`oJqO7SC6x!H|BE!?%)AUQ+`((GH(pT@c?} zO(40c z&c>3z+W5BB3cJ=9;7znWpU;!A>~JcQCUc(1d;hW}+=C`mL)zCV?Ru^M`Xj#gcd?9f z>94g@;K=97??xvmvyZ)k`J9ce?&v!@9w$4Jfv?Q{WpWPQ{Vc|6dua8ASim9q}m z@F)j=y&MqV$qgIQ(&>4kKYMc(()MKGaZnW=p$G2m|0)`pYkfB*3@vuLP;B$!XOp}~ z<}F6)`s3=sWL%%XKKCc&!7<|);!wtC@-K1M;)~d6qzB`YVCI0gJBEw?$6vuo*vv&x$7^{h1z;jl=oe)We{|Hn_7r7mb_U@VvV(hVi>9 
zlQt2ntxB=cG!FS!DiNmJ4quPl7t_-8u%Jr}noM|L0n49J3yqb+K(yrAhY8*e{ZVZRgg?r3tA zM--#O_I9W|_F4@7poTl%zUWuT^PrhEQrc(3E|@$>ed=H;QRsEi2-6IU(05uImJTdN z>*h|_*yW4ZUc&RP#1EHy88V|`jlkF(XolHg-UC0ZR)|JZ2Yx>5OK|&tx!t*?~WZLSo1(zSnLf#AKPTqGpFzKljo$=EG%4`k2a-t7;*fq zh+jG$`~8@^-7HW!(2}0sT%519gUjIv`od;_;b`=swtMiu6s+(mfR}oEWOQj2D)sa~ z{PIQBSo*=2+hQlbFMWoP*S#eGZf7G9{m~qL?(8Qyl!X!GPG%jK#Y1wp>H`e$H!B%g z_w~_!xD_NK9~;Tv`s+0fyDvu}mOk6b?YUp``>fu;`!b}@$d%)UtL4Ak>;E$w=dMcwEdxGoNiFoN~6Bxo+t zf`PLH&0~0O^%R)A))RIcX5tq+D2m9XstL=(683RO`@X!}_+1RW`ANLgo`@#S?E+S* zU_fts1dX4L0R{HhGt&V7N_Y?5Jqn(8d48n7ZxOkE(!MX!uGe$-ThUZ@Q!L0L2Wxf) z9(riQKh+6(%t3ddp1kAi$>Q7P7E#iN_mrJcsHNVs zRM8EFoZlXbW0vmuRLEwM$9X*CGkoxDszV^f2QF6HB1sqfkhu?J*tnVqokl+%S ztg&a$yDvNAjpWG9jBbr3{3vC3RAd`mv@}FazaDPe`mX44htI!$i z<>%nk8B@IfT8RcN-p~70V7W~Pyqb4P6gL^d;(0kX*Q#S%Pahmvkwe|h9lE97(C(On zDW{B3AGr`Wc4U)vTaTb|opArwb+Ok$6}HY~|LrkB)k;g$SY_iP^BOgcD0_i6!FmpEhE5oXRT(-CZGg}~{& zhX>}uc@%l~(VZdF?wIH=tARYbStwH?YwfTF{3Hc1YoxC|m3s09vRQr@;TZkeDr3@d z^+_4l%gJEbwvVDFSrQ*uXwqQC6-p69qJyX>9m7~YS99Z_Q zz{w@@sMlR9+CD@e=TsdW&KYCQX-il?DaG;!JQtpKCwnIc`wh%6%4snUEai7FezDZ+ zeR=#_xcqAtJ9?W#c?-2vBVFW;r!U>75I?ecjv;>{XApaF3v@7dMGY$MQg|u zd>{N0ZGMK>mpcQBDtcJ4R)PnM$dnT6OxFlAP z7nCw325Qclc+=Gm7sxq@o92XMg{d%BV|SPexvxFT@X$UHi}*S98Z3u-rjLcuWA>9o z2Cy5;7~X3wv3^W07Cp7c;|YP-l^u;s1Nr~#@Dy}Q%hbZiVKC*zJc%sNKHsoD&~se3mRlVhDyfOO`vnzr5*f8J=rp&PnD^IX*XiOoL$ewAiEJ5q=?pRs%MyQ>hgfAY!n60i0)mOIY zW|)cpv>ow!mm{n>Q`s?*oXDVZjM*QLo?R+Yvc3auPkJwA&K!pv<_a$NG)C8037-08 zW6V}3jH;M|mNm0+%)$s8Zju8LpN#v<*dyNfL!5o{OkCfuht(hBahAUO$LwF!*_#KQ zQdbyn4MEj2{v7kLy7cc#mZZR8b~P-Yb;ET$7FV_!Bj-aby3kLy@2MpW?DMhwvNipE z%mUg+!_wOT&A++dEg{F6=d^#u3&p;p>&3b0A@IFbiQ8S+pF3Z`x_bfjGZ%Qjnhun5 zc5rDZ#{14i_OV#Rd|iYuzKR&AdrQ2v*N5Sv2*NWx3&6-#~osm5V)- zjj>6ETn=)zGMKMx)nOmX=ZB)TfO$bD-q#AX@#Co@?oG&pjIleqp9p}u|R^ z1K;P&!%B?~5Oz<+^xW~tedPwbUz%85!Sl}6IoL4O0e^?MK<8Q_&aI(e?|UJB-HAaU zbL`$mvh*6B6NY{DVZpsG?x+z)+PdPSZ2@u$CS!sK!j1v-AU@Z{-d(jY9-ImT-T6}g zoZa>g*tq<;IQ(55mmUVff_co{_6~SslaECw+;C4X7#cg$pz+NJQ<%4o(#^mt>P2#1 zyni0;qlKj{7tIH9;+U_ 
z*S!%flXV~$5s9!~7RL$|H~qK7B5QvMj+M(u$2-maR}a5|EO2idiFX7F8ja3^S#H+*HeGJy?Nj5jpJeD#QCB?Vxk#o9M^=>R(_GOl$P;mh<R1JqN@VcjU8`7eW)h-}JrMGbJ(Pyl_?(yxQ}WV7dQD+IKbm}51FT(M zh|lapIU`ey*RFC{nR7=NCu$?)eHgwkrti(inLKv#m9r%{Br^>KVaZUqVTF~3Wsv`y z0}liCCff~x%cR?4JNYzCcS51RQ6Kv`Uuj*HgERm5x$&03!8Hl9n9JHuE=Lmgd395A zV5D4cYkx#~7W{wRnQUBzAHASls zRqwe^9N#ULDNv`ZNyet3dPq^SfEG3C?NwH&oEL;QZg+u)$ zI4MRV?wtu#$e~>)2vj8H_s2#!Y!Ogqu0zatq5$uyGan!Uo(2E--Jp?3LIx|B3iX&dJJRU!HqRpz@x)AEg|u?QMq- zmeIH&7lVXcbG({RjP&-Y=w(cPg_P?B)u?08;}@d-#W-Z|b%xPQ8#KJP#e2?BC+@b# znzOdhm`wfC%pP9;)E9b?pY^vGQ!V?$TP7E0`Ab{u%+Z)(gSGT)3|=O|N$RQh zv}UpE3D`vD$jNz`_-s^){tbUbxW;=ic%~ZtL$1&~rvp_t3HB|`MAmoi6C(mJx;hTU z^Vu`Z{E)};IcO|of7xnT_&Plo73^WK{p5y@KlSn0f_(0u*_ig+9ydz@(YInYM#Y5AY-ng3)i>T*Ds98b| z?w4$OiYmzQ{w3b$y%g=9>m!Yse4jW2H1Tc|?#J)hGFyy1Hk}^EnHVvU{fsr#ikbIO z+*B&{dbiU1(@St$4Ba~p@;`kMevAw;atl^pt${^6-yde|zj0nQj~??e4@=SMKDDL( zWm3ORY1cCe?gQI(Z-nJC&JWmoDLYgPoy;7NwId7LKHK48ELmn}XX7D^G2vhpN|@F8 zxu6ms9?D|$%x}V}jB^4R7yMmdhKzo8sJfDen_kYCSrUr)x3PGeWQ0p~g=kw$4go*c z%@G~oVfsRBEu=rIzbAZn&-vWV3jfwKN0h@p+S~vvdK`l{$wnygFF-0aIPD(A7;W-h z)Wx0=$0gja%fj*KnjZ9CkQKQ!5BHw(o;r3K0&XVJlca-Q)Q1{;*>#~$KL6VmaXYX{ zZ1|={u8233==VSQin{m696XS>$MqWO?2WUqiu+^kh$8rqhr92o^y^9cI^7CKAZ6eg zF>^0DHA~_!v)BxQ_SWe8BoBdiUE#CC3R8X3vC!ENb9+`|p9YzN5w%!wxE%^L?upy< z<1OO3{9y`b9D5uv%eWB9^XMf?ia=a9&KTp&vGNZ28mDrwD8C9mD}Rf=s^7)MFb!P3 zFd0AI@OztT56gks@Zx;svjWe_;}Y<1~=Zb8Pfm7ZI1%AE?41!Rls6&m? 
z5_N0Su%sjPf*^SsNdK8)3{*LzVKBTxkRs&UOqR6rgn{usnYT zT32RZ(m(bz&8vi}4EvIgS3~mYh3MLSzxbU(uCiGI>WX#Ig>#kB^g(Q*PoUw;WU>fT z;pRVvzC3=u0wb_}e+32}>;mNvEkZw;&yz`^aQmu{Z~Hlq8Oy#NatPYw-Pth|gF(Zz zabN_`FJXx|>PIfXif&kO=!rN{FcCU+PN@FC9%Awsw@u1HRjLI2=_PGim5e{P%(1ge zAwmn75jj{$#&Ks%9kEgj8)rZbD;L9gzMeOf^M2+7zEU?eit<6|80uitjH&UJ*X+T}c8`mGDqnEQSZYF!f`C~fPuX03wHTi)j^WjeJ z%yA1vgpIx>RGH668Iy#A!?dX3+o1UxeU}pUuh_X^B~tL1I?|iGN=(a3LpOzL=qh%^ zuba^wg{t35{ zdMK+8Kym{2t!vgOJ(P#ldG>IVWd}viXnfvhjKm?Oa8$@9L%W>aChZVX+zz%WCV19k z8uKOESf}jHj72UAI19C&7D;H^40s%M#zE#N=S?VsQ%veMbCnV{QMd~Oc1o@Fm zWd@9xVGEh20xY>f_Pm)dIvwD%S79WIDoW7jTqq{ot0hP3t7x)o6DQAUk>eeOs@?ji zIphjk)m-E}_JCq;C>E!t;OH0jEv&AFzkWKl$kt-d=g#DHd=#Ao&qMv^K=GwEKK%8B z%;y}W?Qp}CcU~Aoudm)F6YNf(hx}(e&$0(z+V%3cKNrVKK8P!wHSlwz7t9>=@Qb+~ zInNv%US*5Y!f+@=MI-OG0Y>N+(UX{pCFhHwu&EpRg`E|y4hFc!S+tjq8f3CN}RmU{t8p(6t&vKw;`9snAe(krV)+cJJcOt*kN}-9%kp; zAd+j{;PYs}E^Dl@Bz(;z z+{=!D-*{sTE-HoCpN4D0%bP)1X4nKlsW{hEcUkANx9P_K$j(ASNtLrEc1|J^-gUcYMqHimq%HmUBKf@~%HF`|u9Uy)lD%!>{S| zn=B}kdOd01Px(7tv2DPAV%0G%luh)+e;(wj`dF}ICJP%jS);|v35Ms$UA)Nq>#hQ% zsm?~n-bHxKJXgWd{{;9wnEERie+~3-!^)Cem|QfyvB%i!zF0eQHZE&%e|cSuNgI-2 zKCT?mPrBkthsUBzCrvm%cf-N~>_>lW%QJiqPRZHg($C2lyl)m>{UYOW4Ryu)%nhn@ zmTvJ*WHdb$L8s`KITQe$v7GV0@IVaj#rq~WLe(P~XZ+KVdEOOc7FA;X`Fye*%kby< zT~VKPN9-ggsAzHw@?^oYv?JU#*;mw)I-yQF3ZEpCN#q2}#AnX(ZYskd&cV;^#3F*N-**5wdCbmn85^jy z6ksFotL^@TlcmYO!)6z} z^SNXbhRUxoxU`dcKx7eq=#y(Soc+u_J7QS|UuUr~GfmMj`=tTxYDc`dLI1pu3mSbr z;2)8W<;u@$tq_(f;XpsBQH|M>!+Rf(=eS?)<=f%s^+4>;nvHiO1oqS4(RWKWmQrV0wWK{( zT>K;MKOTq5t~S(G3{VnpkF|Wp4ykj*q^ki?+a3p{PsUK+RfrLVbFh0s5#D|2h0*_* z57$v8gWMWE>!}OJxk2R^`w%&Y=w>hlgYGBd3eQWsXBMG9^J*85=fR@oyMXyok$!{u zMCQjoDjKjmN+3;#dFEzILEWx#;hmhPHB0S(!MX!@&p{rhhnuyo3KmMM!0bZ z43-VQ`XM<`6w3#+hOE z#auqi&H1kjv^K}0{Z#sNj}+rb-%My|v-gvEuAUyPVponfJez!(-!_J7A3G=?$b+2| ze?P|vWQRp#skarna6VmamrH%P7}Bm6ZTntaKPiK%x%xP`#2ey1=k`_-IE~7|S^AVi z$+LQ19ghta%&pebA25b@WXp2+5AKRIb6FTVs>8FMPcX`6%DWlq?>k{RIg zS=g4$d1*-{%=e|^_LB;fmwXpnZnTL(uT|M6;DB`%0%g_?=(Z;ZaxzY+8ySm8<5}3g 
z(F))0vv2nlbMxePNV}f2y=-52_q(1;M#@?(`rX{fyi&u@*Us?xorWC=&iJ{K^D+9> z$6nCG?Ys(n9h8J%<{#|$ToW6!Pl-94f9-r4537@$E4G@WC+}6o+Ge=DF&^D#grSEj z*bT({h7oxQoKH);p0vI$Y1ey~t&IKWJ7R;c91e2Ed}A7q2Zc`XZ^%Xg`6}jn0$_e7 z4#8vTyHF^`KPAo(?~&`VSO%Xu{t)_i+4n&`*Q41GzcXwxpid4`7Fd!s6M-!GNTkwh zP;6F!6XS9*X**|ND>}g8N}H&)GKK2Z6c`$4WA7^`?75MTZ`tk$y6T0_IVm_*uYnfR zxoFkSKx@DGQmq#FA`$A&KxPAIC3EhMR5(ZQv z?@JDr&MU`&FYITY$yrI(btYBg@DFhPg#JUYmvA zugO(u`5_kNd=@7obU9;W?^LxhWc{o#z%LKOitN#!^A*G4v+(OFnS{&_#f?ryX?Bs+ z>-nx60F}rVabYj-wezU+k@wM^elQdL96b6+zv3hZ=rXyf(%?sl>Yad zI6wMs_aTn~R_V(Wm&cF5+q3u~B4%LF6D)z>$s9!8b>k>_3QXrGvMh`DZF%wsw8%!vXFlh`PtkYAQ}JWFI=bcgV&hlx zM^;&(O+OzOLnR125)S1BvC!&eLM}@Y9#IROQp0^v%Km9-*OOKsEM3|MN>=}g7dJJi zs|Ml(=je42^jcF#`e(?w+zd}Bgw4W|Kr`$9@q9g!zBT3_mOdg2CX4)^UD9#Bq+RdZflly=yd@5(P(NGj zgD7JIw4;}@ts}X_2W;_sXaE*}O(d(u93Kn{QBA&-GP4TOt|zSzNnfik4%vJVN?z=B zPML-&%3mAM9x)Sr=zyG0#7&I78#?z3pGo9ti0xh z1;^q=r&36}o?MRs>@NB$&U_=M^u0gU z3}&CvHVbSTLFR(072Mlg;O-I%=V7L(QsTXDbUOKVrRbJ32qE3?h-V*p-+$ z{9(rat#r&!0G7B~GOrto>_g-oo00dV6^jjWMd+3FUaVR3RZJ#-Z_xfwd}ve0?Tvm3Jkta#I-;-T&0hC z*Hl-s9Hye_UIxr;_0Z*64Y~|t4}Eow)Vlt+mQG~RcEPR{%n{vnz>f^hch@^WZE`;3 zAG+XB_fYgNkH&++z?y~gYnZ2NWwMF99JikI5r!vClCVH>q!| zIO7h3lvy~v)(}n0m@8J!K#7DsGcBFbE#R`S)Ypg7`&sDTXn-zjsZBZNL)p?EeMb8t z*(C|fsZ%=^lwnbA2JnVHt8E=|qo`F3Z_>h;U{8#XVlI?A@(%ATxc{@JPVbF3s?k`L zY=TEaixFO*j`Vlb?M}5r)sLehP~8|;m$MhrR~=_}ve(oh7k~dbBITtw_R;S)oByBV zNpk0xW#E7cxrb^B*s|+}C?LNzFNYb=o4Q!oOa|UE>g+?vM$Ghu6}c7jADQAt{~~m9 z&cwm0BIFP3fPoE9#6gKR4lbU|&lh<*oEP^B%*Mtz2fSS5hf^DP{-VCw(4hpcnbYe# zs)W3$UqWL4SM(L^{yt80maVMUap8!rl0nZBc4@8kw+4b*w=q#gE#Zfvim&nX|x`W^tVTJ8g+)=?B8h(L(%m(*tXOEzuqz%PZ z{=V>Eu7Q$Jz>Ol#Jp2@`Svai{Tg<6OA(Fr)-(4x?_@pc^9g%^;LH)81+gV zH=Ky!nO@lB%swV}Yoz65!F7i%{_DV5WmPm}w(4%foL~RuNXfKeDeU1HP?*rg_F$oWgCZd(k%Tf9XDCU0u`*KevBLjRo zHv{9AX<=!!Gvse);Zl<=g6rJjJt`Hqu2>>|SS2hw=i<6~8Dws^!^-|2h0zNw{QTsJ zfyPFt2)4$YmR$BW@cDoss9MM34t3fMJ?N8KmyQ zik%Fh^AX5%D8LQ+*Yf+@VCLFsC?vmI<$fgwoJ_&bnrf)}56AUiS4E%sy12cJIk+J1 
zXFsek?nFK&ueV3H414mL6LGvvpED5h4#-1#RZ)c<(eFjwx+bwW#S}qz*i-dP2O3He zlpe@Ok7KUrF^s+h*ktX)IR^i+0Gz2x&;_;C#DBCXwuU-?NA!AJMiV6NUc%gMy z7CcYcVd%OD45Ytq`&e(B2r9-DcG>J~%f(~=&tl(<_oCq_dn4>3agWd2DF>0=#n zequJ+q7Il??hBpX3E0V84?N4DM=pKx%~IS6Qo{Lze}vmy>LKc3 zv}NAk4`+0vaK@3@@2DaimPx@2W}2nF_bU!LDIzN>#D;udxH8W>K3@ap_nC7Bn2*N^ z=8zp1f+-bQQ0>T`?1wcN@tHizM>Wjo{u2gmAH~y9U8Js@0owpwX5T%~BAXBMU!L$D z6aq!ZY&e`SLM46I*A8c+$KYB>`}^TiS0uB$W$VUC*s>=WW@il%$9!s2Y&L!_VIFL{ zANncB!!Cq8afLGMa>#{IfeiKj`%^bmPq@YO;TlrSr57zxwq z)ZjL-M?0kkMf=$AoLF~V&ji9Jdd}SZ;ku+NjT9S)ixnx|ZIl@uQLKeBT zTAY*Eve!%ApjztQ@z6EhAyMOZY0CtB?Bj%GoB6#bOXc{%95S=Xopkqu=Ji?3DVt&> z^T>+3Q#dazh4tPZ=+W=F_{_PcLJ9BO+-Dl-DerqN7u)0=Fy6+CnZ!95IgmXmlbQe8 zO5LVoC0gC&&?4C^7Uvq`{EsC3?5%;=au*zw<6fZWijdnr%s7$Z@xc%mQYz8;Lk1l7 zSHgIIJLJXO6wTWDC}Jn>{Ea%$r4H@PIqba-W=GuSoP4-;l5V8M9u8W$E~%Yp<%jOF=mZg(`F z`%m<_uK~G=NF0WoC) zZg}x;EP7Sg!1uf!jF^}C#{K2%I~VLU^M+(Y3{H5O;vAp1xHku~_bQ}buhm)+60JAF zY>^Ib28TeMI!eBS1I+WXk(f)S#&d6M_?Lub%!^d@sKANA+33%ChqU*ckv(J)mG_^> zBHz?GA_3XtW8U(0WM^Ftc6D%s?D+ur*U!c^Rp#pW_a4W+CX#-+Vc(Am57`Iek}yTx zx7j%0VunHEEU`vHF6tX=c5J3YdD?6=UITl9s?h&o3ck##;k>dBn)iGWA9Ti{$(&tN zY0OjJ66~?hM9vH=JXqp_bM(b*aW=yIsrhK|nT@mLpryV1E1I{GKkuc7H}iv;J28gg zQ9GOtFF;6z8`KLzaDB)e{GgvEo!^5+r&Dp@a3!Set218M11VR}iEKA*$RCVB#4Bwq z`=8fkaTYY=BpAEL9p?X~BLAE@#%!dYlJ83=J_qIVTE!Ker$RDE19tMkcqwmy6Xcxi zB!}RA3;8e2A@~)L2nQ$T{!f%*Mcf=5FsgvG>-pRFq!;A6uw_5RN#!WqiqOP7b}WTH z%A#g&kE{EeN$leu6pXw8cP={@>~2 znBC(&ZWH&Xi*G!cZR6e^V~dBnb+}2-esXmsVk&=&ud$cKUiy9CS|;I~t{yJsu#03w z9^CG^KsB7%4Kn@|Zt|52;EoPlhfaYeb@o+fByd#B!A=+E1eSWi zpB$IvUgpRrCoywl7ViFLpQ)6(_x?A1uzT$rv4?r=&KG<+W7flDQwtnjlZEp(*6^oy zzyIY3D6_BIpeOq`1=;>;rRZMN7YfY1*wQCg-#-9HPaAT-Br{hr53*VGUHGtTbVwvF z#v9?SJAZCw8itN5!|&Bfcy#8pI2xjhA3E_+*vEUC`KBxJA zG?R-nIS)N&RAAk(A-J#lLM)xAgR4<7*hYQh^im08RH&1+I^d*^En4m*Anmn2wlUAu z+B^rVw=nPhvNyJWZWEjBG!byc8-qD#sT%Hv8HwrW|5*Y%9Y<8CCc`#QknvcFRLd+} zUQs6XJ^aa#LAcxLC;3EU(R-8~GGCbE#T#~f9?!&;CANqf$?vZv)>w`v74 zkN+0Nac{*BeLc);2}jv^_5c_#>+&NXvj;grX)GBgj)`#ksE>ur)2)1w`G1^mY1a$d 
z+zk@Vry`AgdS6neVt1`3*0kB9{2#wNvbM0SbwSIeB>cb5&iXB@?Cs(T3Zhtuh3MFc z(w%$Johl`u5(-LpcbAHeEvRF6j-6v-VWVK9*kWP7>-+u*=hwL|XT!sD&c0)<&(im@ zzy`jLpqhmfW6E&ZK^|9Ebbyb+L>#&92(|yr@#LL748G8}6h%Gso>|Q8n2X5K)@a;O z43m2~aJDXy+>c2;FY<>XGL>Fauh05g)dyozG;jw_XrU)gi(Yx{Lu5DwhcgG-41>Cs zA(@$5`rnEW757tIy4oPJwo@nE!5`kfhG=rO#qOPXWC_^9y(JXS&&T51MP`E67s7T_ z4n}mMF1@82j_rIYHdc`{ag(*Xwkf)|+Ttj8yeg^Yu(}=r*%M(n{gHS6K61Bo%8*4S z*HQLWJuco98<>+d-iR7)Pkk79+o8*sEZjS1iO#>t3K%^XK@Wh?3jVv})Wwz+N!HIx z{hn$0HH$XK8>0CyBOKixh?V0s@pBqEmDM?Lv$ch)Wf)XN9M%uE!2RYDlz379U&Fi# zwMU}v`ct7rzHpXeC^ab>&~$Qz8|yv=dnW{)2}W|CBqYR=TR|W6Qu{2-e^-vLv5!QA z`Bf2k#{?h9>Dd~tjW+7N51h_kh5cZ1C}jNMpul<0ATwz7DTO`j8A2kUHKz~W>b??xsGE7a zIR=je`C`JUuPZPj4DJd(Z!P<@C6k=Q_v?g;uejX&jK^S}0(NpC~5 zVkBbL8DkRPOQ-f{;Tv_1QaQ*{&-?XY7{(61BUUqCrFmaC^p@JdZLTeP_hl9n^@BN@ z)0x|uju=yBLCCN!>sW}Kzoq|vQujHH(>r0-=qKV++a!$T?CR)n`XjWhP_E2A$IlVD zb>3KYfVpLEI#3@{gguvH(S?6Qsr=-v8C{tz`%OgBS8&515UPK4@q8%jf|MLATk3}E z7pCBHX)2W1=Y%=ZXL>#Z_3>4Zs-JkB&@PIpO<$L7fQRE|U{$IPwZ`^%Y@H2{HFjw9 zr0=?H4pK^uQL9$UjGat`ai4Q9VF;8q9}z0lr@bUEWm6>gb(7rCL9qb)_Vc~=#{r}A z3NWtO1rJP@L(zzy7uzL}dY;}bMGSb;0gE@Oq0b<9TyCar|Dg-Svn;qIIAb;W^NV-I zV5mR&Hk##VeoFsJdKtpiUWkAL?}WoKEyUKv;;Oy{94efl`XCP@_PgV2&sdBnPc@$z zFt#o8=-)}hoi__4=iJ>X>VstZK9k4lV6=JwevH;ZWeR7ID)gZ&w#2D)C&+b-!Gu!s zL$XTX^g4~6yduaKzYx#Iyb*_LHLy^|3r7FxVuGR#j_l_9`kWQC2F^mB=4@EcFykz` z2phRKw4`6nRJlL)zUsjo9qJkW=S?1Og3j$^q;aOS-rSXQRxc=!$8u^gb6c*KqpM;X zTy^MUka)jKJ#S9$F1Xe5MJTV)!PQ8J3ytgEGBu5hvHZ(^maFf>pVNuuFk=%5PSI0x3sWjHukTvz#F9^tUI2G zy-7v9LwaCB<0~=Z=tRUQ17~(yLiw5%LOK+{@svFt44#P|<F{tJq+lvEZ-w=hP%@N9Hz@Gc`1>2@j zHSecY<=kDD1PO{{4wQ z#;ChtPINj>-EzkBpI*4s$h-;uT^|Nlpy4O8dmPL0@kj?$`ZWr#3D$_aScLrL^sP;| zKp%Sb($2bIf#)?xQFI>Wn7iQ0KYsYQKMgKvCYbS|3`>eKFy}t`p(A_YmT8UXwJ@BSEY+CLIp19B zBUkM!!UB4<7CxXBFe)3n_w()^w}f7nQqBVx!KB}BaqzD%;=6?*`wL&VU2woi57xw8 z$*5bx_l0vd7H*5ebb3Ht^DCe;kZfsGp!tRj9#}Vvhx}gNGY&#zp$6>8)cc}B{)US- zu21wKGsF|;mXjYA&)wIXP&o>l< ziqhfDJ;J-G)i9CELB+rdau^0;=KW`)PoNHV|C)_GWoFbYSmEya0<;?2AdK_6i|c1$ 
zb)f~GoMwJ|Q4Xy6{=L6`Ea!H|#H?|8)N;(iBvU&~-ENOP+vrQ?Jj+ud5Fbp_(6K5I z0d4d<-61!7KmFe2-Ei9cmbl)Z`{Exn(5SA1p>@n-v(ABRqAgbR@nSY*9OQZlRF0x= z-Z2Y4+;=FIb>WWwn^nWSeMXt& z^YXvR^xAo*b;eF8@GHIfUVNxth*XH+bTL2zpd9*l9sUt8wm@9D+J_?PUAmir=k z%V)9YkQO?1^2B&2Qydy-iMmO7$f9pvuO%FH%@L5x)P^EC68a}%Ve+vA)-s*Z5b{{G zMCs$L412vOV`v*#VRTSFoMfCZS|Gy#ASjv~O`ps<%hZ0$cRq{LE6_9c#4bA>M=jcc(JR?+FFX58~gOZ$$)sZVexU zvC3SVeIWDSf0Db|i$0OCX^4841jl>a+xM%4|9|o1kyN32K)Ptaez9j;Frv?wAiK>N zo7eMu=9vJm7Hd4@uI>KBWORMWdBV;`*zzO_*2RmUSJ{akpBG~Hqe+JK5-V z*BR~4J@MsrI>rq&hIJWvNX#yH|ECmRK6XXQUx!48AYG`(CBya%a}lZM^G(ge$#b@t ztvnsJ=aTR^%LbqFDlmlkKT6a^>mE?Smq!E8uU-zS%(ic6wSwjpPqZJ;!|R*gDDOKP ztyh!bD{q4Y?gQ=&AycAP6{ODDFQ&-g@6Vq_c_H-(FK5Hy9)G_9_IOV};=TRO_`P}x znIm!N(M=n@R#d{1KEAC*)mUfSg*@*zvE66_M&^6rRlFWf9&<-fJo9`WJ42<5FOGdp z#2jK8G-j4zbzKJPPD#(-l&U+Nl-z;3low)DcP*H0W=^k)K6am|F-5Ln4HUP2d&_AFAtty%@JUdgp1TD&Yx$B zwN4A6$NjU-wm%pB~xxe#}Pn5nLFO~g?*c&;)D4{EiMn_&lU`nd;ea>2#1o{;NHrqn2P@_#B& z)g=L~e$|q>p;CP|4-@E}p7}_uveZF;6@Q!>X~1^`^Ua@;54O+>s#ijAI(060U+i7? 
z6(b-pA5)qAxbj*r1XZ<*hepQu)YlV_^T-c9XoCUtg)H>3hQeQ-(0?C^SKJe%?V}Hj z`EaG=E=$!fN%gTO7u^(^U(bkhXM^!ZJ`2l&HE{Q;Grky-#XX3b+KDsCILO1;aOUb1 zFNDF9BJ}oNAbE}{RSzb0ZrXoEt9YxhPxQ>!N69DpXcPpxRXCyl!vZ)_r#kCOB2MRK zAcA`Mdoyd$_cHTNUM+;5MhDzyFP?0n$qW+*%vxfI0Yj~EROGOhaD;ohKP)GO_$$Iw}#Bf0aniZIS9j(U{wdJICi@SM~%bz3_yA8~v24Y;cgA!G&uaU~n!7{%+9- zHnPNw*F{L`#XZ)_5;TW&z@odS#86e{{^evKa)>eH4IFT5U?Kb#yTPE>O!OVk?38pv zc!gEM-Gdx_m3atO8B0IvL9zUp4!lajaDmza+hoFd8R^*btyt{vo%9;JgdN2b7IoFP7x*}?rzF3L7I;6g$m%J+vuN!}EepUUu} zPd5DTm1C(tGY4$1ij&+g_Zb$!oECk!6_6{Ym=B`}J9xFv#)*?r=uM7u5bMW}e`O$b z7&(M4U&N4tkHVXEM&f*CldmvGBRz2mjrllp(+v-9M;woK+{d%hS4KXie~ zuR5`3rWveSOVHL=8~41KbtG4S{(o$l%R3n-Ta$5hnl8Lk=%Ymzp8S!1p48_->hI@~ zF9VNV_r<4I`WVkW*A40&jr($DnUza_zBTkTeNg{!Htw^&(c4~xYX&L!V^b=5&L_2> zP8!e^^{<}^xn~x*njeGK6Xa5#vPAOCBFx`JPwM@tkPYXIV7(4>I#!|}G8-|8|MOW% z%hy(;=6+h-d$Bk`3+C1Vs0*V$E5{D6*lV9ua^bv?d1vIf#FJ;`#l4uCe+teotdN{f z@}ss{+^@PJc9xl9G>~sqh_YhRbKt@h^GYWBYobkvyHC&n{S0I~$t+q~X?ZQ#?Mq0O!Y0zgjgPnbW!; zEA+A$*v$a*lzIPm(!|xH?g%`Ti7zvq(5b`=PqQ-7rHR~qHTH{v*_iUE0+k;9k@xJe z@H(Rb%N3rm9%PI(a=tnRXJhqnOSDh+#Ossc$TqXa*o%dT&dS1GGM^T#{3cYxpNX&J zzP5PV;OJ)^+#(xxv1bmNwwoa!Bn02BB5-PqJ?do0$Unzi>cKhq*t-uBU+fXz$FUw# zB759L7fEK$*uE+AL7F4Nr>al&g6sv z%$w{{#rbFh_kbft`6Ky343xj41M`_~K<mn?1VNNN&uBPT{KnXK)e~NoMUWo_0HOX}gpsqrnTu^spaYlUh zmOcK=osILQad>jq4%=x{!ymDi5?e z2cm6nCKlb)!%XrEe2>vbd!ZV+e}0PaV^@Sjy$-#uQP?n@Irm2#@isdT-Jdf5=wlGH zmt??-``ry{6?oQ{IwSu5ZJHHe)ACUm%+Nwx{bcBUwM2Y>`fDNz@lJ;OoS=B@nh}d) zb7perlVi*|W|&JcE~a%u$?sp{^kgmc?>ZIwU+G8W{cNV33p-ys)SsG)Eu8mkdSr@S z2g?u>nSmaw%P{(+EJEvEhzjNvME8k-#y(wUio2oidoJX@xxw9i8Vt2E;D5ywLDR@_ z)Xl-*ka;+GTmc8ad=au^HL)^=b$JgHBv6N0Lx0k&QRLb_4?_2Q0ht z5K$tz?paA`jL#G z%=?jAXGztcNjHs4;lgmVI^WnMD z^0hY@%3#W0pT&J!9rT(wgPcp|y|%ibWg7jP>YkW+JODnsGGKO({cl4hj@x8lZEZCI zi{&w2`GN4BXo~de5ausTLc~ZHY*Wm}iBUF~EjJnJC&& zRC!K}8N!^qsaebm(LqmHM+7JoAm=vo)mJ;9=tc%IH!>HhbUxza>5uEXK(ZcF>OHnG z|Es8YML*tRL;CZ5v0;M&&K$9W^27pkOm)QV#!xhLoQo5}6l?mIp^f!IXZmdnEc)PU zQ@fBou8wcJ0F-T2gTte1M{ 
zkElsld*0*!XFSKoJLASte$W1LL$ygDGEI}PR?ZTSwaRdJNhYFamrJfg#&hnrOs+-z zlI5;Qcyiyb$6O^(6yD0hzRuRz@O>s4Cd4CZxd(b3FGWAAR_W91hH^&(4`r(RnK1cf2#i;yKgcq&v#5J4eqPoX8Tm$tr zovcwv?yklx`Y`E(h`1S#roo&S$2g&8ODT3OOGTh8eIfU_$J_f|)CH+K0#ScF78^}WurIO_ofl`L!mkoi>z*Ms3arnciGgO!H~h_c>-jNAj&sH; zwO9mwu)*(G0j~uKc=|&RQ=BXCOECcz)E6vo?~SKvjiT#e^0L=Nkl(D12kV^i?}v1( zU+<0=&cMpQ=-KCsLxBLD7Ok$r?Lz`_!YTA_#Sy>w7hREW!= zy7)Ss*$4k5K+%`EdA8))a^86AYKdfS_Jy^dMGyO@!s)sOhER`!S`+41*}|JVfdk$y zkh2Se!oE0+vZju8ZV5j3OTmh<31)kDiUV=GdORKUK<5;c{A#vU>%(bjili*_Bva^+T+!?u-FVF53J>i4 z6NjL&+UT7?E%4(AvQCR}yj6*s?w7(fVItgm`yuq2G5+}2;tumGfA(;~hBh|@$w%SE z4Fi-8EQNAWJY1i%9+bF;wKeR3G0h*v%fEFoH_8X%k|E^&6Xc@jA+@7DTJ8tnZbbw> zE9v4@5cMzesgTVpMFICmS^b;Da_R>wrh4PdP(5_JV2!7rI7ezGduFy1VuK^F!GoLy z-C~^jl!|4{{a677$X`+==Y0b5&rbwV;iy$*j~SMSfhvOCxdoo(*i@^S8!1o0A>T;OYtUuF>38n&GslK<*6I zXRcM~+V_XZKm1sjDC!{KpCInzm@B)~76IG?^$m5!u#JJJK@ybv>Y&s05+vGCt305B zb5wbZ+kalD^fSZl%2bq@XrSkDOKc1#PwtNmTFg7VKM;Qxt@jZ$@Z0w}QEOKC&&yYgP#%k2@YUhYewrQGjzzDJYgNmhA87xAcnWb+JYa z8RQMScLjL7R2vtsn&D|Svbnq;N+9(-FU77%dLhS*v+-yT zu|rSlns;P5VF=%~MW)Q1KfrvF?ooK|!~66f?n!*oQ0!TTXY0G;+>JLGBS6d~hZ&1&W4dxK&$;TW&d!(J7bw`$^4vk^1}1RO*iQhce6= zP)AI#JMP99Bk>UH!45f)Q*p-BUqMJtPr%6;4)EB(_rvIX9DP=bIk&pui_CLzmO6ox z`s7BEB{;-{IsxZg++Ji01N#|R`X&;A&mB>7z6d=Un8&)Vfc{uzJo(rW?cK-V^)xe_ z@=wC8Id)jwzXG%E9N?qOUJ9{@G>(E?STUlP7GUXDW-3ZNPimg2y752+bzLhqaepAA zUCR4PAO5b+oGs>H*9|MoDsaOadTVN!Ej{qUe5AV-K&^%KPu@c@M&`DdP+)+KIn+H5 z*1_rtE*QL=nGUrc*gq@`cE{+M9-xnU@|W(*r{m(wd6+!-g3x1so3_IOQ}pLT_BnGW zI$PrE`5Zh}x5lvXvru^;8i{*!;YNReCG~tAsJ{u+da%lwNnV#e{-ZZ5 z`ez=(dEaXNCXa>whl3OJp>v-+fN_~{;5<^QPE|#vCtiJhBBmwi;MLm@{N}v%OMm7a ze`H?fI7c*YcE@z~{f*q$nRr*?B$?SW}ff;>_ zWMm^f${YQTbXrRR&2G0dt8Quk^eM}CQ+^M8vSeiIRWz#IG71K2XJ<;LDZ=16#w zr5_L30V&YC!mQ_*O1MSkV#z;MSXIm(VDnR9q&x{#3f{~PBd;b@poRY9_}kXFyWJJF z2j^fC>%q~_>DM+(#IDsPlHX@;%ari^*jKUIQ4_Zw!V+@^VmDn&Y9re@cuX8UEjW0KggJHp#xPKw^PnjU8uPxU372y`yUwvcp@C@`f z)|#W|rBxWGRDf^wDYg7&Kvli?wU~3WM@hJDsf%0N9ATYV0CAICmaZY#8=Q`K 
zPjbdjRYG?=_XzWDaKmZ)t~dI^~TKQv_io$^3mvPk7@LWSI8{1xR{bnOm24Hbd0bm zk=(!E9NiZ&8{Ui4ifVAY!ae#J;WV<)rrX2l?=)B#!!#VCu&#`pSs zByQ-50rQ zn2{ih-UHQfgdC2}@Ab%;bH>DgOf<7TNSr?n{nX~-V!H)yhnB$2Jqr`9it%9iXEECR zqwu6}HGp2tCvVAHtF^|K^V~J~xgcJ9CaTUwBln^vmdq-{`1&;F3G= zHwig^?k{&o;DCxQ>bly&&oT{4otz;*+YGhtD&evu3mKYxKTRBt&7~cXl*F8$ ztNxflf1Q7f4JN2i4^ZWXJugj>U>=DJ#=KAeCcio=0#ByT!;H1vF>L=g@s@Rb)8`Px zOwdH?M?136vJkh^9DJe@;|*pd4~bjr{}uRe*m zyW9i{;m)udU5N7D9#HGzk88{VUTRMsSr_t!-e+NSa}ADX_QHu(4MLkfQCl14k$RJ_ z<>G*aQS?G{mUlt<0H2V#@90r#iV&|ap6$}oG3 zJ;4}8gMn{r3(&2F zJc7;Babs#6-w%37yjqA!#$+@VmO`pNS?Y6i;I22Y&Y z>xKJ!I7|A;?5Z`n__2w3^PN3$cE@~_jLgQ3VKtKTu5TB%iI?ABiNEcP;d?d)Q{4@@ z8*)W&`C@dO;R++GXpDDGMX-UuUA=iQrUvk;)jY{ORjE2TsX2F2`;H+^@<>nWNLKqu zOkWJ#(Km+Ga2FhYmqn!#bIAOrp+zPUYEfo5X31=-Yl)b$pj2|7BlUTg>8y+$^=;zC zTKeUErXkqL5D~U6Xd~CwMA-?u?WV#-n|bs4^ab6jL>)OMK{qQPwSSjdX9bZ9E($IS z)l?09=n{nZHO6?a=7_cnIpp|~L)R35@0s+<>*yohxC8^`<8X)j{@425@Kd`<7)Omq zVUQ07=@{eWJ1e9VkW;?X29;`l*l0Qz+d|DTwMQ|%MyVL|Z!r|hdf}Mc6(QSM2Qdeu zvF9*3!)KK z=UT_P|8cpPiAWbS_*ya3@qHLtldX{Su?Xe-oNet3aQyK@5&i3fIR1VDR;Dl)=7$Zs zrZDenb~fj2&baLv10&Xe?n!2tcz_(-Cy}_SRL1-?X>&}edN8TyNu9rp{nj7v-aiym z25TU2C3k3Z4Dl_L`-*DjCBG*xc(NN@nj%p&!UUJO&ry6&Kl7vt$^BpT^A6ZE?5SAv z!4SR`Ga>W0Hd0sE!*6m9RI8mZeY!VPxa$qoGC=wGGQ^%pg&pd~OD!`vNFcgP>zno_Cd1Rls%!>le3>r^YSBcLPf z`@9!Fx9XyQaUk+COpyLJ{eP1R;XvJfy>AFgLldFffw>^t%g{*AiDqgg(t0(CBNJ~3 z`!G}V{X7>7m~UUN>W(uOd1!NRfp*s@yym`p$~&NkRW+WNk_Y5og`-zx;kL0&+_|ca zVhoXzE=rX-9KXYg;rthr-pehQxJO5 z5VsAjn1!4JT{BzEV|}Jt69J>CCg??;*cfyAFnBkb=6x1&?vF%Cr54gJ1*13f&UVW? 
zAyeeSW4jwYHo?%cOhoA#BN*A0Bj{)<_A*CB>Uo`}D8nMsmS1y!&9-LP778t zp;&ds6oQ`a?bq}1_@D#!M1zbO?8w2#vyc?$)QTE5Tp0{+HcYgc*^lNPouXmVUzH ztlh^)nxQn<9-R-;<1@_yRlNi7I&=;+sF__7UqnV^0R|1_4pU<84B8&|4fN#Sbth<+nEEyAYxk&Ed3FZyP&6lhfwp>(Crr0a7?63iI zt0FPL!vOBxrebfO43yK?8dH*lFV^WOnC}F~2Q`=#NDfbWHU2xRfQL1=Me(Q+82iNu zhsbRV)8L#|HXFq%)+lTB#Yx|A3?6TU9eXE`>6jcm z8B5!};KGcT+wZ3!tcH1mv$G)Eh55Oa3S@mf6~oVJL;3d%_-R|(++sr=rB#5Zivw=6pmO*(hl|kHL=6;luYkJfhKQ#^bd)s4JaxQwEx2JbK5Nl#%AoiJKYeq48u`m3& zr3fQdpN$f0DhpB@fF849QozFH1jLwBts67_lBb&Gj`Q4pOVX?Rrw@NecXHBMji!X0I5ZvHbvbXVrK^vgs*HS;+3 z{T7-RJK%K~_4B)?L51_=at#-FG2deADt8$4jKGDYI81*^K9Ok!!b13dJkGq&(<&IR z@IWlgFhDT#ZdBVicO2xwJm56wF*m+$kf4vAUb45QI5Be}woFJuw(UZkbW}k7iJ!vv z1^qC&Q=o9i5aVxoVEl_5?6#$Dn0a8I4@aZVM_0_hL>|(SB3SWVAXR59RZlB5cU9_q zw;T6r-xod?)9y1ba^7UL)(9XKaN|Db#Byf@kU(l@ITx*K^{~2Q0ao}YKz&;YCbr9B zquL*#^g|u{ljwWD!5m_H2mGN9_rE%4_~?_fR~3z`&1U$Q-?K0OWnngT5K_-`Jo!uP ze|<|V)6v5rcYdELIUg;y!_icFb1yk!a$PWN5|R)UYzl+B^lN)(Gh>x~l|+uG)O?`y zwS5qGwE-58oJAF2%QdBVK zb%Qv{{4J%Q@lgHG6yEeBr#BX2^gsuc7Tm@Wy zbYCp}&AG&$**Lvh8=$`2U^qF7yPdF3Zz|3tr(p#1b4JuuV!K{8D&JN>YMm8!_>UO8 z{+GDM&*Ms~J<>ZFLuR`(#y!i%AYW=p-ZH1~SUAQVcEr)mMHqA^4+?_|5D?i79Xg&6 z17tLjZy$qlC9;r?TEfqv5a!f*q^8Y4{ib+y_{Mw-=5`JrlYy&`%F%nqA5oazDzcgl z$pBAARUaM9?Cyi>?YYn%=Yd1RLZIWqon|I`t?U{spI%7D-#o0883>odZ^WZ!4SYW` z1FrIBSXXU>vH6AAPp-g5?vy&8k0saJh_zY;y6Nz9nO2QH^WTWnfse%{E#^}!3xF7^ zi{SHS$R0)SU2jXM&Z7>gCJedb4IwkN6r+b{L;hD8{ym_I+if4jjeRDVryGgtmGn8T zvBlXn`53N6E`O>mURy?Db*et9*-r&^&cfv4c}TV&%pJrnv6H^6hqpsuu$lSW!)@@M z^VzfXH;$aj8pu5fF&)gYb4NLnO46ZWMt#A-Z{pm`r=sD2KGuGS!#V0hnwcZ4yFVYg zW$rN99*pXz@i@^_4|*Dv)GwvNDSjTL`Wyc#Bg=TmOEIWH5C2XLgSwLe7Cdo4OIjX8 zi#>)<48z-7QSgZt_+ZJ{zp6Diwfqlc<7#&@xWPj)VxuQ)=# zI0CEM;&Eo238v?jKpXWNoXK3uN5n^W;9fACo6@Mc z(ZjKg^H4i83wHAtVDDf>ERg>xLjPj^RJ#XGcP39_8M*rVvfy2B4bv%8U^sjZ#tyPX z(V|jhcFZADvlvqK&{BWS-dY{8TfZY_ebC0^DfDvurNf+XTZF`s1AT$tn|jvQvSH{% z?$q#ha>2f3AUM4gQhn?f(uTq>_LmrKISLa?Eirnu5sKfr!KjXZvlIt}F0h4iRx}>k z*<#;`66Ss;q2zoa&cB?99j6|P%ui~#aVvnejtyS2UVU~T6ZPe8*r;R6`ZEbO^>$Ev 
zQUR^+30PrMC7Ew3)h8uY&mi@4k$S&N)fpSd_dsam53$lv4V{=bwK^NAd?I{w4&Ed;=Lj@pE+r*418UliOO(I*s)iW|89V0OM9&T zo`dPn9C4#G5dA9SF)EIA58wSKsadt5M?mU6pkM1Rp=Uu&`*dAAKAwo)YKCOwTVwFm ze2glhF1Bw7^0!B$Xsa=N=9EEWS~jjnS3s&ieEIesD1Fie&%DMWdYmx=cUnMpnH}EJ zH~2ox3I7bDej+yveNS1S(60~*?rC`4!n|FHeBo@pfw+D8lTh;;%iXRc(rQgnOPz4N zX*Q~S?O|^)89xf6@xj*$V+T@~H98$uz>% diff --git a/tools/eval/datasets/minisupervisely.py b/tools/eval/datasets/minisupervisely.py deleted file mode 100644 index 63008dab..00000000 --- a/tools/eval/datasets/minisupervisely.py +++ /dev/null @@ -1,202 +0,0 @@ -import os -import cv2 as cv -import numpy as np -from tqdm import tqdm - - -class MiniSupervisely : - - ''' - Refer to https://github.com/PaddlePaddle/PaddleSeg/blob/release/2.7/paddleseg/core/val.py - for official evaluation implementation. - ''' - - def __init__(self, root) : - self.root = root - self.val_path = os.path.join(root, 'val.txt') - self.image_set = self.load_data(self.val_path) - self.num_classes = 2 - self.miou = -1 - self.class_miou = -1 - self.acc = -1 - self.class_acc = -1 - - - @property - def name(self): - return self.__class__.__name__ - - - def load_data(self, val_path) : - """ - Load validation image set from val.txt file - Args : - val_path (str) : path to val.txt file - Returns : - image_set (list) : list of image path of input and expected image - """ - - image_set = [] - with open(val_path, 'r') as f : - for line in f.readlines() : - image_set.append(line.strip().split()) - - return image_set - - - def eval(self, model) : - """ - Evaluate model on validation set - Args : - model (object) : PP_HumanSeg model object - """ - - intersect_area_all = np.zeros([1], dtype=np.int64) - pred_area_all = np.zeros([1], dtype=np.int64) - label_area_all = np.zeros([1], dtype=np.int64) - - pbar = tqdm(self.image_set) - - pbar.set_description( - "Evaluating {} with {} val set".format(model.name, self.name)) - - for input_image, expected_image in pbar : - - input_image = cv.imread(os.path.join(self.root, 
input_image)).astype('float32') - - expected_image = cv.imread(os.path.join(self.root, expected_image), cv.IMREAD_GRAYSCALE)[np.newaxis, :, :] - - output_image = model.infer(input_image) - - intersect_area, pred_area, label_area = self.calculate_area( - output_image.astype('uint32'), - expected_image.astype('uint32'), - self.num_classes) - - intersect_area_all = intersect_area_all + intersect_area - pred_area_all = pred_area_all + pred_area - label_area_all = label_area_all + label_area - - self.class_iou, self.miou = self.mean_iou(intersect_area_all, pred_area_all, - label_area_all) - self.class_acc, self.acc = self.accuracy(intersect_area_all, pred_area_all) - - - def get_results(self) : - """ - Get evaluation results - Returns : - miou (float) : mean iou - class_miou (list) : iou on all classes - acc (float) : mean accuracy - class_acc (list) : accuracy on all classes - """ - return self.miou, self.class_miou, self.acc, self.class_acc - - - def print_result(self) : - """ - Print evaluation results - """ - print("Mean IoU : ", self.miou) - print("Mean Accuracy : ", self.acc) - print("Class IoU : ", self.class_iou) - print("Class Accuracy : ", self.class_acc) - - - def calculate_area(self,pred, label, num_classes, ignore_index=255): - """ - Calculate intersect, prediction and label area - Args: - pred (Tensor): The prediction by model. - label (Tensor): The ground truth of image. - num_classes (int): The unique number of target classes. - ignore_index (int): Specifies a target value that is ignored. Default: 255. - Returns: - Tensor: The intersection area of prediction and the ground on all class. - Tensor: The prediction area on all class. 
- Tensor: The ground truth area on all class - """ - - - if len(pred.shape) == 4: - pred = np.squeeze(pred, axis=1) - if len(label.shape) == 4: - label = np.squeeze(label, axis=1) - if not pred.shape == label.shape: - raise ValueError('Shape of `pred` and `label should be equal, ' - 'but there are {} and {}.'.format(pred.shape, - label.shape)) - - mask = label != ignore_index - pred_area = [] - label_area = [] - intersect_area = [] - - #iterate over all classes and calculate their respective areas - for i in range(num_classes): - pred_i = np.logical_and(pred == i, mask) - label_i = label == i - intersect_i = np.logical_and(pred_i, label_i) - pred_area.append(np.sum(pred_i.astype('int32'))) - label_area.append(np.sum(label_i.astype('int32'))) - intersect_area.append(np.sum(intersect_i.astype('int32'))) - - return intersect_area, pred_area, label_area - - - def mean_iou(self,intersect_area, pred_area, label_area): - """ - Calculate iou. - Args: - intersect_area (Tensor): The intersection area of prediction and ground truth on all classes. - pred_area (Tensor): The prediction area on all classes. - label_area (Tensor): The ground truth area on all classes. - Returns: - np.ndarray: iou on all classes. - float: mean iou of all classes. - """ - intersect_area = np.array(intersect_area) - pred_area = np.array(pred_area) - label_area = np.array(label_area) - - union = pred_area + label_area - intersect_area - - class_iou = [] - for i in range(len(intersect_area)): - if union[i] == 0: - iou = 0 - else: - iou = intersect_area[i] / union[i] - class_iou.append(iou) - - miou = np.mean(class_iou) - - return np.array(class_iou), miou - - - def accuracy(self,intersect_area, pred_area): - """ - Calculate accuracy - Args: - intersect_area (Tensor): The intersection area of prediction and ground truth on all classes.. - pred_area (Tensor): The prediction area on all classes. - Returns: - np.ndarray: accuracy on all classes. - float: mean accuracy. 
- """ - - intersect_area = np.array(intersect_area) - pred_area = np.array(pred_area) - - class_acc = [] - for i in range(len(intersect_area)): - if pred_area[i] == 0: - acc = 0 - else: - acc = intersect_area[i] / pred_area[i] - class_acc.append(acc) - - macc = np.sum(intersect_area) / np.sum(pred_area) - - return np.array(class_acc), macc diff --git a/tools/eval/datasets/widerface.py b/tools/eval/datasets/widerface.py deleted file mode 100644 index 50237c2e..00000000 --- a/tools/eval/datasets/widerface.py +++ /dev/null @@ -1,315 +0,0 @@ -import os -import tqdm -import pickle -import numpy as np -from scipy.io import loadmat -import cv2 as cv - - -def get_gt_boxes(gt_dir): - """ gt dir: (wider_face_val.mat, wider_easy_val.mat, wider_medium_val.mat, wider_hard_val.mat)""" - - gt_mat = loadmat(os.path.join(gt_dir, 'wider_face_val.mat')) - hard_mat = loadmat(os.path.join(gt_dir, 'wider_hard_val.mat')) - medium_mat = loadmat(os.path.join(gt_dir, 'wider_medium_val.mat')) - easy_mat = loadmat(os.path.join(gt_dir, 'wider_easy_val.mat')) - - facebox_list = gt_mat['face_bbx_list'] - event_list = gt_mat['event_list'] - file_list = gt_mat['file_list'] - - hard_gt_list = hard_mat['gt_list'] - medium_gt_list = medium_mat['gt_list'] - easy_gt_list = easy_mat['gt_list'] - - return facebox_list, event_list, file_list, hard_gt_list, medium_gt_list, easy_gt_list - - -def get_gt_boxes_from_txt(gt_path, cache_dir): - cache_file = os.path.join(cache_dir, 'gt_cache.pkl') - if os.path.exists(cache_file): - f = open(cache_file, 'rb') - boxes = pickle.load(f) - f.close() - return boxes - - f = open(gt_path, 'r') - state = 0 - lines = f.readlines() - lines = list(map(lambda x: x.rstrip('\r\n'), lines)) - boxes = {} - print(len(lines)) - f.close() - current_boxes = [] - current_name = None - for line in lines: - if state == 0 and '--' in line: - state = 1 - current_name = line - continue - if state == 1: - state = 2 - continue - - if state == 2 and '--' in line: - state = 1 - 
boxes[current_name] = np.array(current_boxes).astype('float32') - current_name = line - current_boxes = [] - continue - - if state == 2: - box = [float(x) for x in line.split(' ')[:4]] - current_boxes.append(box) - continue - - f = open(cache_file, 'wb') - pickle.dump(boxes, f) - f.close() - return boxes - - -def norm_score(pred): - """ norm score - pred {key: [[x1,y1,x2,y2,s]]} - """ - - max_score = 0 - min_score = 1 - - for _, k in pred.items(): - for _, v in k.items(): - if len(v) == 0: - continue - _min = np.min(v[:, -1]) - _max = np.max(v[:, -1]) - max_score = max(_max, max_score) - min_score = min(_min, min_score) - - diff = max_score - min_score - for _, k in pred.items(): - for _, v in k.items(): - if len(v) == 0: - continue - v[:, -1] = (v[:, -1] - min_score) / diff - - -def bbox_overlaps(a, b): - """ - return iou of a and b, numpy version for data augenmentation - """ - lt = np.maximum(a[:, np.newaxis, 0:2], b[:, 0:2]) - rb = np.minimum(a[:, np.newaxis, 2:4], b[:, 2:4]) - - area_i = np.prod(rb - lt + 1, axis=2) * (lt < rb).all(axis=2) - area_a = np.prod(a[:, 2:4] - a[:, 0:2] + 1, axis=1) - area_b = np.prod(b[:, 2:4] - b[:, 0:2] + 1, axis=1) - return area_i / (area_a[:, np.newaxis] + area_b - area_i) - - -def image_eval(pred, gt, ignore, iou_thresh): - """ single image evaluation - pred: Nx5 - gt: Nx4 - ignore: - """ - - _pred = pred.copy() - _gt = gt.copy() - pred_recall = np.zeros(_pred.shape[0]) - recall_list = np.zeros(_gt.shape[0]) - proposal_list = np.ones(_pred.shape[0]) - - _pred[:, 2] = _pred[:, 2] + _pred[:, 0] - _pred[:, 3] = _pred[:, 3] + _pred[:, 1] - _gt[:, 2] = _gt[:, 2] + _gt[:, 0] - _gt[:, 3] = _gt[:, 3] + _gt[:, 1] - - overlaps = bbox_overlaps(_pred[:, :4], _gt) - - for h in range(_pred.shape[0]): - - gt_overlap = overlaps[h] - max_overlap, max_idx = gt_overlap.max(), gt_overlap.argmax() - if max_overlap >= iou_thresh: - if ignore[max_idx] == 0: - recall_list[max_idx] = -1 - proposal_list[h] = -1 - elif recall_list[max_idx] == 0: - 
recall_list[max_idx] = 1 - - r_keep_index = np.where(recall_list == 1)[0] - pred_recall[h] = len(r_keep_index) - return pred_recall, proposal_list - - -def img_pr_info(thresh_num, pred_info, proposal_list, pred_recall): - pr_info = np.zeros((thresh_num, 2)).astype('float') - for t in range(thresh_num): - - thresh = 1 - (t + 1) / thresh_num - r_index = np.where(pred_info[:, 4] >= thresh)[0] - if len(r_index) == 0: - pr_info[t, 0] = 0 - pr_info[t, 1] = 0 - else: - r_index = r_index[-1] - p_index = np.where(proposal_list[:r_index + 1] == 1)[0] - pr_info[t, 0] = len(p_index) - pr_info[t, 1] = pred_recall[r_index] - return pr_info - - -def dataset_pr_info(thresh_num, pr_curve, count_face): - _pr_curve = np.zeros((thresh_num, 2)) - for i in range(thresh_num): - _pr_curve[i, 0] = pr_curve[i, 1] / pr_curve[i, 0] - _pr_curve[i, 1] = pr_curve[i, 1] / count_face - return _pr_curve - - -def voc_ap(rec, prec): - # correct AP calculation - # first append sentinel values at the end - mrec = np.concatenate(([0.], rec, [1.])) - mpre = np.concatenate(([0.], prec, [0.])) - - # compute the precision envelope - for i in range(mpre.size - 1, 0, -1): - mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i]) - - # to calculate area under PR curve, look for points - # where X axis (recall) changes value - i = np.where(mrec[1:] != mrec[:-1])[0] - - # and sum (\Delta recall) * prec - ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) - return ap - - -def evaluation(pred, gt_path, iou_thresh=0.5): - norm_score(pred) - facebox_list, event_list, file_list, hard_gt_list, medium_gt_list, easy_gt_list = get_gt_boxes(gt_path) - event_num = len(event_list) - thresh_num = 1000 - settings = ['easy', 'medium', 'hard'] - setting_gts = [easy_gt_list, medium_gt_list, hard_gt_list] - aps = [] - for setting_id in range(3): - # different setting - gt_list = setting_gts[setting_id] - count_face = 0 - pr_curve = np.zeros((thresh_num, 2)).astype('float') - # [hard, medium, easy] - pbar = tqdm.tqdm(range(event_num)) - 
for i in pbar: - pbar.set_description('Processing {}'.format(settings[setting_id])) - event_name = str(event_list[i][0][0]) - img_list = file_list[i][0] - pred_list = pred[event_name] - sub_gt_list = gt_list[i][0] - # img_pr_info_list = np.zeros((len(img_list), thresh_num, 2)) - gt_bbx_list = facebox_list[i][0] - - for j in range(len(img_list)): - pred_info = pred_list[str(img_list[j][0][0])] - - gt_boxes = gt_bbx_list[j][0].astype('float') - keep_index = sub_gt_list[j][0] - count_face += len(keep_index) - - if len(gt_boxes) == 0 or len(pred_info) == 0: - continue - ignore = np.zeros(gt_boxes.shape[0]) - if len(keep_index) != 0: - ignore[keep_index - 1] = 1 - pred_recall, proposal_list = image_eval(pred_info, gt_boxes, ignore, iou_thresh) - - _img_pr_info = img_pr_info(thresh_num, pred_info, proposal_list, pred_recall) - - pr_curve += _img_pr_info - pr_curve = dataset_pr_info(thresh_num, pr_curve, count_face) - - propose = pr_curve[:, 0] - recall = pr_curve[:, 1] - - ap = voc_ap(recall, propose) - aps.append(ap) - return aps - - -class WIDERFace: - def __init__(self, root, split='val'): - self.aps = [] - self.widerface_root = root - self._split = split - - self.widerface_img_paths = { - 'val': os.path.join(self.widerface_root, 'WIDER_val', 'images'), - 'test': os.path.join(self.widerface_root, 'WIDER_test', 'images') - } - - self.widerface_split_fpaths = { - 'val': os.path.join(self.widerface_root, 'wider_face_split', 'wider_face_val.mat'), - 'test': os.path.join(self.widerface_root, 'wider_face_split', 'wider_face_test.mat') - } - self.img_list, self.num_img = self.load_list() - - @property - def name(self): - return self.__class__.__name__ - - def load_list(self): - n_imgs = 0 - flist = [] - - split_fpath = self.widerface_split_fpaths[self._split] - img_path = self.widerface_img_paths[self._split] - - anno_data = loadmat(split_fpath) - event_list = anno_data.get('event_list') - file_list = anno_data.get('file_list') - - for event_idx, event in 
enumerate(event_list): - event_name = event[0][0] - for f_idx, f in enumerate(file_list[event_idx][0]): - f_name = f[0][0] - f_path = os.path.join(img_path, event_name, f_name + '.jpg') - flist.append(f_path) - n_imgs += 1 - - return flist, n_imgs - - def __getitem__(self, index): - img = cv.imread(self.img_list[index]) - event, name = self.img_list[index].split(os.sep)[-2:] - return event, name, img - - def eval(self, model): - results_list = dict() - pbar = tqdm.tqdm(self) - pbar.set_description_str("Evaluating {} with {} val set".format(model.name, self.name)) - # forward - for event_name, img_name, img in pbar: - img_shape = [img.shape[1], img.shape[0]] - model.setInputSize(img_shape) - det = model.infer(img) - - if not results_list.get(event_name): - results_list[event_name] = dict() - - if det is None: - det = np.array([[10, 10, 20, 20, 0.002]]) - else: - det = np.append(np.around(det[:, :4], 1), np.around(det[:, -1], 3).reshape(-1, 1), axis=1) - - results_list[event_name][img_name.rstrip('.jpg')] = det - - self.aps = evaluation(results_list, os.path.join(self.widerface_root, 'eval_tools', 'ground_truth')) - - def print_result(self): - print("==================== Results ====================") - print("Easy Val AP: {}".format(self.aps[0])) - print("Medium Val AP: {}".format(self.aps[1])) - print("Hard Val AP: {}".format(self.aps[2])) - print("=================================================") diff --git a/tools/eval/eval.py b/tools/eval/eval.py deleted file mode 100644 index a046d5bd..00000000 --- a/tools/eval/eval.py +++ /dev/null @@ -1,182 +0,0 @@ -import os -import sys -import argparse - -import numpy as np -import cv2 as cv - -from datasets import DATASETS - -if "PYTHONPATH" in os.environ: - root_dir = os.environ["PYTHONPATH"] -else: - root_dir = os.path.join("..", "..") -sys.path.append(root_dir) -from models import MODELS - -parser = argparse.ArgumentParser("Evaluation with OpenCV on different models in the zoo.") -parser.add_argument("--model", "-m", 
type=str, required=True, help="model name") -parser.add_argument("--dataset", "-d", type=str, required=True, help="Dataset name") -parser.add_argument("--dataset_root", "-dr", type=str, required=True, help="Root directory of given dataset") -args = parser.parse_args() - -models = dict( - mobilenetv1=dict( - name="MobileNet", - topic="image_classification", - modelPath=os.path.join(root_dir, "models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr.onnx"), - topK=5, - loadLabel=False), - mobilenetv1_q=dict( - name="MobileNet", - topic="image_classification", - modelPath=os.path.join(root_dir, "models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8.onnx"), - topK=5, - loadLabel=False), - mobilenetv1_bq=dict( - name="MobileNet", - topic="image_classification", - modelPath=os.path.join(root_dir, "models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr_int8bq.onnx"), - topK=5, - loadLabel=False), - mobilenetv2=dict( - name="MobileNet", - topic="image_classification", - modelPath=os.path.join(root_dir, "models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr.onnx"), - topK=5, - loadLabel=False), - mobilenetv2_q=dict( - name="MobileNet", - topic="image_classification", - modelPath=os.path.join(root_dir, "models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8.onnx"), - topK=5, - loadLabel=False), - mobilenetv2_bq=dict( - name="MobileNet", - topic="image_classification", - modelPath=os.path.join(root_dir, "models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr_int8bq.onnx"), - topK=5, - loadLabel=False), - ppresnet=dict( - name="PPResNet", - topic="image_classification", - modelPath=os.path.join(root_dir, "models/image_classification_ppresnet/image_classification_ppresnet50_2022jan.onnx"), - topK=5, - loadLabel=False), - ppresnet_q=dict( - name="PPResNet", - topic="image_classification", - modelPath=os.path.join(root_dir, 
"models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8.onnx"), - topK=5, - loadLabel=False), - ppresnet_bq=dict( - name="PPResNet", - topic="image_classification", - modelPath=os.path.join(root_dir, "models/image_classification_ppresnet/image_classification_ppresnet50_2022jan_int8bq.onnx"), - topK=5, - loadLabel=False), - yunet=dict( - name="YuNet", - topic="face_detection", - modelPath=os.path.join(root_dir, "models/face_detection_yunet/face_detection_yunet_2023mar.onnx"), - topK=5000, - confThreshold=0.3, - nmsThreshold=0.45), - yunet_q=dict( - name="YuNet", - topic="face_detection", - modelPath=os.path.join(root_dir, "models/face_detection_yunet/face_detection_yunet_2023mar_int8.onnx"), - topK=5000, - confThreshold=0.3, - nmsThreshold=0.45), - yunet_bq=dict( - name="YuNet", - topic="face_detection", - modelPath=os.path.join(root_dir, "models/face_detection_yunet/face_detection_yunet_2023mar_int8bq.onnx"), - topK=5000, - confThreshold=0.3, - nmsThreshold=0.45), - sface=dict( - name="SFace", - topic="face_recognition", - modelPath=os.path.join(root_dir, "models/face_recognition_sface/face_recognition_sface_2021dec.onnx")), - sface_q=dict( - name="SFace", - topic="face_recognition", - modelPath=os.path.join(root_dir, "models/face_recognition_sface/face_recognition_sface_2021dec_int8.onnx")), - sface_bq=dict( - name="SFace", - topic="face_recognition", - modelPath=os.path.join(root_dir, "models/face_recognition_sface/face_recognition_sface_2021dec_int8bq.onnx")), - crnn_en=dict( - name="CRNN", - topic="text_recognition", - modelPath=os.path.join(root_dir, "models/text_recognition_crnn/text_recognition_CRNN_EN_2021sep.onnx")), - crnn_en_q=dict( - name="CRNN", - topic="text_recognition", - modelPath=os.path.join(root_dir, "models/text_recognition_crnn/text_recognition_CRNN_EN_2022oct_int8.onnx")), - pphumanseg=dict( - name="PPHumanSeg", - topic="human_segmentation", - modelPath=os.path.join(root_dir, 
"models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar.onnx")), - pphumanseg_q=dict( - name="PPHumanSeg", - topic="human_segmentation", - modelPath=os.path.join(root_dir, "models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8.onnx")), - pphumanseg_bq=dict( - name="PPHumanSeg", - topic="human_segmentation", - modelPath=os.path.join(root_dir, "models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar_int8bq.onnx")), -) - -datasets = dict( - imagenet=dict( - name="ImageNet", - topic="image_classification", - size=224), - widerface=dict( - name="WIDERFace", - topic="face_detection"), - lfw=dict( - name="LFW", - topic="face_recognition", - target_size=112), - icdar=dict( - name="ICDAR", - topic="text_recognition"), - iiit5k=dict( - name="IIIT5K", - topic="text_recognition"), - mini_supervisely=dict( - name="MiniSupervisely", - topic="human_segmentation"), -) - -def main(args): - # Instantiate model - model_key = args.model.lower() - assert model_key in models - - model_name = models[model_key].pop("name") - model_topic = models[model_key].pop("topic") - model_handler, _ = MODELS.get(model_name) - model = model_handler(**models[model_key]) - - # Instantiate dataset - dataset_key = args.dataset.lower() - assert dataset_key in datasets - - dataset_name = datasets[dataset_key].pop("name") - dataset_topic = datasets[dataset_key].pop("topic") - dataset = DATASETS.get(dataset_name)(root=args.dataset_root, **datasets[dataset_key]) - - # Check if model_topic matches dataset_topic - assert model_topic == dataset_topic - - # Run evaluation - dataset.eval(model) - dataset.print_result() - -if __name__ == "__main__": - main(args) diff --git a/tools/quantize/README.md b/tools/quantize/README.md deleted file mode 100644 index 2ef80180..00000000 --- a/tools/quantize/README.md +++ /dev/null @@ -1,69 +0,0 @@ -# Quantization with ONNXRUNTIME and Neural Compressor - -[ONNXRUNTIME](https://github.com/microsoft/onnxruntime) and 
[Neural Compressor](https://github.com/intel/neural-compressor) are used for quantization in the Zoo. - -Install dependencies before trying quantization: -```shell -pip install -r requirements.txt -``` - -## Quantization Usage - -Quantize all models in the Zoo: -```shell -python quantize-ort.py -python quantize-inc.py -``` - -Quantize one of the models in the Zoo: -```shell -# python quantize.py -python quantize-ort.py yunet -python quantize-inc.py mobilenetv1 -``` - -Customizing quantization configs: -```python -# Quantize with ONNXRUNTIME -# 1. add your model into `models` dict in quantize-ort.py -models = dict( - # ... - model1=Quantize(model_path='/path/to/model1.onnx', - calibration_image_dir='/path/to/images', - transforms=Compose([''' transforms ''']), # transforms can be found in transforms.py - per_channel=False, # set False to quantize in per-tensor style - act_type='int8', # available types: 'int8', 'uint8' - wt_type='int8' # available types: 'int8', 'uint8' - ) -) -# 2. quantize your model -python quantize-ort.py model1 - - -# Quantize with Intel Neural Compressor -# 1. add your model into `models` dict in quantize-inc.py -models = dict( - # ... - model1=Quantize(model_path='/path/to/model1.onnx', - config_path='/path/to/model1.yaml'), -) -# 2. prepare your YAML config model1.yaml (see configs in ./inc_configs) -# 3. quantize your model -python quantize-inc.py model1 -``` - -## Blockwise quantization usage - -Block-quantized models under each model directory are generated with `--block_size=64` - -`block_quantize.py` requires Python>=3.7 - -To perform weight-only blockwise quantization: - -```shell -python block_quantize.py --input_model INPUT_MODEL.onnx --output_model OUTPUT_MODEL.onnx --block_size {block size} --bits {8,16} -``` - -## Dataset -Some models are quantized with extra datasets. 
-- [MP-PalmDet](../../models/palm_detection_mediapipe) and [MP-HandPose](../../models/handpose_estimation_mediapipe) are quantized with evaluation set of [FreiHAND](https://lmb.informatik.uni-freiburg.de/resources/datasets/FreihandDataset.en.html). Download the dataset from [this link](https://lmb.informatik.uni-freiburg.de/data/freihand/FreiHAND_pub_v2_eval.zip). Unpack it and replace `path/to/dataset` with the path to `FreiHAND_pub_v2_eval/evaluation/rgb`. diff --git a/tools/quantize/block_quantize.py b/tools/quantize/block_quantize.py deleted file mode 100644 index 4eb3d638..00000000 --- a/tools/quantize/block_quantize.py +++ /dev/null @@ -1,513 +0,0 @@ -import sys - -MIN_PYTHON_VERSION = (3, 7) - -if sys.version_info < MIN_PYTHON_VERSION: - raise ImportError("This script requires Python 3.7 or higher!") - -import argparse -import os -from dataclasses import dataclass, field -from typing import Dict, Tuple -from enum import Enum, auto - -import numpy as np -import onnx -from onnx import helper - -BITS_TO_NUMPY_TYPE = {8: np.int8, 16: np.int16} - - -SUPPORTED_OPS = {"Conv", "Gemm", "MatMul"} - -ONNX_OPSET = 21 - - -class WeightCategory(Enum): - INITIALIZER = auto() - CONSTANT = auto() - NONE = auto() - - -@dataclass -class BlockQuantizeConfig: - input_model_path: str - output_model_path: str - block_size: int - bits: int - verbose: bool - - -@dataclass -class BlockQuantizeResult: - quantized_weights: np.ndarray = field(default_factory=lambda: np.array([])) - scales: np.ndarray = field(default_factory=lambda: np.array([])) - zero_point: np.ndarray = field(default_factory=lambda: np.array([])) - block_size: int = 1 - axis: int = 1 - original_shape: Tuple = field(default_factory=tuple) - quantization_error: np.ndarray = field(default_factory=lambda: np.array([])) - - -def closest_divisor(number: int, divisor: int) -> int: - for d in range(divisor, 0, -1): - if number % d == 0: - return d - return 1 - - -def block_dequantize_tensor( - x: np.ndarray, block_axis: int, 
scale: np.ndarray, zero_point: np.ndarray -) -> np.ndarray: - repeats = x.shape[block_axis] // scale.shape[block_axis] - - x_scale_elementwise = np.repeat(scale, repeats=repeats, axis=block_axis) - x_zero_point_elementwise = np.repeat(zero_point, repeats=repeats, axis=block_axis) - - y = ( - x.astype(np.float32) - x_zero_point_elementwise.astype(np.float32) - ) * x_scale_elementwise - - return y - - -def block_quantize_tensor( - x: np.ndarray, - block_axis: int, - scale: np.ndarray, - zero_point: np.ndarray, - n_bits: int, -) -> np.ndarray: - repeats = x.shape[block_axis] // scale.shape[block_axis] - - y_scale_elementwise = np.repeat(scale, repeats=repeats, axis=block_axis) - y_zero_point_elementwise = np.repeat(zero_point, repeats=repeats, axis=block_axis) - - type_info = np.iinfo(BITS_TO_NUMPY_TYPE[n_bits]) - min_value = type_info.min - max_value = type_info.max - - y = np.rint(x / y_scale_elementwise + y_zero_point_elementwise) - y = np.clip(y, min_value, max_value) - y = y.astype(BITS_TO_NUMPY_TYPE[n_bits]) - - return y - - -def create_dequantize_node( - node_name, - quantized_weights, - scales, - zero_point, - dequantized_weights, - block_size, - axis, -) -> onnx.NodeProto: - block_size_attr = helper.make_attribute("block_size", block_size) - axis_attr = helper.make_attribute("axis", axis) - - n = helper.make_node( - "DequantizeLinear", - inputs=[quantized_weights, scales, zero_point], - outputs=[dequantized_weights], - name=node_name, - ) - n.attribute.extend([block_size_attr, axis_attr]) - return n - - -def create_reshape_node( - node_name, dequantized_weights, shape_tensor, reshaped_weights_name -) -> onnx.NodeProto: - return helper.make_node( - "Reshape", - inputs=[dequantized_weights, shape_tensor], - outputs=[reshaped_weights_name], - name=node_name, - ) - - -class BlockQuantizer: - def __init__(self, conf: BlockQuantizeConfig) -> None: - self.conf = conf - self.validate_conf() - - self.model = onnx.load(conf.input_model_path) - - if 
self.model.opset_import[0].version != ONNX_OPSET: - self.model = onnx.version_converter.convert_version(self.model, ONNX_OPSET) - - self.graph = self.model.graph - self.initializers_map = { - init.name: init for init in self.model.graph.initializer - } - self.costants_map = { - node.output[0]: next( - attr.t for attr in node.attribute if attr.name == "value" - ) - for node in self.model.graph.node - if node.op_type == "Constant" - } - - def validate_conf(self): - if not os.path.isfile(self.conf.input_model_path): - raise ValueError( - f"Input model path '{self.conf.input_model_path}' does not exist or is not a file." - ) - - if not self.conf.input_model_path.lower().endswith(".onnx"): - raise ValueError( - f"Input model path '{self.conf.input_model_path}' must have a .onnx extension." - ) - - if not self.conf.output_model_path.lower().endswith(".onnx"): - raise ValueError( - f"Output model path '{self.conf.output_model_path}' must have a .onnx extension." - ) - - if self.conf.block_size <= 0: - raise ValueError("Block size must be a positive integer.") - - if self.conf.bits not in BITS_TO_NUMPY_TYPE: - allowed_values = ", ".join([str(k) for k in BITS_TO_NUMPY_TYPE.keys()]) - raise ValueError( - f"Bits must be one of the following values: [{allowed_values}]." 
- ) - - def get_weight_category(self, name: str) -> WeightCategory: - if name in self.initializers_map: - return WeightCategory.INITIALIZER - if name in self.costants_map: - return WeightCategory.CONSTANT - else: - return WeightCategory.NONE - - def get_weight_tensor(self, name: str, category: WeightCategory) -> np.ndarray: - if category == WeightCategory.INITIALIZER: - return onnx.numpy_helper.to_array(self.initializers_map[name]) - elif category == WeightCategory.CONSTANT: - return onnx.numpy_helper.to_array(self.costants_map[name]) - else: - raise AssertionError("Invalid weight category") - - def remove_fp32_weights(self, name: str, category: WeightCategory): - if category == WeightCategory.INITIALIZER: - self.graph.initializer.remove( - next(init for init in self.graph.initializer if init.name == name) - ) - elif category == WeightCategory.CONSTANT: - self.graph.node.remove( - next( - node - for node in self.graph.node - if node.op_type == "Constant" and node.output[0] == name - ) - ) - else: - raise AssertionError("Invalid weight category") - - def compute_scale_zeropoint( - self, b_min: np.ndarray, b_max: np.ndarray - ) -> Tuple[np.ndarray, np.ndarray]: - assert ( - b_min <= b_max - ).all(), "minimum must not be greater than maximum when computing scale and zero point" - - # zero must be present in the range, this enforces qmin <= zero_point <= qmax - b_min = np.minimum(b_min, np.zeros_like(b_min, dtype=b_min.dtype)) - b_max = np.maximum(b_max, np.zeros_like(b_max, dtype=b_max.dtype)) - - type_info = np.iinfo(BITS_TO_NUMPY_TYPE[self.conf.bits]) - qmin = type_info.min - qmax = type_info.max - - dq = qmax - qmin - - scales = np.where(b_max != b_min, (b_max - b_min) / dq, 1.0) - - zeropoints = np.where(b_max != b_min, np.rint(qmin - b_min / scales), 0.0) - zeropoints = zeropoints.astype(BITS_TO_NUMPY_TYPE[self.conf.bits]) - - return (scales, zeropoints) - - def block_quantize(self, weight: np.ndarray) -> BlockQuantizeResult: - original_shape = weight.shape - - 
if weight.ndim > 1: - weight = weight.reshape((weight.shape[0], -1)) - quantization_axis = 1 - else: - quantization_axis = 0 - - block_size = closest_divisor( - weight.shape[quantization_axis], self.conf.block_size - ) - - assert ( - weight.shape[quantization_axis] % block_size == 0 - ), f"weight shape ({weight.shape[quantization_axis]}) must be divisible by block size ({block_size})" - - # Flattening the tensor after the quantization axis - new_shape = list(weight.shape[: quantization_axis + 1]) + [-1] - new_shape[quantization_axis] = new_shape[quantization_axis] // block_size - - blocked_weight = weight.reshape(new_shape) - - blocked_max = np.max(blocked_weight, -1) - blocked_min = np.min(blocked_weight, -1) - - scales, zeropoints = self.compute_scale_zeropoint(blocked_min, blocked_max) - - quantized_weight = block_quantize_tensor( - weight, quantization_axis, scales, zeropoints, self.conf.bits - ) - reconstructed_mat = block_dequantize_tensor( - quantized_weight, quantization_axis, scales, zeropoints - ) - - # Relative Norm - qerror = np.linalg.norm(reconstructed_mat - weight) / (np.linalg.norm(weight) + 1e-10) - - res = BlockQuantizeResult( - quantized_weight, - scales, - zeropoints, - block_size, - quantization_axis, - original_shape, - qerror, - ) - - return res - - def get_model_size(self, model_path: str) -> float: - size_bytes = os.path.getsize(model_path) - size_mb = size_bytes / 1024 - - return size_mb - - def display_summary(self, sqe: Dict[str, int]): - sqe_v = list(sqe.values()) - if len(sqe_v) == 0: - mse = 0 - print( - "Warning: No weights have been quantized, likely due to unsupported layers." 
- ) - else: - mse = sum(sqe_v) / len(sqe_v) - original_model_size = self.get_model_size(self.conf.input_model_path) - quantized_model_size = self.get_model_size(self.conf.output_model_path) - - if self.conf.verbose: - sorted_sqe = sorted(sqe.items(), key=lambda item: item[1], reverse=True) - longest_key_len = max(len(key) for key in sqe.keys()) - - print("Quantization error (Relative Norm) sorted in ascending order:") - - for key, value in sorted_sqe: - print(f"{key:<{longest_key_len}} : {value}") - - print("Done! Results saved in", self.conf.output_model_path) - print("\nSummary of Results:\n") - print(f"{'Metric':<30} {'Value':<10}") - print(f"{'-'*40}") - print(f"{'Relative Norm Error':<31} {mse:.6f}") - print(f"{'Original Model Size (KB)':<31} {original_model_size:,.2f}") - print(f"{'Block-Quantized Model Size (KB)':<30} {quantized_model_size:,.2f}") - - def run(self): - print("Quantizing the model...") - - quantized_inputs = [] - sqe = {} - - node_idx = 0 - - while node_idx < len(self.model.graph.node): - node = self.model.graph.node[node_idx] - - if node.op_type in SUPPORTED_OPS: - for input_idx, input_name in enumerate(node.input): - weightCategory = self.get_weight_category(input_name) - - # Skip quantization if weights are taken as external input - if weightCategory == WeightCategory.NONE: - continue - - weight = self.get_weight_tensor(input_name, weightCategory) - - quantized_weights_name = f"{input_name}_quantized" - quantized_node_name = f"{input_name}_quantized_node" - dequantized_weights_name = f"{input_name}_dequantized" - scales_name = f"{input_name}_scales" - zero_point_name = f"{input_name}_zero_point" - - shape_node_name = f"{input_name}_shape_node" - shape_name = f"{input_name}_shape" - reshaped_weights_name = f"{input_name}_reshaped" - - # Skip quantization if weights don't contain enough elements to create at least 1 block - if weight.size < self.conf.block_size: - continue - - reshape_needed = weight.ndim > 2 - - # In case of parameter 
sharing - if input_name in quantized_inputs: - node.input[input_idx] = ( - reshaped_weights_name - if reshape_needed - else dequantized_weights_name - ) - continue - - - block_quantize_res = self.block_quantize(weight) - - # Skip quantization if it wouldn't reduce the model size - if block_quantize_res.block_size == 1: - continue - - quantized_inputs.append(input_name) - - dequantize_node = create_dequantize_node( - quantized_node_name, - quantized_weights_name, - scales_name, - zero_point_name, - dequantized_weights_name, - block_quantize_res.block_size, - block_quantize_res.axis, - ) - - if reshape_needed: - reshape_node = create_reshape_node( - shape_node_name, - dequantized_weights_name, - shape_name, - reshaped_weights_name, - ) - - shape_tensor = onnx.numpy_helper.from_array( - np.array(block_quantize_res.original_shape), name=shape_name - ) - scale_initializer = onnx.numpy_helper.from_array( - block_quantize_res.scales, name=scales_name - ) - zero_point_initializer = onnx.numpy_helper.from_array( - block_quantize_res.zero_point, name=zero_point_name - ) - quantized_weights_initializer = onnx.numpy_helper.from_array( - block_quantize_res.quantized_weights, - name=quantized_weights_name, - ) - - dequantized_weights_info = helper.make_tensor_value_info( - dequantized_weights_name, - onnx.TensorProto.FLOAT, - block_quantize_res.quantized_weights.shape, - ) - - if reshape_needed: - shape_info = helper.make_tensor_value_info( - reshaped_weights_name, - onnx.TensorProto.FLOAT, - block_quantize_res.original_shape, - ) - - self.graph.initializer.extend( - [ - scale_initializer, - zero_point_initializer, - shape_tensor, - quantized_weights_initializer, - ] - ) - - self.remove_fp32_weights(input_name, weightCategory) - - node.input[input_idx] = ( - reshaped_weights_name - if reshape_needed - else dequantized_weights_name - ) - - # Preserving graph nodes topological order - if reshape_needed: - self.graph.node.insert(0, reshape_node) - node_idx += 1 - - 
self.graph.node.insert(0, dequantize_node) - node_idx += 1 - if reshape_needed: - self.graph.value_info.insert(0, shape_info) - self.graph.value_info.insert(0, dequantized_weights_info) - - sqe[input_name] = block_quantize_res.quantization_error - - node_idx += 1 - - onnx.checker.check_model(self.model, full_check=True) - onnx.save(self.model, self.conf.output_model_path) - - self.display_summary(sqe) - - -def setup_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Blockwise quantization tool") - - parser.add_argument( - "-i", - "--input_model", - type=str, - help="The path of onnx model to quantize", - required=True, - ) - parser.add_argument( - "-bs", - "--block_size", - type=int, - help="The maximum size of quantization block", - required=True, - ) - parser.add_argument( - "-b", - "--bits", - type=int, - help="Quantization bits", - choices=[8, 16], - default=8, - required=False, - ) - parser.add_argument( - "-o", - "--output_model", - type=str, - help="The output model path", - default="block_quantized_model.onnx", - required=False, - ) - parser.add_argument( - "-v", - "--verbose", - action="store_true", - help="Enable verbose output", - required=False, - ) - - return parser.parse_args() - - -if __name__ == "__main__": - args = setup_args() - - quantization_config = BlockQuantizeConfig( - input_model_path=args.input_model, - output_model_path=args.output_model, - block_size=args.block_size, - bits=args.bits, - verbose=args.verbose - ) - - quantizer = BlockQuantizer(quantization_config) - quantizer.run() diff --git a/tools/quantize/inc_configs/fer.yaml b/tools/quantize/inc_configs/fer.yaml deleted file mode 100644 index 69380842..00000000 --- a/tools/quantize/inc_configs/fer.yaml +++ /dev/null @@ -1,38 +0,0 @@ -version: 1.0 - -model: # mandatory. used to specify model specific information. - name: fer - framework: onnxrt_qlinearops # mandatory. 
supported values are tensorflow, pytorch, pytorch_ipex, onnxrt_integer, onnxrt_qlinear or mxnet; allow new framework backend extension. - -quantization: # optional. tuning constraints on model-wise for advance user to reduce tuning space. - approach: post_training_static_quant # optional. default value is post_training_static_quant. - calibration: - dataloader: - batch_size: 1 - dataset: - dummy: - shape: [1, 3, 112, 112] - low: -1.0 - high: 1.0 - dtype: float32 - label: True - - model_wise: # optional. tuning constraints on model-wise for advance user to reduce tuning space. - weight: - granularity: per_tensor - scheme: asym - dtype: int8 - algorithm: minmax - activation: - granularity: per_tensor - scheme: asym - dtype: int8 - algorithm: minmax - -tuning: - accuracy_criterion: - relative: 0.02 # optional. default value is relative, other value is absolute. this example allows relative accuracy loss: 1%. - exit_policy: - timeout: 0 # optional. tuning timeout (seconds). default value is 0 which means early stop. combine with max_trials field to decide when to exit. - max_trials: 50 # optional. max tune times. default value is 100. combine with timeout field to decide when to exit. - random_seed: 9527 # optional. random seed for deterministic tuning. diff --git a/tools/quantize/inc_configs/lpd_yunet.yaml b/tools/quantize/inc_configs/lpd_yunet.yaml deleted file mode 100644 index 5e700c60..00000000 --- a/tools/quantize/inc_configs/lpd_yunet.yaml +++ /dev/null @@ -1,52 +0,0 @@ -# -# Copyright (c) 2021 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -version: 1.0 - -model: # mandatory. used to specify model specific information. - name: lpd_yunet - framework: onnxrt_qlinearops # mandatory. supported values are tensorflow, pytorch, pytorch_ipex, onnxrt_integer, onnxrt_qlinear or mxnet; allow new framework backend extension. - -quantization: # optional. tuning constraints on model-wise for advance user to reduce tuning space. - approach: post_training_static_quant # optional. default value is post_training_static_quant. - calibration: - dataloader: - batch_size: 1 - dataset: - dummy: - shape: [1, 3, 240, 320] - low: 0.0 - high: 127.0 - dtype: float32 - label: True - - model_wise: # optional. tuning constraints on model-wise for advance user to reduce tuning space. - weight: - granularity: per_tensor - scheme: asym - dtype: int8 - algorithm: minmax - activation: - granularity: per_tensor - scheme: asym - dtype: int8 - algorithm: minmax - -tuning: - accuracy_criterion: - relative: 0.02 # optional. default value is relative, other value is absolute. this example allows relative accuracy loss: 1%. - exit_policy: - timeout: 0 # optional. tuning timeout (seconds). default value is 0 which means early stop. combine with max_trials field to decide when to exit. - random_seed: 9527 # optional. random seed for deterministic tuning. diff --git a/tools/quantize/inc_configs/mobilenet.yaml b/tools/quantize/inc_configs/mobilenet.yaml deleted file mode 100644 index cedf006e..00000000 --- a/tools/quantize/inc_configs/mobilenet.yaml +++ /dev/null @@ -1,98 +0,0 @@ -# -# Copyright (c) 2021 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -version: 1.0 - -model: # mandatory. used to specify model specific information. - name: mobilenetv2 - framework: onnxrt_qlinearops # mandatory. supported values are tensorflow, pytorch, pytorch_ipex, onnxrt_integer, onnxrt_qlinear or mxnet; allow new framework backend extension. - -quantization: # optional. tuning constraints on model-wise for advance user to reduce tuning space. - approach: post_training_static_quant # optional. default value is post_training_static_quant. - calibration: - dataloader: - batch_size: 1 - dataset: - ImagenetRaw: - data_path: /path/to/imagenet/val - image_list: /path/to/imagenet/val.txt # download from http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz - transform: - Rescale: {} - Resize: - size: 256 - CenterCrop: - size: 224 - Normalize: - mean: [0.485, 0.456, 0.406] - std: [0.229, 0.224, 0.225] - Transpose: - perm: [2, 0, 1] - Cast: - dtype: float32 -evaluation: # optional. required if user doesn't provide eval_func in lpot.Quantization. - accuracy: # optional. required if user doesn't provide eval_func in lpot.Quantization. - metric: - topk: 1 # built-in metrics are topk, map, f1, allow user to register new metric. 
- dataloader: - batch_size: 1 - dataset: - ImagenetRaw: - data_path: /path/to/imagenet/val - image_list: /path/to/imagenet/val.txt # download from http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz - transform: - Rescale: {} - Resize: - size: 256 - CenterCrop: - size: 224 - Normalize: - mean: [0.485, 0.456, 0.406] - std: [0.229, 0.224, 0.225] - Transpose: - perm: [2, 0, 1] - Cast: - dtype: float32 - performance: # optional. used to benchmark performance of passing model. - warmup: 10 - iteration: 1000 - configs: - cores_per_instance: 4 - num_of_instance: 1 - dataloader: - batch_size: 1 - dataset: - ImagenetRaw: - data_path: /path/to/imagenet/val - image_list: /path/to/imagenet/val.txt # download from http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz - transform: - Rescale: {} - Resize: - size: 256 - CenterCrop: - size: 224 - Normalize: - mean: [0.485, 0.456, 0.406] - std: [0.229, 0.224, 0.225] - Transpose: - perm: [2, 0, 1] - Cast: - dtype: float32 - -tuning: - accuracy_criterion: - relative: 0.02 # optional. default value is relative, other value is absolute. this example allows relative accuracy loss: 1%. - exit_policy: - timeout: 0 # optional. tuning timeout (seconds). default value is 0 which means early stop. combine with max_trials field to decide when to exit. - random_seed: 9527 # optional. random seed for deterministic tuning. diff --git a/tools/quantize/inc_configs/mp_handpose.yaml b/tools/quantize/inc_configs/mp_handpose.yaml deleted file mode 100644 index 1ef66a27..00000000 --- a/tools/quantize/inc_configs/mp_handpose.yaml +++ /dev/null @@ -1,52 +0,0 @@ -# -# Copyright (c) 2021 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -version: 1.0 - -model: # mandatory. used to specify model specific information. - name: mp_handpose - framework: onnxrt_qlinearops # mandatory. supported values are tensorflow, pytorch, pytorch_ipex, onnxrt_integer, onnxrt_qlinear or mxnet; allow new framework backend extension. - -quantization: # optional. tuning constraints on model-wise for advance user to reduce tuning space. - approach: post_training_static_quant # optional. default value is post_training_static_quant. - calibration: - dataloader: - batch_size: 1 - dataset: - dummy: - shape: [1, 256, 256, 3] - low: -1.0 - high: 1.0 - dtype: float32 - label: True - - model_wise: # optional. tuning constraints on model-wise for advance user to reduce tuning space. - weight: - granularity: per_tensor - scheme: asym - dtype: int8 - algorithm: minmax - activation: - granularity: per_tensor - scheme: asym - dtype: int8 - algorithm: minmax - -tuning: - accuracy_criterion: - relative: 0.02 # optional. default value is relative, other value is absolute. this example allows relative accuracy loss: 1%. - exit_policy: - timeout: 0 # optional. tuning timeout (seconds). default value is 0 which means early stop. combine with max_trials field to decide when to exit. - random_seed: 9527 # optional. random seed for deterministic tuning. 
diff --git a/tools/quantize/quantize-inc.py b/tools/quantize/quantize-inc.py deleted file mode 100644 index f72f5c92..00000000 --- a/tools/quantize/quantize-inc.py +++ /dev/null @@ -1,150 +0,0 @@ -import os -import sys -import numpy as np -import cv2 as cv - -import onnx -from neural_compressor.experimental import Quantization, common -from neural_compressor.experimental.metric import BaseMetric - - -class Accuracy(BaseMetric): - def __init__(self, *args): - self.pred_list = [] - self.label_list = [] - self.samples = 0 - - def update(self, predict, label): - predict = np.array(predict) - label = np.array(label) - self.pred_list.append(np.argmax(predict[0])) - self.label_list.append(label[0][0]) - self.samples += 1 - - def reset(self): - self.pred_list = [] - self.label_list = [] - self.samples = 0 - - def result(self): - correct_num = np.sum(np.array(self.pred_list) == np.array(self.label_list)) - return correct_num / self.samples - - -class Quantize: - def __init__(self, model_path, config_path, custom_dataset=None, eval_dataset=None, metric=None): - self.model_path = model_path - self.config_path = config_path - self.custom_dataset = custom_dataset - self.eval_dataset = eval_dataset - self.metric = metric - - def run(self): - print('Quantizing (int8) with Intel\'s Neural Compressor:') - print('\tModel: {}'.format(self.model_path)) - print('\tConfig: {}'.format(self.config_path)) - - output_name = '{}-int8-quantized.onnx'.format(self.model_path[:-5]) - - model = onnx.load(self.model_path) - quantizer = Quantization(self.config_path) - quantizer.model = common.Model(model) - if self.custom_dataset is not None: - quantizer.calib_dataloader = common.DataLoader(self.custom_dataset) - if self.eval_dataset is not None: - quantizer.eval_dataloader = common.DataLoader(self.eval_dataset) - if self.metric is not None: - quantizer.metric = common.Metric(metric_cls=self.metric, name='metric') - q_model = quantizer() - q_model.save(output_name) - - -class Dataset: - def 
__init__(self, root, size=None, dim='chw', scale=1.0, mean=0.0, std=1.0, swapRB=False, toFP32=False): - self.root = root - self.size = size - self.dim = dim - self.scale = scale - self.mean = mean - self.std = std - self.swapRB = swapRB - self.toFP32 = toFP32 - - self.image_list, self.label_list = self.load_image_list(self.root) - - def load_image_list(self, path): - image_list = [] - label_list = [] - for f in os.listdir(path): - if not f.endswith('.jpg'): - continue - image_list.append(os.path.join(path, f)) - label_list.append(1) - return image_list, label_list - - def __getitem__(self, idx): - img = cv.imread(self.image_list[idx]) - - if self.swapRB: - img = cv.cvtColor(img, cv.COLOR_BGR2RGB) - - if self.size: - img = cv.resize(img, dsize=self.size) - - if self.toFP32: - img = img.astype(np.float32) - - img = img * self.scale - img = img - self.mean - img = img / self.std - - if self.dim == 'chw': - img = img.transpose(2, 0, 1) # hwc -> chw - - return img, self.label_list[idx] - - def __len__(self): - return len(self.image_list) - - -class FerDataset(Dataset): - def __init__(self, root, size=None, dim='chw', scale=1.0, mean=0.0, std=1.0, swapRB=False, toFP32=False): - super(FerDataset, self).__init__(root, size, dim, scale, mean, std, swapRB, toFP32) - - def load_image_list(self, path): - image_list = [] - label_list = [] - for f in os.listdir(path): - if not f.endswith('.jpg'): - continue - image_list.append(os.path.join(path, f)) - label_list.append(int(f.split("_")[2])) - return image_list, label_list - - -models = dict( - mobilenetv1=Quantize(model_path='../../models/image_classification_mobilenet/image_classification_mobilenetv1_2022apr.onnx', - config_path='./inc_configs/mobilenet.yaml'), - mobilenetv2=Quantize(model_path='../../models/image_classification_mobilenet/image_classification_mobilenetv2_2022apr.onnx', - config_path='./inc_configs/mobilenet.yaml'), - 
mp_handpose=Quantize(model_path='../../models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2022may.onnx', - config_path='./inc_configs/mp_handpose.yaml', - custom_dataset=Dataset(root='../../benchmark/data/palm_detection', dim='hwc', swapRB=True, mean=127.5, std=127.5, toFP32=True)), - fer=Quantize(model_path='../../models/facial_expression_recognition/facial_expression_recognition_mobilefacenet_2022july.onnx', - config_path='./inc_configs/fer.yaml', - custom_dataset=FerDataset(root='../../benchmark/data/facial_expression_recognition/fer_calibration', size=(112, 112), toFP32=True, swapRB=True, scale=1./255, mean=0.5, std=0.5), - eval_dataset=FerDataset(root='../../benchmark/data/facial_expression_recognition/fer_evaluation', size=(112, 112), toFP32=True, swapRB=True, scale=1./255, mean=0.5, std=0.5), - metric=Accuracy), -) - -if __name__ == '__main__': - selected_models = [] - for i in range(1, len(sys.argv)): - selected_models.append(sys.argv[i]) - if not selected_models: - selected_models = list(models.keys()) - print('Models to be quantized: {}'.format(str(selected_models))) - - for selected_model_name in selected_models: - q = models[selected_model_name] - q.run() diff --git a/tools/quantize/quantize-ort.py b/tools/quantize/quantize-ort.py deleted file mode 100644 index aba57f71..00000000 --- a/tools/quantize/quantize-ort.py +++ /dev/null @@ -1,149 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -import os -import sys -import numpy as np -import cv2 as cv - -import onnx -from onnx import version_converter -import onnxruntime -from onnxruntime.quantization import quantize_static, CalibrationDataReader, QuantType, QuantFormat, quant_pre_process - -from transform import Compose, Resize, CenterCrop, Normalize, ColorConvert, HandAlign - -class DataReader(CalibrationDataReader): - def __init__(self, model_path, image_dir, transforms, data_dim): - model = onnx.load(model_path) - self.input_name = model.graph.input[0].name - self.transforms = transforms - self.data_dim = data_dim - self.data = self.get_calibration_data(image_dir) - self.enum_data_dicts = iter([{self.input_name: x} for x in self.data]) - - def get_next(self): - return next(self.enum_data_dicts, None) - - def get_calibration_data(self, image_dir): - blobs = [] - supported = ["jpg", "png"] # supported file suffix - for image_name in os.listdir(image_dir): - image_name_suffix = image_name.split('.')[-1].lower() - if image_name_suffix not in supported: - continue - img = cv.imread(os.path.join(image_dir, image_name)) - img = self.transforms(img) - if img is None: - continue - blob = cv.dnn.blobFromImage(img) - if self.data_dim == 'hwc': - blob = cv.transposeND(blob, [0, 2, 3, 1]) - blobs.append(blob) - return blobs - -class Quantize: - def __init__(self, model_path, calibration_image_dir, transforms=Compose(), per_channel=False, act_type='int8', wt_type='int8', data_dim='chw', nodes_to_exclude=[]): - self.type_dict = {"uint8" : QuantType.QUInt8, "int8" : QuantType.QInt8} - - self.model_path = model_path - self.calibration_image_dir = calibration_image_dir - self.transforms = transforms - self.per_channel = per_channel - self.act_type = act_type - self.wt_type = wt_type - self.nodes_to_exclude = nodes_to_exclude - - # data reader - self.dr = DataReader(self.model_path, self.calibration_image_dir, self.transforms, data_dim) - - def check_opset(self): - model = onnx.load(self.model_path) - if 
model.opset_import[0].version != 13: - print('\tmodel opset version: {}. Converting to opset 13'.format(model.opset_import[0].version)) - # convert opset version to 13 - model_opset13 = version_converter.convert_version(model, 13) - # save converted model - output_name = '{}-opset13.onnx'.format(self.model_path[:-5]) - onnx.save_model(model_opset13, output_name) - # update model_path for quantization - return output_name - return self.model_path - - def run(self): - print('Quantizing {}: act_type {}, wt_type {}'.format(self.model_path, self.act_type, self.wt_type)) - new_model_path = self.check_opset() - quant_pre_process(new_model_path, new_model_path) - output_name = '{}_{}.onnx'.format(self.model_path[:-5], self.wt_type) - quantize_static(new_model_path, output_name, self.dr, - quant_format=QuantFormat.QOperator, # start from onnxruntime==1.11.0, quant_format is set to QuantFormat.QDQ by default, which performs fake quantization - per_channel=self.per_channel, - weight_type=self.type_dict[self.wt_type], - activation_type=self.type_dict[self.act_type], - nodes_to_exclude=self.nodes_to_exclude) - if new_model_path != self.model_path: - os.remove(new_model_path) - print('\tQuantized model saved to {}'.format(output_name)) - -models=dict( - yunet=Quantize(model_path='../../models/face_detection_yunet/face_detection_yunet_2023mar.onnx', - calibration_image_dir='../../benchmark/data/face_detection', - transforms=Compose([Resize(size=(160, 120))]), - nodes_to_exclude=['MaxPool_5', 'MaxPool_18', 'MaxPool_25', 'MaxPool_32'], - ), - sface=Quantize(model_path='../../models/face_recognition_sface/face_recognition_sface_2021dec.onnx', - calibration_image_dir='../../benchmark/data/face_recognition', - transforms=Compose([Resize(size=(112, 112))])), - pphumanseg=Quantize(model_path='../../models/human_segmentation_pphumanseg/human_segmentation_pphumanseg_2023mar.onnx', - calibration_image_dir='../../benchmark/data/human_segmentation', - transforms=Compose([Resize(size=(192, 
192))])), - ppresnet50=Quantize(model_path='../../models/image_classification_ppresnet/image_classification_ppresnet50_2022jan.onnx', - calibration_image_dir='../../benchmark/data/image_classification', - transforms=Compose([Resize(size=(224, 224))])), - # TBD: VitTrack - youtureid=Quantize(model_path='../../models/person_reid_youtureid/person_reid_youtu_2021nov.onnx', - calibration_image_dir='../../benchmark/data/person_reid', - transforms=Compose([Resize(size=(128, 256))])), - ppocrv3det_en=Quantize(model_path='../../models/text_detection_ppocr/text_detection_en_ppocrv3_2023may.onnx', - calibration_image_dir='../../benchmark/data/text', - transforms=Compose([Resize(size=(736, 736)), - Normalize(mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375])])), - ppocrv3det_cn=Quantize(model_path='../../models/text_detection_ppocr/text_detection_cn_ppocrv3_2023may.onnx', - calibration_image_dir='../../benchmark/data/text', - transforms=Compose([Resize(size=(736, 736)), - Normalize(mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375])])), - crnn_en=Quantize(model_path='../../models/text_recognition_crnn/text_recognition_CRNN_EN_2021sep.onnx', - calibration_image_dir='../../benchmark/data/text', - transforms=Compose([Resize(size=(100, 32)), Normalize(mean=[127.5, 127.5, 127.5], std=[127.5, 127.5, 127.5]), ColorConvert(ctype=cv.COLOR_BGR2GRAY)])), - crnn_cn=Quantize(model_path='../../models/text_recognition_crnn/text_recognition_CRNN_CN_2021nov.onnx', - calibration_image_dir='../../benchmark/data/text', - transforms=Compose([Resize(size=(100, 32))])), - mp_palmdet=Quantize(model_path='../../models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb.onnx', - calibration_image_dir='path/to/dataset', - transforms=Compose([Resize(size=(192, 192)), Normalize(std=[255, 255, 255]), - ColorConvert(ctype=cv.COLOR_BGR2RGB)]), data_dim='hwc'), - mp_handpose=Quantize(model_path='../../models/handpose_estimation_mediapipe/handpose_estimation_mediapipe_2023feb.onnx', - 
calibration_image_dir='path/to/dataset', - transforms=Compose([HandAlign("mp_handpose"), Resize(size=(224, 224)), Normalize(std=[255, 255, 255]), - ColorConvert(ctype=cv.COLOR_BGR2RGB)]), data_dim='hwc'), - lpd_yunet=Quantize(model_path='../../models/license_plate_detection_yunet/license_plate_detection_lpd_yunet_2023mar.onnx', - calibration_image_dir='../../benchmark/data/license_plate_detection', - transforms=Compose([Resize(size=(320, 240))]), - nodes_to_exclude=['MaxPool_5', 'MaxPool_18', 'MaxPool_25', 'MaxPool_32', 'MaxPool_39'], - ), -) - -if __name__ == '__main__': - selected_models = [] - for i in range(1, len(sys.argv)): - selected_models.append(sys.argv[i]) - if not selected_models: - selected_models = list(models.keys()) - print('Models to be quantized: {}'.format(str(selected_models))) - - for selected_model_name in selected_models: - q = models[selected_model_name] - q.run() diff --git a/tools/quantize/requirements.txt b/tools/quantize/requirements.txt deleted file mode 100644 index d8519a95..00000000 --- a/tools/quantize/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -opencv-python>=4.10.0 -numpy -onnx -onnxruntime -onnxruntime-extensions -neural-compressor diff --git a/tools/quantize/transform.py b/tools/quantize/transform.py deleted file mode 100644 index 10d97521..00000000 --- a/tools/quantize/transform.py +++ /dev/null @@ -1,129 +0,0 @@ -# This file is part of OpenCV Zoo project. -# It is subject to the license terms in the LICENSE file found in the same directory. -# -# Copyright (C) 2021, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved. -# Third party copyrights are property of their respective owners. 
- -import collections -import numpy as np -import cv2 as cv -import sys - -class Compose: - def __init__(self, transforms=[]): - self.transforms = transforms - - def __call__(self, img): - for t in self.transforms: - img = t(img) - if img is None: - break - return img - -class Resize: - def __init__(self, size, interpolation=cv.INTER_LINEAR): - self.size = size - self.interpolation = interpolation - - def __call__(self, img): - return cv.resize(img, self.size) - -class CenterCrop: - def __init__(self, size): - self.size = size # w, h - - def __call__(self, img): - h, w, _ = img.shape - ws = int(w / 2 - self.size[0] / 2) - hs = int(h / 2 - self.size[1] / 2) - return img[hs:hs+self.size[1], ws:ws+self.size[0], :] - -class Normalize: - def __init__(self, mean=None, std=None): - self.mean = mean - self.std = std - - def __call__(self, img): - img = img.astype("float32") - if self.mean is not None: - img[:, :, 0] = img[:, :, 0] - self.mean[0] - img[:, :, 1] = img[:, :, 1] - self.mean[1] - img[:, :, 2] = img[:, :, 2] - self.mean[2] - if self.std is not None: - img[:, :, 0] = img[:, :, 0] / self.std[0] - img[:, :, 1] = img[:, :, 1] / self.std[1] - img[:, :, 2] = img[:, :, 2] / self.std[2] - return img - -class ColorConvert: - def __init__(self, ctype): - self.ctype = ctype - - def __call__(self, img): - return cv.cvtColor(img, self.ctype) - -class HandAlign: - def __init__(self, model): - self.model = model - sys.path.append('../../models/palm_detection_mediapipe') - from mp_palmdet import MPPalmDet - self.palm_detector = MPPalmDet(modelPath='../../models/palm_detection_mediapipe/palm_detection_mediapipe_2023feb.onnx', nmsThreshold=0.3, scoreThreshold=0.9) - - def __call__(self, img): - return self.mp_handpose_align(img) - - def mp_handpose_align(self, img): - palms = self.palm_detector.infer(img) - if len(palms) == 0: - return None - palm = palms[0] - palm_bbox = palm[0:4].reshape(2, 2) - palm_landmarks = palm[4:18].reshape(7, 2) - p1 = palm_landmarks[0] - p2 = 
palm_landmarks[2] - radians = np.pi / 2 - np.arctan2(-(p2[1] - p1[1]), p2[0] - p1[0]) - radians = radians - 2 * np.pi * np.floor((radians + np.pi) / (2 * np.pi)) - angle = np.rad2deg(radians) - # get bbox center - center_palm_bbox = np.sum(palm_bbox, axis=0) / 2 - # get rotation matrix - rotation_matrix = cv.getRotationMatrix2D(center_palm_bbox, angle, 1.0) - # get rotated image - rotated_image = cv.warpAffine(img, rotation_matrix, (img.shape[1], img.shape[0])) - # get bounding boxes from rotated palm landmarks - homogeneous_coord = np.c_[palm_landmarks, np.ones(palm_landmarks.shape[0])] - rotated_palm_landmarks = np.array([ - np.dot(homogeneous_coord, rotation_matrix[0]), - np.dot(homogeneous_coord, rotation_matrix[1])]) - # get landmark bounding box - rotated_palm_bbox = np.array([ - np.amin(rotated_palm_landmarks, axis=1), - np.amax(rotated_palm_landmarks, axis=1)]) # [top-left, bottom-right] - - # shift bounding box - wh_rotated_palm_bbox = rotated_palm_bbox[1] - rotated_palm_bbox[0] - shift_vector = [0, -0.1] * wh_rotated_palm_bbox - rotated_palm_bbox = rotated_palm_bbox + shift_vector - # squarify bounding boxx - center_rotated_plam_bbox = np.sum(rotated_palm_bbox, axis=0) / 2 - wh_rotated_palm_bbox = rotated_palm_bbox[1] - rotated_palm_bbox[0] - new_half_size = np.amax(wh_rotated_palm_bbox) / 2 - rotated_palm_bbox = np.array([ - center_rotated_plam_bbox - new_half_size, - center_rotated_plam_bbox + new_half_size]) - - # enlarge bounding box - center_rotated_plam_bbox = np.sum(rotated_palm_bbox, axis=0) / 2 - wh_rotated_palm_bbox = rotated_palm_bbox[1] - rotated_palm_bbox[0] - new_half_size = wh_rotated_palm_bbox * 1.5 - rotated_palm_bbox = np.array([ - center_rotated_plam_bbox - new_half_size, - center_rotated_plam_bbox + new_half_size]) - - # Crop the rotated image by the bounding box - [[x1, y1], [x2, y2]] = rotated_palm_bbox.astype(np.int32) - diff = np.maximum([-x1, -y1, x2 - rotated_image.shape[1], y2 - rotated_image.shape[0]], 0) - [x1, y1, x2, y2] = 
[x1, y1, x2, y2] + diff - crop = rotated_image[y1:y2, x1:x2, :] - crop = cv.copyMakeBorder(crop, diff[1], diff[3], diff[0], diff[2], cv.BORDER_CONSTANT, value=(0, 0, 0)) - return crop From daa0cd3d475d2083c4aa38fd30754efbecc0e429 Mon Sep 17 00:00:00 2001 From: muhmammadawaisofficial Date: Mon, 9 Feb 2026 02:06:02 +0500 Subject: [PATCH 2/2] feat: Add benchmark results for Windows (Intel i5-12400F) --- benchmark/README.md | 1003 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1003 insertions(+) create mode 100644 benchmark/README.md diff --git a/benchmark/README.md b/benchmark/README.md new file mode 100644 index 00000000..e3121eef --- /dev/null +++ b/benchmark/README.md @@ -0,0 +1,1003 @@ +# OpenCV Zoo Benchmark + +Benchmarking the speed of OpenCV DNN inferring different models in the zoo. Result of each model includes the time of its preprocessing, inference and postprocessing stages. + +Data for benchmarking will be downloaded and loaded in [data](./data) based on given config. + +## Preparation + +1. Install `python >= 3.6`. +2. Install dependencies: `pip install -r requirements.txt`. +3. Download data for benchmarking. + 1. Download all data: `python download_data.py` + 2. Download one or more specified data: `python download_data.py face text`. Available names can be found in `download_data.py`. + 3. You can also download all data from https://pan.baidu.com/s/18sV8D4vXUb2xC9EG45k7bg (code: pvrw). Please place and extract data packages under [./data](./data). + +## Benchmarking + +**Linux**: + +```shell +export PYTHONPATH=$PYTHONPATH:.. 
+ +# Single config +python benchmark.py --cfg ./config/face_detection_yunet.yaml + +# All configs +python benchmark.py --all + +# All configs but only fp32 models (--fp32, --fp16, --int8 --int8bq are available for now) +python benchmark.py --all --fp32 + +# All configs but exclude some of them (fill with config name keywords, not sensitive to upper/lower case, seperate with colons) +python benchmark.py --all --cfg_exclude wechat +python benchmark.py --all --cfg_exclude wechat:crnn + +# All configs but exclude some of the models (fill with exact model names, sensitive to upper/lower case, seperate with colons) +python benchmark.py --all --model_exclude license_plate_detection_lpd_yunet_2023mar_int8.onnx:human_segmentation_pphumanseg_2023mar_int8.onnx + +# All configs with overwritten backend and target (run with --help to get available combinations) +python benchmark.py --all --cfg_overwrite_backend_target 1 +``` + +**Windows**: +- CMD + ```shell + set PYTHONPATH=%PYTHONPATH%;.. + python benchmark.py --cfg ./config/face_detection_yunet.yaml + ``` + +- PowerShell + ```shell + $env:PYTHONPATH=$env:PYTHONPATH+";.." + python benchmark.py --cfg ./config/face_detection_yunet.yaml + ``` + +## Detailed Results + +Benchmark is done with latest opencv-python & opencv-contrib-python (current 4.10.0) on the following platforms. Some models are excluded because of support issues. + +### Intel 12700K + +Specs: [details](https://www.intel.com/content/www/us/en/products/sku/134594/intel-core-i712700k-processor-25m-cache-up-to-5-00-ghz/specifications.html) +- CPU: 8 Performance-cores, 4 Efficient-cores, 20 threads + - Performance-core: 3.60 GHz base freq, turbo up to 4.90 GHz + - Efficient-core: 2.70 GHz base freq, turbo up to 3.80 GHz + +CPU: + +``` +$ python3 benchmark.py --all +Benchmarking ... 
+backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +0.69 0.70 0.68 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +0.79 0.80 0.68 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +5.09 5.13 4.96 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +6.50 6.79 4.96 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +1.79 1.76 1.75 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +2.92 3.11 1.75 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +2.40 2.43 2.37 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +3.11 3.15 2.37 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +5.59 5.56 5.28 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +6.07 6.22 5.28 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +3.13 3.14 3.05 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +3.04 3.02 2.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +3.46 3.03 2.92 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +3.84 3.77 2.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +19.47 19.47 19.08 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +21.52 21.86 19.08 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +5.68 5.66 5.51 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +7.41 7.36 5.51 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +41.02 40.99 40.86 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +42.23 42.30 40.86 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +78.77 79.76 77.16 [640, 640] YoloX with 
['object_detection_yolox_2022nov.onnx'] +75.69 75.58 72.57 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +4.01 3.84 3.79 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +5.35 5.41 5.22 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +6.73 6.85 5.22 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +7.65 7.65 7.55 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +15.56 15.57 15.10 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +16.67 16.57 15.10 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +6.33 6.63 6.14 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +1.19 1.30 1.07 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] +18.76 19.59 18.48 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +18.59 19.33 18.12 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +22.05 18.60 18.12 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +24.47 25.06 18.12 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +10.61 10.66 10.50 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +11.03 11.23 10.50 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +9.85 11.62 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +10.02 9.71 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +9.53 7.83 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +9.68 9.21 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +9.85 10.63 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +9.63 9.28 7.74 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### Raspberry Pi 4B + +Specs: 
[details](https://www.raspberrypi.com/products/raspberry-pi-4-model-b/specifications/) +- CPU: Broadcom BCM2711, Quad core Cortex-A72 (ARM v8) 64-bit SoC @ 1.5 GHz. + +CPU: + +``` +$ python3 benchmark.py --all +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +6.23 6.27 6.18 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +6.68 6.73 6.18 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +68.82 69.06 68.45 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +87.42 89.84 68.45 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +27.81 27.77 27.67 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +35.71 36.67 27.67 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +42.58 42.41 42.25 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +46.49 46.95 42.25 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +71.35 71.62 70.78 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +73.81 74.23 70.78 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +64.20 64.30 63.98 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +57.91 58.41 52.53 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +61.35 52.83 52.53 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +61.49 61.28 52.53 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +420.93 420.73 419.04 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +410.96 395.74 364.68 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +153.87 152.71 140.85 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +157.86 145.90 140.85 [320, 240] 
LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +214.59 211.95 210.98 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +215.09 238.39 208.18 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +1614.13 1639.80 1476.58 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +1597.92 1599.12 1476.58 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +48.55 46.87 41.75 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +97.05 95.40 80.93 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +112.39 116.22 80.93 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +105.60 113.27 88.55 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +478.89 498.05 444.14 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +442.56 477.87 369.59 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +116.15 120.13 106.81 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +5.90 5.90 5.81 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] +325.02 325.88 303.55 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +323.54 332.45 303.55 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +372.32 328.56 303.55 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +407.90 411.97 303.55 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +235.70 236.07 234.87 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +240.95 241.14 234.87 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +226.09 247.02 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +229.25 224.63 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +224.10 201.29 200.44 [1280, 720] CRNN with 
['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +223.58 219.82 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +225.60 243.89 200.44 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +220.97 223.16 193.91 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### Jetson Nano B01 + +Specs: [details](https://developer.nvidia.com/embedded/jetson-nano-developer-kit) +- CPU: Quad-core ARM A57 @ 1.43 GHz +- GPU: 128-core NVIDIA Maxwell + +CPU: + +``` +$ python3 benchmark.py --all +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +5.62 5.54 5.52 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +6.14 6.24 5.52 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +64.80 64.95 64.60 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +78.31 79.85 64.60 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +26.54 26.61 26.37 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +33.96 34.85 26.37 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +38.45 41.45 38.20 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +42.62 43.20 38.20 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +64.95 64.85 64.73 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +72.39 73.16 64.73 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +65.72 65.98 65.59 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +56.66 57.56 49.10 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +62.09 49.27 49.10 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +62.17 62.02 49.10 [224, 224] MobileNet with 
['image_classification_mobilenetv2_2022apr_int8.onnx'] +346.78 348.06 345.53 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +371.11 373.54 345.53 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +134.36 134.33 133.45 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +140.62 140.94 133.45 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +215.67 216.76 214.69 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +216.58 216.78 214.69 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +1209.12 1213.05 1201.68 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +1240.02 1249.95 1201.68 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +48.39 47.38 45.00 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +75.30 75.25 74.96 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +83.83 84.99 74.96 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +87.65 87.59 87.37 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +356.78 357.77 355.69 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +346.84 351.10 335.96 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +75.20 79.36 73.71 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +5.56 5.56 5.48 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] +209.80 210.04 208.84 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +209.60 212.74 208.49 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +254.56 211.17 208.49 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +286.57 296.56 208.49 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +252.60 252.48 252.21 [1280, 720] CRNN with 
['text_recognition_CRNN_CH_2021sep.onnx'] +259.28 261.38 252.21 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +245.18 266.94 220.49 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +247.72 244.25 220.49 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +241.63 221.43 219.06 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +243.46 238.98 219.06 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +246.87 256.05 219.06 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +243.37 238.90 219.06 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +GPU (CUDA-FP32): + +``` +$ python3 benchmark.py --all --fp32 --cfg_exclude wechat --cfg_overwrite_backend_target 1 +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_CUDA +target=cv.dnn.DNN_TARGET_CUDA +mean median min input size model +10.99 10.71 9.64 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +25.25 25.81 24.54 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +13.97 14.01 13.72 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +24.47 24.36 23.69 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +67.25 67.99 64.90 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +28.96 28.92 28.85 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +28.61 28.45 27.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +98.80 100.11 94.57 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +54.88 56.51 52.78 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +63.86 63.59 63.35 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +371.32 374.79 367.78 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +47.26 45.56 44.69 [1280, 720] VitTrack with 
['object_tracking_vittrack_2023sep.onnx'] +37.61 37.61 33.64 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +37.39 37.71 37.03 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +90.84 91.34 85.77 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +76.44 78.00 74.90 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +112.68 112.21 110.42 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +112.48 111.86 110.04 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +43.99 43.33 41.68 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +44.97 44.42 41.68 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +36.77 46.38 21.77 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +``` + +GPU (CUDA-FP16): + +``` +$ python3 benchmark.py --all --fp32 --cfg_exclude wechat --cfg_overwrite_backend_target 2 +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_CUDA +target=cv.dnn.DNN_TARGET_CUDA_FP16 +mean median min input size model +25.05 25.05 24.95 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +117.82 126.96 113.17 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +88.54 88.33 88.04 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +97.43 97.38 96.98 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +69.40 68.28 66.36 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +120.92 131.57 119.37 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +128.43 128.08 119.37 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +64.90 63.88 62.81 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +370.21 371.97 366.38 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +164.28 164.75 162.94 [416, 416] NanoDet with 
['object_detection_nanodet_2022nov.onnx'] +299.22 300.54 295.64 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +49.61 47.58 47.14 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +149.50 151.12 147.24 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +156.59 154.01 153.92 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +43.66 43.64 43.31 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +75.87 77.33 74.38 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +428.97 428.99 426.11 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +428.66 427.46 425.66 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +32.41 31.90 31.68 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +33.42 35.75 31.68 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +29.34 36.44 21.27 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +``` + +### Khadas VIM3 + +Specs: [details](https://www.khadas.com/vim3) +- (SoC) CPU: Amlogic A311D, 2.2 GHz Quad core ARM Cortex-A73 and 1.8 GHz dual core Cortex-A53 +- NPU: 5 TOPS Performance NPU INT8 inference up to 1536 MAC Supports all major deep learning frameworks including TensorFlow and Caffe + +CPU: + +``` +$ python3 benchmark.py --all --cfg_exclude wechat +Benchmarking ... 
+backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +4.62 4.62 4.53 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +5.24 5.29 4.53 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +55.04 54.55 53.54 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +67.34 67.96 53.54 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +29.50 45.62 26.14 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +35.59 36.22 26.14 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +35.80 35.08 34.76 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +40.32 45.32 34.76 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +71.92 66.92 62.98 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +70.68 72.31 62.98 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +59.27 53.91 52.09 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +52.17 67.58 41.23 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +55.44 47.28 41.23 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +55.83 56.80 41.23 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +335.75 329.39 325.42 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +340.42 335.78 325.42 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +128.58 127.15 124.03 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +125.85 126.47 110.14 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +179.93 170.66 166.76 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +178.61 213.72 164.61 [416, 416] NanoDet with 
['object_detection_nanodet_2022nov_int8.onnx'] +1108.12 1100.93 1072.45 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +1100.58 1121.31 982.74 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +32.20 32.84 30.99 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +78.26 78.96 75.60 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +87.18 88.22 75.60 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +83.22 84.20 80.07 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +327.07 339.80 321.98 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +316.56 302.60 269.10 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +75.38 73.67 70.15 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +211.02 213.14 199.28 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +210.19 217.15 199.28 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +242.34 225.59 199.28 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +265.33 271.87 199.28 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +194.77 195.13 192.69 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +197.16 200.94 192.69 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +185.45 199.47 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +187.64 180.57 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +182.53 166.96 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +182.90 178.97 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +184.26 194.43 161.37 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +180.65 180.59 155.36 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +NPU (TIMVX): + +``` +$ python3 benchmark.py --all --int8 
--cfg_overwrite_backend_target 3 +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_TIMVX +target=cv.dnn.DNN_TARGET_NPU +mean median min input size model +5.24 7.45 4.77 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +45.96 46.10 43.21 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +30.25 30.30 28.68 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +19.75 20.18 18.19 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +28.75 28.85 28.47 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +148.80 148.85 143.45 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +143.17 141.11 136.58 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +73.19 78.57 62.89 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +32.11 30.50 29.97 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +116.32 120.72 99.40 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +408.18 418.89 374.12 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +37.34 38.57 32.03 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +41.82 39.84 37.63 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +160.70 160.90 153.15 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +160.47 160.48 151.88 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +239.38 237.47 231.95 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +197.61 201.16 162.69 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +196.69 164.78 162.69 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### Atlas 200 DK + +Specs: [details_en](https://e.huawei.com/uk/products/cloud-computing-dc/atlas/atlas-200), 
[details_cn](https://www.hiascend.com/zh/hardware/developer-kit) +- (SoC) CPU: 8-core Cortex-A55 @ 1.6 GHz (max) +- NPU: Ascend 310, dual DaVinci AI cores, 22/16/8 TOPS INT8. + +CPU: + +``` +$ python3 benchmark.py --all --cfg_exclude wechat +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +7.82 7.82 7.77 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +8.57 8.77 7.77 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +92.21 92.11 91.87 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +122.07 126.02 91.87 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +42.93 43.26 42.75 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +55.91 57.40 42.75 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +67.85 67.91 67.47 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +70.06 70.21 67.47 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +102.49 102.65 102.10 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +114.02 116.16 102.10 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +92.66 92.49 92.36 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +79.39 80.75 68.47 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +89.66 68.66 68.47 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +90.59 92.13 68.47 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +499.55 500.15 498.36 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +571.85 580.88 498.36 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +201.99 201.55 200.62 [320, 240] LPD_YuNet with 
['license_plate_detection_lpd_yunet_2023mar.onnx'] +216.72 217.34 200.62 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +313.66 313.85 312.13 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +322.98 323.45 312.13 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +1875.33 1877.53 1871.26 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +1989.04 2005.25 1871.26 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +143.62 143.19 137.16 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +159.80 159.62 159.40 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +152.18 152.86 145.56 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +145.83 145.77 145.45 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +521.46 521.66 520.28 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +541.50 544.02 520.28 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +134.02 136.01 132.06 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +360.26 360.82 359.13 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +361.22 361.51 359.13 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +427.85 362.87 359.13 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +475.44 490.06 359.13 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +285.19 284.91 284.69 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +318.96 323.30 284.69 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +289.82 360.87 244.07 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +285.40 303.13 244.07 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +274.67 244.47 243.87 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +277.84 262.99 243.87 [1280, 720] CRNN with 
['text_recognition_CRNN_CH_2022oct_int8.onnx'] +283.02 280.77 243.87 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +279.21 262.55 243.87 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +NPU (CANN): + + + +``` +$ python3 benchmark.py --all --fp32 --cfg_exclude wechat:crnn:vittrack --model_exclude pose_estimation_mediapipe_2023mar.onnx --cfg_overwrite_backend_target 4 +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_CANN +target=cv.dnn.DNN_TARGET_NPU +mean median min input size model +2.24 2.21 2.19 [160, 120] YuNet with ['face_detection_yunet_2022mar.onnx'] +2.66 2.66 2.64 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +2.19 2.19 2.16 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +6.27 6.22 6.17 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +6.94 6.94 6.85 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +5.15 5.13 5.10 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +5.41 5.42 5.10 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +6.99 6.99 6.95 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +7.63 7.64 7.43 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +20.62 22.09 19.16 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +28.59 28.60 27.91 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +5.17 5.26 5.09 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +16.45 16.44 16.31 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +5.58 5.57 5.54 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +``` + +### Toybrick RV1126 + +Specs: [details](https://t.rock-chips.com/en/portal.php?mod=view&aid=26) +- CPU: Quad core ARM Cortex-A7, up to 1.5GHz +- NPU (Not supported by OpenCV): 2.0TOPS, support 8bit / 16bit + +CPU: + +``` 
+$ python3 benchmark.py --all --cfg_exclude wechat +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +56.78 56.74 56.46 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +51.16 51.41 45.18 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +1737.74 1733.23 1723.65 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +1298.48 1336.02 920.44 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +609.51 611.79 584.89 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +500.21 517.38 399.97 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +465.12 471.89 445.36 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +389.95 385.01 318.29 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +1623.94 1607.90 1595.09 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +1109.61 1186.03 671.15 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +1567.09 1578.61 1542.75 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +1188.83 1219.46 850.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +996.30 884.80 689.11 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +849.51 805.93 507.78 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +11855.64 11836.80 11750.10 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +7752.60 8149.00 4429.83 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +3260.22 3251.14 3204.85 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +2287.10 2400.53 1482.04 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +2335.89 
2335.93 2313.63 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +1899.16 1945.72 1529.46 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +37600.81 37558.85 37414.98 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +24185.35 25519.27 13395.47 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +411.41 448.29 397.86 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +905.77 890.22 866.06 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +780.94 817.69 653.26 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +1315.48 1321.44 1299.68 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +11143.23 11155.05 11105.11 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +7056.60 7457.76 3753.42 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +736.02 732.90 701.14 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +4267.03 4288.42 4229.69 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +4265.58 4276.54 4222.22 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +3678.65 4265.95 2636.57 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +3383.73 3490.66 2636.57 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +2180.44 2197.45 2152.67 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +2217.08 2241.77 2152.67 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +2217.15 2251.65 2152.67 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +2206.73 2219.60 2152.63 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +2208.84 2219.14 2152.63 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +2035.98 2185.05 1268.94 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +1927.93 2178.84 1268.94 [1280, 720] CRNN with 
['text_recognition_CRNN_CN_2021nov_int8.onnx'] +1822.23 2213.30 1183.93 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### Khadas Edge2 (with RK3588) + +Board specs: [details](https://www.khadas.com/edge2) +SoC specs: [details](https://www.rock-chips.com/a/en/products/RK35_Series/2022/0926/1660.html) +- CPU: 2.25GHz Quad Core ARM Cortex-A76 + 1.8GHz Quad Core Cortex-A55 +- NPU (Not supported by OpenCV): Build-in 6 TOPS Performance NPU, triple core, support int4 / int8 / int16 / fp16 / bf16 / tf32 + +CPU: + +``` +$ python3 benchmark.py --all --cfg_exclude wechat +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +2.30 2.29 2.26 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +2.70 2.73 2.26 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +28.94 29.00 28.60 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +37.46 38.85 28.60 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +12.44 12.40 12.36 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +17.14 17.64 12.36 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +20.22 20.36 20.08 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +23.11 23.50 20.08 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +29.63 29.78 28.61 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +35.57 35.61 28.61 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +27.45 27.46 27.25 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +22.95 23.37 19.13 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +27.50 19.40 19.13 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +28.46 29.33 19.13 [224, 224] MobileNet with 
['image_classification_mobilenetv2_2022apr_int8.onnx'] +151.10 151.79 146.96 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +181.69 184.19 146.96 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +53.83 52.64 50.24 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +60.95 60.06 50.24 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +98.03 104.53 83.47 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +106.91 110.68 83.47 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +554.30 550.32 538.99 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +591.95 599.62 538.99 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +14.02 13.89 13.56 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +45.03 44.65 43.28 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +50.87 52.24 43.28 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +42.90 42.68 42.40 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +148.01 146.42 139.56 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +159.16 155.98 139.56 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +37.06 37.43 36.39 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +103.42 104.24 101.26 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +103.41 104.41 100.08 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +126.21 103.90 100.08 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +142.53 147.66 100.08 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +69.49 69.52 69.17 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +70.63 70.69 69.17 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +67.15 72.03 61.13 [1280, 720] CRNN with 
['text_recognition_CRNN_EN_2021sep.onnx'] +67.74 66.72 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +66.26 61.46 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +67.36 65.65 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +68.52 69.93 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +68.36 65.65 61.13 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### Horizon Sunrise X3 PI + +Specs: [details_cn](https://developer.horizon.ai/sunrise) +- CPU: ARM Cortex-A53,4xCore, 1.2G +- BPU (aka NPU, not supported by OpenCV): (Bernoulli Arch) 2×Core,up to 1.0G, ~5Tops + +CPU: + +``` +$ python3 benchmark.py --all +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +10.56 10.69 10.46 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +12.45 12.60 10.46 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +124.80 127.36 124.45 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +168.67 174.03 124.45 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +55.12 55.38 54.91 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +76.31 79.00 54.91 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +77.44 77.53 77.07 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +89.22 90.40 77.07 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +132.95 133.21 132.35 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +147.40 149.99 132.35 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +119.71 120.69 119.32 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +102.57 104.40 88.49 [224, 224] MobileNet with 
['image_classification_mobilenetv2_2022apr.onnx'] +114.56 88.81 88.49 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +117.12 116.07 88.49 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +653.39 653.85 651.99 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +706.43 712.61 651.99 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +252.05 252.16 250.98 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +273.03 274.27 250.98 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +399.35 405.40 390.82 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +413.37 410.75 390.82 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +2516.91 2516.82 2506.54 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +2544.65 2551.55 2506.54 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +84.15 85.18 77.31 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +168.54 169.05 168.15 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +196.46 199.81 168.15 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +172.55 172.83 171.85 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +678.74 678.04 677.44 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +653.71 655.74 631.68 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +162.87 165.82 160.04 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +9.93 9.97 9.82 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] +475.98 475.34 472.72 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +475.90 477.57 472.44 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +585.72 475.98 472.44 [640, 
480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +663.34 687.10 472.44 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +446.82 445.92 444.32 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +453.60 456.07 444.32 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +427.47 463.88 381.10 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +432.15 421.18 381.10 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +420.61 386.28 380.35 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +425.24 426.69 380.35 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +431.14 447.85 380.35 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +424.77 417.01 380.35 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### MAIX-III AX-PI + +Specs: [details_en](https://wiki.sipeed.com/hardware/en/maixIII/ax-pi/axpi.html#Hardware), [details_cn](https://wiki.sipeed.com/hardware/zh/maixIII/ax-pi/axpi.html#%E7%A1%AC%E4%BB%B6%E5%8F%82%E6%95%B0) +SoC specs: [details_cn](https://axera-tech.com/product/T7297367876123493768) +- CPU: Quad cores ARM Cortex-A7 +- NPU (Not supported by OpenCV): 14.4Tops@int4,3.6Tops@int8 + +CPU: + +``` +$ python3 benchmark.py --all --cfg_exclude wechat +Benchmarking ... 
+backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +83.95 83.76 83.62 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +79.35 79.92 75.47 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +2326.96 2326.49 2326.08 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +1950.83 1988.86 1648.47 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +823.42 823.35 822.50 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +750.31 757.91 691.41 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +664.73 664.61 663.84 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +596.29 603.96 540.72 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +2175.34 2173.62 2172.91 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +1655.11 1705.43 1236.22 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +2123.08 2122.92 2122.18 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +1619.08 1672.32 1215.05 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +1470.74 1216.86 1215.05 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +1287.09 1242.01 873.92 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +15841.89 15841.20 15828.32 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +11652.03 12079.50 8299.15 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +4371.75 4396.81 4370.29 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +3428.89 3521.87 2670.46 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +3421.19 3412.22 3411.20 [416, 416] NanoDet with 
['object_detection_nanodet_2022nov.onnx'] +2990.22 3034.11 2645.09 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +50633.38 50617.44 50614.78 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +36260.23 37731.28 24683.40 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +548.36 551.97 537.90 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +1285.54 1285.40 1284.43 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +1204.04 1211.89 1137.65 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +1849.87 1848.78 1847.80 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +14895.99 14894.27 14884.17 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +10496.44 10931.97 6976.60 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +1045.98 1052.05 1040.56 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +5899.23 5900.08 5896.73 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +5889.39 5890.58 5878.81 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +5436.61 5884.03 4665.77 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +5185.53 5273.76 4539.47 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +3230.95 3226.14 3225.53 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +3281.31 3295.46 3225.53 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +3247.56 3337.52 3196.25 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +3243.20 3276.35 3196.25 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +3230.49 3196.80 3195.02 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +3065.33 3217.99 2348.42 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +2976.24 3244.75 2348.42 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] 
+2864.72 3219.46 2208.44 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### StarFive VisionFive 2 + +Specs: [details_cn](https://doc.rvspace.org/VisionFive2/PB/VisionFive_2/specification_pb.html), [details_en](https://doc-en.rvspace.org/VisionFive2/Product_Brief/VisionFive_2/specification_pb.html) +- CPU: StarFive JH7110 with RISC-V quad-core CPU with 2 MB L2 cache and a monitor core, supporting RV64GC ISA, working up to 1.5 GHz +- GPU: IMG BXE-4-32 MC1 with work frequency up to 600 MHz + +CPU: + +``` +$ python3 benchmark.py --all --cfg_exclude wechat +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +41.13 41.07 41.06 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +37.43 37.83 34.35 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +1169.96 1169.72 1168.74 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +887.13 987.00 659.71 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +423.91 423.98 423.62 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +350.89 358.26 292.27 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +319.69 319.26 318.76 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +278.74 282.75 245.22 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +1127.61 1127.36 1127.17 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +785.44 819.07 510.77 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +1079.69 1079.66 1079.31 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +820.15 845.54 611.26 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +698.13 612.64 516.41 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +600.12 
564.13 382.59 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +8116.21 8127.96 8113.70 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +5408.02 5677.71 3240.16 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +2267.96 2268.26 2266.59 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +1605.80 1671.91 1073.50 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +1731.61 1733.17 1730.54 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +1435.43 1477.52 1196.01 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +26185.41 26190.85 26168.68 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +17019.14 17923.20 9673.68 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +288.95 290.28 260.40 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +628.64 628.47 628.27 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +562.90 569.91 509.93 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +910.38 910.94 909.64 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +7613.64 7626.26 7606.07 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +4895.28 5166.85 2716.65 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +524.52 526.33 522.71 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +2988.22 2996.51 2980.17 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +2981.84 2979.74 2975.80 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +2610.78 2979.14 1979.37 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +2425.29 2478.92 1979.37 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +1404.01 1415.46 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +1425.42 1426.51 
1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +1432.21 1450.47 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +1425.24 1448.27 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +1428.84 1446.76 1401.36 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +1313.68 1427.46 808.70 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +1242.07 1408.93 808.70 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +1174.32 1426.07 774.78 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### Khadas VIM4 + +Board specs: https://www.khadas.com/vim4, https://dl.khadas.com/products/vim4/specs/vim4-specs.pdf + +SoC specs: +- CPU: Amlogic A311D2, 2.2GHz Quad core ARM Cortex-A73 and 2.0GHz Quad core Cortex-A53 CPU, with 32-bit STM32G031K6 microprocessor. +- GPU: Mali G52MP8(8EE) 800Mhz GPU. +- NPU: 3.2 TOPS Build-in NPU (Not supported by dnn yet) + +CPU: + +``` +$ python3 benchmark.py --all --cfg_exclude wechat +Benchmarking ... 
+backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +4.27 4.33 4.17 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +4.58 4.58 4.17 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +39.94 39.98 39.42 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +49.33 50.59 39.42 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +17.28 17.63 16.93 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +22.78 23.27 16.93 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +25.83 25.46 25.30 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +28.23 28.87 25.30 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +47.68 47.72 45.65 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +49.25 49.45 45.65 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +38.73 38.18 37.89 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +33.68 33.99 29.16 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +36.22 29.50 29.16 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +36.12 35.69 29.16 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +219.81 220.21 215.97 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +224.03 222.27 215.97 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +81.46 84.07 77.95 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +81.46 83.07 77.95 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +136.14 136.12 128.61 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +136.57 136.30 128.61 [416, 416] NanoDet with 
['object_detection_nanodet_2022nov_int8.onnx'] +805.54 805.23 795.82 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +768.87 766.00 727.12 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +29.47 29.39 28.49 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +54.45 54.76 53.45 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +60.84 61.07 53.45 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +57.22 57.22 56.14 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +218.22 224.50 215.54 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +199.53 203.24 179.85 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +53.06 54.61 51.82 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +148.82 149.62 146.73 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +148.91 148.99 146.59 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +175.33 150.60 146.59 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +194.12 201.48 146.59 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +133.27 132.90 132.54 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +135.27 135.12 132.54 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +127.49 137.43 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +129.18 125.95 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +125.82 114.44 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +127.63 124.81 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +129.24 134.50 113.82 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +126.64 125.09 110.45 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### Jetson Nano Orin + +Specs: 
https://www.nvidia.com/en-us/autonomous-machines/embedded-systems/jetson-orin/ +- CPU: 6-core Arm® Cortex®-A78AE v8.2 64-bit CPU, 1.5MB L2 + 4MB L3 +- GPU: 1024-core NVIDIA Ampere architecture GPU with 32 Tensor Cores, max freq 625MHz + +CPU: + +``` +$ python3 benchmark.py --all +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +2.59 2.62 2.50 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +2.98 2.97 2.50 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +20.05 24.76 19.75 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +31.84 32.72 19.75 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +9.15 9.22 9.04 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +14.33 15.35 9.04 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +15.00 15.17 14.80 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +18.37 18.63 14.80 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +24.86 25.09 24.12 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +30.17 34.51 24.12 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +18.47 18.55 18.23 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +17.08 17.30 15.80 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +21.26 15.89 15.80 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +23.19 24.15 15.80 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +102.30 101.90 101.44 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +142.33 146.24 101.44 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +39.91 39.01 38.46 [320, 240] LPD_YuNet with 
['license_plate_detection_lpd_yunet_2023mar.onnx'] +51.35 50.70 38.46 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +125.31 126.50 121.92 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +132.95 133.67 121.92 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +400.91 430.48 384.87 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +476.63 509.48 384.87 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +19.16 19.91 18.04 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +27.73 26.93 26.72 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +35.16 41.14 26.72 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +33.05 33.18 32.67 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +93.58 94.02 92.36 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +119.80 153.20 92.36 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +31.51 32.19 30.69 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +3.53 3.53 3.51 [100, 100] WeChatQRCode with ['detect_2021nov.prototxt', 'detect_2021nov.caffemodel', 'sr_2021nov.prototxt', 'sr_2021nov.caffemodel'] +78.10 77.77 77.17 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +78.03 78.38 77.17 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +99.09 79.42 77.17 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +112.82 116.06 77.17 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may_int8.onnx'] +142.97 142.84 135.56 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +144.53 148.52 135.56 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +134.47 146.62 112.91 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +136.37 131.39 112.91 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +132.08 117.15 109.24 
[1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +135.17 130.23 109.24 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +138.38 143.25 109.24 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +137.08 134.22 109.24 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +GPU (CUDA-FP32): + +``` +$ python3 benchmark.py --all --fp32 --cfg_exclude wechat --cfg_overwrite_backend_target 1 +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_CUDA +target=cv.dnn.DNN_TARGET_CUDA +mean median min input size model +5.23 5.27 5.17 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +7.59 7.62 7.55 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +8.48 8.46 8.37 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +12.29 13.04 11.11 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +12.91 13.28 12.79 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +8.41 8.42 8.35 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +9.36 9.43 8.35 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +32.58 32.71 31.11 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +16.33 16.08 16.04 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +24.46 24.35 24.01 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +103.28 103.41 102.37 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +19.75 19.78 19.10 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +10.84 10.76 10.75 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +14.50 14.50 14.36 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +23.53 23.36 23.16 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +26.54 27.22 25.99 [256, 256] MPPose with 
['pose_estimation_mediapipe_2023mar.onnx'] +27.49 27.80 26.97 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +27.53 27.75 26.95 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +15.66 16.30 15.41 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +15.91 15.80 15.41 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +13.58 16.70 9.48 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +``` + +GPU (CUDA-FP16): + +``` +$ python3 benchmark.py --all --fp32 --cfg_exclude wechat --cfg_overwrite_backend_target 2 +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_CUDA +target=cv.dnn.DNN_TARGET_CUDA_FP16 +mean median min input size model +5.00 5.04 4.92 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +5.09 5.08 5.05 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +6.81 6.86 6.66 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +9.19 10.18 9.06 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +16.20 16.62 15.93 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +6.84 6.82 6.80 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +7.46 7.87 6.80 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +14.18 14.16 14.03 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +13.35 13.10 13.04 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +19.94 19.95 19.50 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +72.25 72.91 70.99 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +22.37 22.44 21.60 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +8.92 8.92 8.84 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +11.11 11.13 10.98 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +13.22 13.23 13.12 [128, 256] 
YoutuReID with ['person_reid_youtu_2021nov.onnx'] +26.79 27.04 26.24 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +19.71 19.75 19.47 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +19.76 19.93 19.47 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +16.30 15.88 15.80 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +16.36 16.51 15.80 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +13.64 16.27 8.90 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +``` + +### Atlas 200I DK A2 + +Specs: https://www.hiascend.com/hardware/developer-kit-a2 (cn) +- CPU: 4 core * 1.0 GHz +- NPU: Ascend 310B, 8 TOPS INT8, 4 TFLOPS FP16 (Benchmark results are coming later) + +CPU: + +``` +$ python3 benchmark.py --all --cfg_exclude wechat +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +6.67 6.80 5.17 [160, 120] YuNet with ['face_detection_yunet_2023mar.onnx'] +8.70 9.22 5.17 [160, 120] YuNet with ['face_detection_yunet_2023mar_int8.onnx'] +78.90 81.48 74.18 [150, 150] SFace with ['face_recognition_sface_2021dec.onnx'] +113.79 115.49 74.18 [150, 150] SFace with ['face_recognition_sface_2021dec_int8.onnx'] +36.94 38.64 33.23 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july.onnx'] +55.14 60.34 33.23 [112, 112] FacialExpressionRecog with ['facial_expression_recognition_mobilefacenet_2022july_int8.onnx'] +56.00 55.56 51.99 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb.onnx'] +71.09 72.20 51.99 [224, 224] MPHandPose with ['handpose_estimation_mediapipe_2023feb_int8.onnx'] +78.01 80.36 73.97 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar.onnx'] +111.56 113.84 73.97 [192, 192] PPHumanSeg with ['human_segmentation_pphumanseg_2023mar_int8.onnx'] +70.20 68.69 65.12 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr.onnx'] +61.72 
63.39 48.28 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr.onnx'] +80.12 54.37 48.28 [224, 224] MobileNet with ['image_classification_mobilenetv1_2022apr_int8.onnx'] +87.42 96.71 48.28 [224, 224] MobileNet with ['image_classification_mobilenetv2_2022apr_int8.onnx'] +417.31 417.30 406.17 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan.onnx'] +597.15 619.24 406.17 [224, 224] PPResNet with ['image_classification_ppresnet50_2022jan_int8.onnx'] +155.73 153.40 145.10 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar.onnx'] +200.41 200.24 145.10 [320, 240] LPD_YuNet with ['license_plate_detection_lpd_yunet_2023mar_int8.onnx'] +253.05 252.73 245.91 [416, 416] NanoDet with ['object_detection_nanodet_2022nov.onnx'] +274.44 269.76 245.91 [416, 416] NanoDet with ['object_detection_nanodet_2022nov_int8.onnx'] +1407.75 1416.44 1357.23 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +1716.25 1709.35 1357.23 [640, 640] YoloX with ['object_detection_yolox_2022nov_int8.onnx'] +37.02 37.66 32.50 [1280, 720] VitTrack with ['object_tracking_vittrack_2023sep.onnx'] +92.56 97.78 87.87 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb.onnx'] +119.29 123.56 87.87 [192, 192] MPPalmDet with ['palm_detection_mediapipe_2023feb_int8.onnx'] +90.13 90.75 87.78 [224, 224] MPPersonDet with ['person_detection_mediapipe_2023mar.onnx'] +285.75 284.54 278.06 [128, 256] YoutuReID with ['person_reid_youtu_2021nov.onnx'] +389.02 405.12 278.06 [128, 256] YoutuReID with ['person_reid_youtu_2021nov_int8.onnx'] +83.16 85.91 77.83 [256, 256] MPPose with ['pose_estimation_mediapipe_2023mar.onnx'] +219.28 220.74 214.53 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may.onnx'] +217.18 227.44 207.15 [640, 480] PPOCRDet with ['text_detection_en_ppocrv3_2023may.onnx'] +319.73 210.22 207.15 [640, 480] PPOCRDet with ['text_detection_cn_ppocrv3_2023may_int8.onnx'] +396.47 399.45 207.15 [640, 480] PPOCRDet with 
['text_detection_en_ppocrv3_2023may_int8.onnx'] +165.34 172.10 156.36 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2021sep.onnx'] +169.22 174.21 156.36 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov.onnx'] +158.82 172.23 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2021sep.onnx'] +159.39 156.42 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2023feb_fp16.onnx'] +155.87 146.82 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2023feb_fp16.onnx'] +163.43 152.16 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_CH_2022oct_int8.onnx'] +173.46 162.85 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_CN_2021nov_int8.onnx'] +175.28 145.22 135.52 [1280, 720] CRNN with ['text_recognition_CRNN_EN_2022oct_int8.onnx'] +``` + +### Windows (Intel 12th Gen Core i5) + +Specs: +- CPU: Intel(R) Core(TM) i5-12400F + +CPU: + +``` +$ python benchmark.py --cfg config/object_detection_yolox.yaml +Benchmarking ... +backend=cv.dnn.DNN_BACKEND_OPENCV +target=cv.dnn.DNN_TARGET_CPU +mean median min input size model +264.86 265.03 254.11 [640, 640] YoloX with ['object_detection_yolox_2022nov.onnx'] +```